def test_queue_remove_job():
    """Remove a job from the queue and from the persistent list"""
    api = common.get_api()
    base = pathlib.Path(tempfile.mkdtemp(prefix="persistent_uj_list_"))
    pujl_path = base / "joblistdir"
    # dataset metadata and the dataset-creation request
    meta = common.make_dataset_dict(hint="create-with-resource")
    data = create_dataset(dataset_dict=meta, api=api)
    queue = UploadQueue(api=api, path_persistent_job_list=pujl_path)
    # disable all daemons, so no uploading happens
    for daemon in (queue.daemon_compress, queue.daemon_upload,
                   queue.daemon_verify):
        daemon.join()
    job_first = queue.new_job(dataset_id=data["id"], paths=[dpath])
    assert job_first.state == "init"
    queue.remove_job(job_first.dataset_id)
    assert job_first not in queue
    assert not queue.jobs_eternal.job_exists(job_first.dataset_id)
    # adding it again should work
    job_second = queue.new_job(dataset_id=data["id"], paths=[dpath])
    assert job_second in queue
    assert job_second.__getstate__() == job_first.__getstate__()
def get_endpoint(tenant_name, service, username, password, hostname,
                 keystone_port):
    """Return the admin endpoint URL of *service* from keystone's catalog.

    Authenticates with the given tenant/username/password, then scans the
    service catalog for *service*. Returns ``None`` when authentication
    succeeds but the service is not listed (same implicit behavior as
    before, now explicit). On a 400 response, redirects to the login page
    with an error message. Other statuses implicitly return ``None``.
    """
    header = {'Content-Type': 'application/json'}
    params = json.dumps({
        "auth": {
            "tenantName": tenant_name,
            "passwordCredentials": {
                "username": username,
                "password": password
            }
        }
    })
    method = 'POST'
    path = '/v2.0/tokens'
    response = get_api(method, path, params, header, hostname, keystone_port)
    if response.status == 200:
        data = json.loads(response.read())
        # iterate the catalog entries directly instead of the
        # `for i in range(len(...))` index anti-pattern
        for entry in data['access']['serviceCatalog']:
            if entry['name'] == service:
                return entry['endpoints'][0]['adminURL']
        return None  # service not found in the catalog
    if response.status == 400:
        error = 'Incorect username/password check again'
        return redirect(url_for('login', error=error))
def test_upload_private(qtbot, monkeypatch):
    """Upload a private test dataset"""
    mw = DCORAid()
    # let pending Qt events settle before interacting with the GUI
    QtWidgets.QApplication.processEvents(QtCore.QEventLoop.AllEvents, 300)
    dlg = UploadDialog(mw.panel_upload)
    mw.panel_upload._dlg_manual = dlg
    dlg.finished.connect(mw.panel_upload.on_upload_manual_ready)
    # Fill data for testing
    dlg._autofill_for_testing()
    # set visibility to private
    dlg.comboBox_vis.setCurrentIndex(dlg.comboBox_vis.findData("private"))
    # Avoid message boxes
    monkeypatch.setattr(QMessageBox, "question",
                        lambda *args: QMessageBox.Yes)
    # Commence upload
    dlg.on_proceed()
    dataset_id = dlg.dataset_id
    assert dataset_id is not None
    for ii in range(200):  # give it 20secs to upload
        state = mw.panel_upload.jobs[0].get_status()["state"]
        if state == "done":
            break
        time.sleep(.1)
    else:
        # for/else: loop exhausted without reaching "done"
        raise ValueError("Job did not complete, state: '{}'".format(state))
    mw.close()
    # make sure the dataset is private
    api = common.get_api()
    dataset_dict = api.get(api_call="package_show", id=dataset_id)
    assert dataset_dict["private"]
    assert isinstance(dataset_dict["private"], bool)
def test_load_with_existing_dataset():
    """Loading a task for an already-created dataset reuses its ID."""
    api = common.get_api()
    # create some metadata
    dataset_dict = common.make_dataset_dict(hint="task_test")
    # post dataset creation request (resource is uploaded right away)
    dataset_dict_with_id = create_dataset(dataset_dict=dataset_dict,
                                          resources=[dpath],
                                          api=api)
    task_path = common.make_upload_task(dataset_dict=dataset_dict_with_id,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name])
    uj = task.load_task(task_path, api=api)
    assert uj.dataset_id == dataset_dict_with_id["id"]
    # skipping the upload should work, since it's already uploaded
    uj.set_state("online")
    # Poll verification for up to 60 s (600 iterations of 0.1 s). The
    # original issued one redundant extra task_verify_resources() call
    # before entering the loop; a single call per iteration suffices.
    for _ in range(600):
        uj.task_verify_resources()
        if uj.state == "done":
            break
        time.sleep(.1)
    else:
        raise AssertionError("State not 'done' - No verification within 60s!")
def test_persistent_upload_joblist_basic():
    """basic job tests"""
    api = common.get_api()
    tmp = pathlib.Path(tempfile.mkdtemp(prefix="persistent_uj_list_"))
    pujl_path = tmp / "joblistdir"
    task_path = common.make_upload_task()
    pujl = PersistentUploadJobList(pujl_path)
    job = load_task(task_path, api=api)
    # add a job
    pujl.immortalize_job(job)
    assert job in pujl
    assert job.dataset_id in pujl
    # find that job again (a fresh instance with identical state)
    twin = pujl.summon_job(job.dataset_id, api=api)
    assert twin is not job, "not same instance"
    assert twin.__getstate__() == job.__getstate__(), "same data"
    assert job.dataset_id in pujl.get_queued_dataset_ids()
    # remove the job
    assert pujl.job_exists(job.dataset_id)
    assert pujl.is_job_queued(job.dataset_id)
    assert not pujl.is_job_done(job.dataset_id)
    pujl.obliterate_job(job.dataset_id)
    assert job not in pujl
    assert not pujl.job_exists(job.dataset_id)
def tweet_gif(gif, status):
    """Upload *gif* via chunked media upload and post *status* with it."""
    logger.debug("Starting to tweet")
    api = get_api()
    media_id = api.UploadMediaChunked(media=gif)
    logger.debug('media ID: {0}'.format(media_id))
    posted = api.PostUpdate(status=status, media=media_id)
    logger.debug('Finished tweet: {0}'.format(posted))
    return posted
def test_load_basic():
    """A task file round-trips its task ID and resource names."""
    api = common.get_api()
    task_path = common.make_upload_task(task_id="zpowiemsnh",
                                        resource_names=["humdinger.rtdc"])
    assert task.task_has_circle(task_path)
    loaded = task.load_task(task_path, api=api)
    assert loaded.task_id == "zpowiemsnh"
    assert loaded.resource_names == ["humdinger.rtdc"]
def test_dataset_id_does_not_exist():
    """Loading a task with an unknown dataset ID must raise."""
    api = common.get_api()
    # a random UUID that no dataset on the server can have
    fake_id = str(uuid.uuid4())
    task_path = common.make_upload_task(dataset_id=fake_id)
    with pytest.raises(dcoraid.api.APINotFoundError, match=fake_id):
        task.load_task(task_path, api=api)
def test_missing_owner_org():
    """A task file without a circle (owner_org) cannot be loaded."""
    api = common.get_api()
    dataset_dict = common.make_dataset_dict(hint="task_test")
    dataset_dict.pop("owner_org")
    task_path = common.make_upload_task(dataset_dict=dataset_dict)
    assert not task.task_has_circle(task_path)
    with pytest.raises(dcoraid.api.APIConflictError,
                       match="A circle must be provided"):
        task.load_task(task_path, api=api)
def test_initialize():
    """A freshly created upload job starts out in the "init" state."""
    api = common.get_api()
    meta = common.make_dataset_dict(hint="create-with-resource")
    # the dataset must exist first so there is an "id" to attach to
    created = create_dataset(dataset_dict=meta, api=api)
    upload_job = job.UploadJob(api=api,
                               dataset_id=created["id"],
                               resource_paths=[dpath])
    assert upload_job.state == "init"
def test_no_ids():
    """Without a task_id or dataset ID, loading the task must fail."""
    api = common.get_api()
    dataset_dict = common.make_dataset_dict(hint="task_test")
    task_path = common.make_upload_task(dataset_dict=dataset_dict,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name],
                                        task_id=None)
    expected = "or pass the dataset_id via the dataset_kwargs"
    with pytest.raises(ValueError, match=expected):
        task.load_task(task_path, api=api)
def test_dataset_create_same_resource():
    """There should be an error when a resource is added twice"""
    api = common.get_api()
    meta = common.make_dataset_dict(hint="create-with-same-resource")
    data = dataset.create_dataset(dataset_dict=meta, api=api)
    dataset.add_resource(dataset_id=data["id"], path=dpath, api=api)
    # uploading the identical resource a second time must be rejected
    with pytest.raises(APIConflictError):
        dataset.add_resource(dataset_id=data["id"], path=dpath, api=api)
def test_resource_supplements():
    """Supplementary resource metadata survives a task round-trip."""
    supplement = {"chip": {"name": "7x2", "master name": "R1"}}
    task_path = common.make_upload_task(resource_paths=[dpath],
                                        resource_supplements=[supplement])
    uj = task.load_task(task_path, api=common.get_api())
    chip = uj.supplements[0]["chip"]
    assert chip["name"] == "7x2"
    assert chip["master name"] == "R1"
def test_persistent_upload_joblist_error_exists():
    """test things when a job is done"""
    api = common.get_api()
    base = pathlib.Path(tempfile.mkdtemp(prefix="persistent_uj_list_"))
    pujl = PersistentUploadJobList(base / "joblistdir")
    uj = load_task(common.make_upload_task(), api=api)
    pujl.immortalize_job(uj)
    # immortalizing the very same job twice is an error
    with pytest.raises(FileExistsError, match="already present at"):
        pujl.immortalize_job(uj)
def get_server_list(tenant_id, token, hostname, nova_port):
    """Return the detailed server list (all tenants) from nova.

    On a 400 response, redirects to the login page with an error message;
    other non-200 statuses implicitly return ``None``.
    """
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    method = 'GET'
    params = ''
    path = '/v2/' + tenant_id + '/servers/detail?all_tenants=1'
    response = get_api(method, path, params, header, hostname, nova_port)
    if response.status == 200:
        servers_list = json.loads(response.read())
        return servers_list
    if response.status == 400:
        error = 'Time out'
        # forward the message to the login view (the original computed
        # `error` but never passed it), matching the sibling helpers
        return redirect(url_for('login', error=error))
def get_compute_statistics(tenant_id, token, hostname, nova_port):
    """Fetch hypervisor statistics for *tenant_id* from nova."""
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    path = '/v2/' + tenant_id + '/os-hypervisors/statistics'
    response = get_api('GET', path, '', header, hostname, nova_port)
    if response.status == 200:
        return json.loads(response.read())
    if response.status == 400:
        error = 'Time out'
        return redirect(url_for('login', error=error))
def get_tenant_list(token, hostname, keystone_port):
    """Return the tenant list from keystone.

    On a 400 response, redirects to the login page with an error message;
    other non-200 statuses implicitly return ``None``.
    """
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    method = 'GET'
    params = ''
    path = '/v2.0/tenants'
    response = get_api(method, path, params, header, hostname,
                       keystone_port)
    if response.status == 200:
        tenants_list = json.loads(response.read())
        return tenants_list
    if response.status == 400:
        error = 'Time out'
        # forward the message to the login view (the original computed
        # `error` but never passed it), matching the sibling helpers
        return redirect(url_for('login', error=error))
def get_tenant_usage(tenant_admin_id, tenant_id, token, hostname, nova_port):
    """Fetch simple-tenant-usage of *tenant_id* via the admin tenant."""
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    path = ('/v2/' + tenant_admin_id
            + '/os-simple-tenant-usage/' + tenant_id)
    response = get_api('GET', path, '', header, hostname, nova_port)
    if response.status == 200:
        return json.loads(response.read())
    if response.status == 400:
        error = 'Time out'
        return redirect(url_for('login', error=error))
def get_volumes_list(tenant_id, token, hostname, cinder_port):
    """List all volumes (across all tenants) from cinder."""
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    path = '/v2/' + tenant_id + '/volumes?all_tenants=1'
    response = get_api('GET', path, '', header, hostname, cinder_port)
    if response.status == 200:
        return json.loads(response.read())
    if response.status == 400:
        error = 'Time out'
        return redirect(url_for('login', error=error))
def get_network(token, hostname, neutron_port):
    """List all networks known to neutron."""
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    response = get_api('GET', '/v2.0/networks', '', header, hostname,
                       neutron_port)
    if response.status == 200:
        # NOTE(review): the original named this `ports`, but the payload
        # comes from the /v2.0/networks endpoint
        networks = json.loads(response.read())
        return networks
    if response.status == 400:
        error = 'Time out'
        return redirect(url_for('login', error=error))
def test_load_with_update():
    """Loading with update_dataset_id writes the new ID back to disk."""
    api = common.get_api()
    task_path = common.make_upload_task(task_id="blackfalcon",
                                        resource_names=["marvel.rtdc"])
    assert task.task_has_circle(task_path)
    uj = task.load_task(task_path, api=api, update_dataset_id=True)
    assert uj.task_id == "blackfalcon"
    assert uj.resource_names == ["marvel.rtdc"]
    # the task file on disk now carries the dataset ID of the job
    with open(task_path) as fd:
        stored = json.load(fd)
    assert stored["dataset_dict"]["id"] == uj.dataset_id
def test_resource_supplements_with_other_files():
    """A non-.rtdc resource gets an empty supplement dictionary."""
    supplements = [{}, {"chip": {"name": "7x2", "master name": "R1"}}]
    task_path = common.make_upload_task(
        resource_paths=[__file__, dpath],
        resource_names=["test.py", "other_data.rtdc"],
        resource_supplements=supplements)
    uj = task.load_task(task_path, api=common.get_api())
    assert len(uj.supplements[0]) == 0
def test_custom_dataset_dict_2():
    """dataset_kwargs passed to load_task override the task's metadata."""
    api = common.get_api()
    # post dataset creation request
    task_path = common.make_upload_task(dataset_dict=True,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name])
    override = common.make_dataset_dict()
    override["authors"] = "Captain Hook!"
    uj = task.load_task(task_path, api=api, dataset_kwargs=override)
    # the overridden authors must have reached the server
    ddict = api.get("package_show", id=uj.dataset_id)
    assert ddict["authors"] == "Captain Hook!"
def test_resource_name_lengths():
    """Make sure ValueError is raised when list lengths do not match"""
    supplements = [{}, {"chip": {"name": "7x2", "master name": "R1"}}]
    task_path = common.make_upload_task(
        resource_paths=[__file__, dpath],
        resource_names=["other_data.rtdc"],  # one name for two paths
        resource_supplements=supplements)
    with pytest.raises(ValueError,
                       match="does not match number of resource names"):
        task.load_task(task_path, api=common.get_api())
def main():
    """Prompt for a username and return that user's follower count.

    Returns ``None`` when no API handle is available or the profile
    lookup fails. Converted from Python 2 syntax (``print`` statement,
    ``raw_input``) to Python 3; the rest of this file is Python 3, so
    the original did not even parse.
    """
    api = get_api()
    if not api:
        return
    username = input(
        "Username to lookup (leave blank for your own): ").strip()
    if username == "":
        # blank input: fall back to the authenticated user's own account
        username = api.user.screen_name
    try:
        user = api.get_user(screen_name=username)
    except tweetpony.APIError as err:
        print("Oh no! The user's profile could not be loaded. "
              "Twitter returned error #%i and said: %s"
              % (err.code, err.description))
    else:
        return user.followers_count
def get_token(tenant_name, username, password, hostname, keystone_port):
    """Authenticate against keystone and return a scoped token ID."""
    credentials = {"username": username, "password": password}
    body = json.dumps({"auth": {"tenantName": tenant_name,
                                "passwordCredentials": credentials}})
    header = {'Content-Type': 'application/json'}
    response = get_api('POST', '/v2.0/tokens', body, header, hostname,
                       keystone_port)
    if response.status == 200:
        data = json.loads(response.read())
        return data['access']['token']['id']
    if response.status == 400:
        error = 'Incorect username/password check again'
        return redirect(url_for('login', error=error))
def test_wrong_ids():
    """Conflicting IDs in task file vs. dataset dict must fail loudly."""
    api = common.get_api()
    dataset_dict = common.make_dataset_dict(hint="task_test")
    dataset_dict["id"] = "peter"
    task_path = common.make_upload_task(
        dataset_dict=dataset_dict,
        dataset_id="hans",  # deliberately different from "peter"
        resource_paths=[str(dpath)],
        resource_names=[dpath.name])
    expected = ("I got the following IDs: from upload job "
                + "state: hans; from dataset dict: peter")
    with pytest.raises(ValueError, match=expected):
        task.load_task(task_path, api=api)
def test_load_with_existing_dataset_map_from_task_dict_update():
    """load_task fills the task-to-dataset-ID mapping in-place."""
    api = common.get_api()
    dataset_dict = common.make_dataset_dict(hint="task_test")
    task_path = common.make_upload_task(dataset_dict=dataset_dict,
                                        resource_paths=[str(dpath)],
                                        resource_names=[dpath.name],
                                        task_id="xwing")
    id_map = {}
    uj = task.load_task(task_path, api=api,
                        map_task_to_dataset_id=id_map)
    assert uj.task_id == "xwing"
    # the dict we passed in now maps the task ID to the dataset ID
    assert id_map["xwing"] == uj.dataset_id
def test_queue_create_dataset_with_resource():
    """A queued job for an existing dataset uploads to completion."""
    api = common.get_api()
    meta = common.make_dataset_dict(hint="create-with-resource")
    data = create_dataset(dataset_dict=meta, api=api)
    queue = UploadQueue(api=api)
    queue.new_job(dataset_id=data["id"], paths=[dpath])
    # poll for up to 60 seconds until the job reaches "done"
    for _ in range(600):
        if queue[0].state == "done":
            break
        time.sleep(.1)
    else:
        assert False, "Job not finished: {}".format(queue[0].get_status())
def test_save_load():
    """Saving a job to a task file and loading it preserves its data."""
    api = common.get_api()
    bare = common.make_dataset_dict(hint="create-with-resource")
    # create the dataset first (to get the "id")
    created = create_dataset(dataset_dict=bare, api=api)
    uj = job.UploadJob(api=api,
                       dataset_id=created["id"],
                       resource_paths=[dpath],
                       task_id="hanspeter")
    tmp = pathlib.Path(tempfile.mkdtemp(prefix="task_"))
    task_path = tmp / "test.dcoraid-task"
    task.save_task(uj, path=task_path)
    reloaded = task.load_task(task_path, api=api)
    assert uj.dataset_id == reloaded.dataset_id
    assert uj.paths[0].samefile(reloaded.paths[0])
def get_storage_url(tenant_name, service, username, password, hostname,
                    keystone_port):
    """Return the internal storage endpoint URL of *service*.

    Authenticates against keystone and scans the service catalog.
    Returns ``None`` when authentication succeeds but the service is not
    listed (same implicit behavior as before, now explicit). On a 400
    response, redirects to the login page with an error message.
    """
    header = {'Content-Type': 'application/json'}
    params = json.dumps(
        {"auth": {"tenantName": tenant_name,
                  "passwordCredentials": {"username": username,
                                          "password": password}}})
    method = 'POST'
    path = '/v2.0/tokens'
    response = get_api(method, path, params, header, hostname,
                       keystone_port)
    if response.status == 200:
        data = json.loads(response.read())
        # iterate the catalog entries directly instead of the
        # `for i in range(len(...))` index anti-pattern
        for entry in data['access']['serviceCatalog']:
            if entry['name'] == service:
                return entry['endpoints'][0]['internalURL']
        return None  # service not found in the catalog
    if response.status == 400:
        error = 'Incorect username/password check again'
        return redirect(url_for('login', error=error))
def get_compute_list(tenant_id, token, hostname, nova_port, node_id=None):
    """List hypervisors from nova, or a single one when *node_id* is given.

    Fixes two issues in the original: the response body was read and
    stored before the status check (the raw bytes were never returned on
    non-200 paths, so reading early served no purpose), and the 400-path
    error message was computed but never forwarded to the login redirect
    (the sibling helpers all pass it along).
    """
    header = {'Content-Type': 'application/json', 'X-Auth-Token': token}
    method = 'GET'
    params = ''
    if not node_id:
        path = '/v2/' + tenant_id + '/os-hypervisors'
    else:
        path = '/v2/' + tenant_id + '/os-hypervisors/' + node_id
    response = get_api(method, path, params, header, hostname, nova_port)
    if response.status == 200:
        compute_list = json.loads(response.read())
        return compute_list
    if response.status == 400:
        error = 'Time out'
        return redirect(url_for('login', error=error))
def findFollows(author):
    """Return the follower count of *author*.

    An empty *author* falls back to the authenticated user's own account.
    Returns ``None`` when no API handle is available or the lookup fails.
    Converted from Python 2 ``print`` statement syntax to Python 3; the
    rest of this file is Python 3, so the original did not even parse.
    """
    api = get_api()
    if not api:
        return
    username = author
    if username == "":
        username = api.user.screen_name
    try:
        user = api.get_user(screen_name=username)
    except tweetpony.APIError as err:
        print("Twitter returned error #%i: %s"
              % (err.code, err.description))
    else:
        follows = user.followers_count
        return follows
def test_resource_supplements_must_be_empty_for_non_rtdc():
    """Supplying supplements for a non-.rtdc resource must raise."""
    supplements = [{"chip": {"name": "7x2", "master name": "R1"}},
                   {"chip": {"name": "7x2", "master name": "R1"}}]
    task_path = common.make_upload_task(
        resource_paths=[__file__, dpath],
        resource_names=["test.py", "other_data.rtdc"],
        resource_supplements=supplements)
    with pytest.raises(ValueError, match="supplements must be empty"):
        task.load_task(task_path, api=common.get_api())