def test_marshmallow_load(app, db, es, test_data, search_url, search_class):
    """Test marshmallow loader."""
    app.config['RECORDS_REST_DEFAULT_LOADERS'] = {
        'application/json': marshmallow_loader(_TestMetadataSchema)
    }
    with app.test_client() as client:
        HEADERS = [
            ('Accept', 'application/json'),
            ('Content-Type', 'application/json'),
            ('If-Match', '"0"'),
        ]
        # Create record
        req_data = test_data[0]
        res = client.post(search_url, data=json.dumps(req_data),
                          headers=HEADERS)
        assert res.status_code == 201

        # Check that the returned response matches the stored data
        original_res_data = get_json(res)
        model_record = RecordMetadata.query.one()
        assert original_res_data['metadata'] == model_record.json

        # Try to modify the "control_number"
        req_data = deepcopy(original_res_data['metadata'])
        req_data['control_number'] = 42
        req_url = original_res_data['links']['self']
        res = client.put(req_url, data=json.dumps(req_data), headers=HEADERS)
        res_data = get_json(res)
        model_record = RecordMetadata.query.one()
        assert res_data['metadata'] == original_res_data['metadata']
        assert res_data['metadata'] == model_record.json
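# get_json() is used throughout these tests but never defined in this
# section. A minimal sketch of the assumed helper, decoding a Flask
# test-client response body as JSON (the real project may import it from a
# shared test utility module instead):
import json

def get_json(response):
    """Decode a Flask test-client response body as JSON."""
    return json.loads(response.get_data(as_text=True))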
def check_incomplete_sync():
    """
    Check for any sync tasks that are in an Incomplete state.

    These are not paused or locked, but are the orange 100% complete ones
    in the UI.
    """
    repo_list = helpers.get_json(
        helpers.KATELLO_API + "/content_view_versions")

    # Extract the list of repo ids, then check the state of each one.
    incomplete_sync = False
    for repo in repo_list['results']:
        for repo_id in repo['repositories']:
            repo_status = helpers.get_json(
                helpers.KATELLO_API + "/repositories/" + str(repo_id['id']))

            if repo_status['content_type'] == 'puppet':
                if repo_status['last_sync']['state'] == 'stopped':
                    if repo_status['last_sync']['result'] == 'warning':
                        incomplete_sync = True
                        msg = "Repo ID " + str(repo_id['id']) + " Sync Incomplete"
                        helpers.log_msg(msg, 'DEBUG')

    # If we have detected incomplete sync tasks, ask the user if they want
    # to export anyway. This isn't fatal, but *MAY* lead to inconsistent
    # repositories on the disconnected sat.
    if incomplete_sync:
        msg = "Incomplete sync jobs detected"
        helpers.log_msg(msg, 'WARNING')
        answer = helpers.query_yes_no("Continue with export?", "no")
        if not answer:
            msg = "Export Aborted"
            helpers.log_msg(msg, 'ERROR')
            sys.exit(-1)
        else:
            msg = "Export continued by user"
            helpers.log_msg(msg, 'INFO')
def test_marshmallow_load(app, db, es, test_data, search_url, search_class):
    """Test marshmallow loader."""
    app.config['RECORDS_REST_DEFAULT_LOADERS'] = {
        'application/json': marshmallow_loader(_TestMetadataSchema)}
    with app.test_client() as client:
        HEADERS = [
            ('Accept', 'application/json'),
            ('Content-Type', 'application/json')
        ]
        # Create record
        req_data = test_data[0]
        res = client.post(
            search_url, data=json.dumps(req_data), headers=HEADERS)
        assert res.status_code == 201

        # Check that the returned response matches the stored data
        original_res_data = get_json(res)
        model_record = RecordMetadata.query.one()
        assert original_res_data['metadata'] == model_record.json

        # Try to modify the "control_number"
        req_data = deepcopy(original_res_data['metadata'])
        req_data['control_number'] = 42
        req_url = original_res_data['links']['self']
        res = client.put(req_url, data=json.dumps(req_data), headers=HEADERS)
        res_data = get_json(res)
        model_record = RecordMetadata.query.one()
        assert res_data['metadata'] == original_res_data['metadata']
        assert res_data['metadata'] == model_record.json
def test_sort(app, indexed_records, search_url):
    """Test sorting of search results."""
    with app.test_client() as client:
        res = client.get(search_url, query_string={'sort': '-year'})
        assert res.status_code == 200
        # Max year in test records set (descending sort).
        assert get_json(res)['hits']['hits'][0]['metadata']['year'] == 4242

        res = client.get(search_url, query_string={'sort': 'year'})
        assert res.status_code == 200
        # Min year in test records set (ascending sort).
        assert get_json(res)['hits']['hits'][0]['metadata']['year'] == 1985
def test_sort(app, indexed_records, search_url):
    """Test sorting of search results."""
    with app.test_client() as client:
        res = client.get(search_url, query_string={"sort": "-year"})
        assert res.status_code == 200
        # Max year in test records set (descending sort).
        assert get_json(res)["hits"]["hits"][0]["metadata"]["year"] == 4242

        res = client.get(search_url, query_string={"sort": "year"})
        assert res.status_code == 200
        # Min year in test records set (ascending sort).
        assert get_json(res)["hits"]["hits"][0]["metadata"]["year"] == 1985
def test_query(app, indexed_records, search_url):
    """Test query."""
    with app.test_client() as client:
        # Valid query syntax
        res = client.get(search_url, query_string=dict(q='back'))
        assert len(get_json(res)['hits']['hits']) == 2

        # Invalid query syntax (using ES instead of Invenio syntax)
        res = client.get(search_url, query_string=dict(q='+title:back'))
        assert res.status_code == 400
        data = get_json(res)
        assert 'message' in data
        assert data['status'] == 400
def check_running_tasks():
    """
    Check for any currently running Sync or Export tasks.

    Exits script if any Synchronize or Export tasks are found in a
    running state.
    """
    tasks = helpers.get_json(helpers.FOREMAN_API + "tasks/")

    # From the list of tasks, look for any running export or sync jobs.
    # If we have any we exit, as we can't export in this state.
    for task_result in tasks['results']:
        if task_result['state'] == 'running':
            if task_result['humanized']['action'] == 'Export':
                msg = "Unable to export - an Export task is already running"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)
            if task_result['humanized']['action'] == 'Synchronize':
                msg = "Unable to export - a Sync task is currently running"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)
        if task_result['state'] == 'paused':
            if task_result['humanized']['action'] == 'Export':
                msg = "Unable to export - an Export task is paused. Please resolve this issue first"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)
            if task_result['humanized']['action'] == 'Synchronize':
                msg = "Unable to export - a Sync task is paused. Resume any paused sync tasks."
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)

    check_incomplete_sync()
def test_create_record_check_acl_priority(app, db, es, es_acl_prepare,
                                          test_users):
    with app.test_client() as client:
        with db.session.begin_nested():
            acl1 = DefaultACL(name='default', schemas=[RECORD_SCHEMA],
                              priority=0, originator=test_users.u1,
                              operation='get')
            actor1 = SystemRoleActor(name='auth', system_role='any_user',
                                     acl=acl1, originator=test_users.u1)

            acl2 = DefaultACL(name='default', schemas=[RECORD_SCHEMA],
                              priority=1, originator=test_users.u1,
                              operation='get')
            actor2 = SystemRoleActor(name='auth',
                                     system_role='authenticated_user',
                                     acl=acl2, originator=test_users.u1)

            db.session.add(acl1)
            db.session.add(actor1)
            db.session.add(acl2)
            db.session.add(actor2)

        login(client, test_users.u1)

        response = client.post(
            records_url(),
            data=json.dumps({'title': 'blah', 'contributors': []}),
            content_type='application/json')
        assert response.status_code == 201
        rest_metadata = get_json(response)['metadata']
        assert 'control_number' in rest_metadata

        index, doctype = schema_to_index(RECORD_SCHEMA)
        rec_md = current_search_client.get(
            index=index,
            doc_type=doctype,
            id=str(PersistentIdentifier.get(
                'recid', rest_metadata['control_number']).object_uuid))
        clear_timestamp(rec_md)

        # Only the higher-priority ACL (acl2) should be applied.
        assert rec_md['_source']['_invenio_explicit_acls'] == [{
            'operation': 'get',
            'id': acl2.id,
            'timestamp': 'cleared',
            'system_role': ['authenticated_user']
        }]
def main(paths=()):
    # Accept any iterable of paths (empty tuple default avoids the
    # mutable-default-argument pitfall).
    for path in paths:
        message_json = get_json(path)
        messages = message_json.get("messages", [])
        data = get_all_stats(messages)
        graph_stat(data, stat="Characters", period="Month", name="total")
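# A hypothetical entry point, assuming the friends module (used by the
# other statistics helpers in this file) exposes ALL_FRIEND_PATHS:
if __name__ == "__main__":
    main(friends.ALL_FRIEND_PATHS)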
def get_cv(org_id, publish_list):
    """Get the content views"""
    # Query API to get all content views for our org
    cvs = helpers.get_json(
        helpers.KATELLO_API + "organizations/" + str(org_id) +
        "/content_views/")
    ver_list = {}
    ver_descr = {}
    ver_version = {}

    for cv_result in cvs['results']:
        # We will never publish the DOV
        if cv_result['name'] != "Default Organization View":
            # Handle specific includes and excludes
            if publish_list and cv_result['name'] not in publish_list:
                msg = "Skipping content view '" + cv_result['name'] + "'"
                helpers.log_msg(msg, 'DEBUG')
                continue

            # Get the ID of each Content View
            msg = "Processing content view '" + cv_result['name'] + "' " + \
                str(cv_result['id'])
            helpers.log_msg(msg, 'DEBUG')

            # Find the next version of the view
            ver_list[cv_result['id']] = cv_result['id']
            ver_descr[cv_result['id']] = cv_result['name']
            ver_version[cv_result['id']] = cv_result['next_version']

    return ver_list, ver_descr, ver_version
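# Hypothetical usage sketch: all three dicts returned by get_cv() are keyed
# by content-view ID, so they are typically unpacked and iterated together.
# The org_id and publish_list values below are illustrative only.
ver_list, ver_descr, ver_version = get_cv(1, ["RHEL7-Base"])
for cv_id in ver_list:
    print("%s will be published as version %s" %
          (ver_descr[cv_id], ver_version[cv_id]))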
def top_n_stat(n, stat="Messages", period="Month"):
    """ Print top n messaged person per period in a table """
    res = defaultdict(list)
    for person, path in friends.ALL_FRIENDS:
        message_json = get_json(path)
        messages = message_json.get("messages", [])
        name = message_json.get("participants")[0]
        message_data = get_all_stats(messages)[stat][period]["total"]
        for date, count in message_data.items():
            res[date].append((name, count))

    # We want to sort by date
    res_list = sorted([[date, count_list] for date, count_list in res.items()])

    table = []
    for date, count_list in res_list[30:]:
        date_str = date.strftime(time_format(period))  # Format date by period
        count_list.sort(key=lambda x: x[1], reverse=True)  # Sort by count
        count_list = count_list[:n]  # Truncate to top n
        table.append([date_str, *[name for name, count in count_list]])  # Only names
        # table.append([date_str, *["%s: %d" % (name, count) for name, count in count_list]])  # Names and counts
    print(tabulate(table, headers=[period, *[str(i) for i in range(1, n + 1)]]))
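# Illustrative call: tabulate the three most-messaged people per month.
top_n_stat(3, stat="Messages", period="Month")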
def generate_averages(paths=friends.ALL_FRIEND_PATHS):
    stats = ["Characters", "Words", "Messages", "Clusters"]
    average_stats = []
    for path in paths:
        message_json = get_json(path)
        messages = message_json.get("messages", [])
        participant = message_json.get("participants")[0]
        data = get_all_stats(messages)
        for sender in data["Characters"]["Month"]:
            if sender == "total":
                continue
            sender_averages = []
            for small_stat, big_stat in combinations(stats, 2):
                sender_averages.append(
                    sum(data[small_stat]["Year"][sender].values()) /
                    sum(data[big_stat]["Year"][sender].values()))
            if sender == "Zaibo Wang":
                sender = "Zaibo + %s" % participant
            average_stats.append([sender, *sender_averages])
    average_stats.sort(key=lambda x: x[2], reverse=True)
    print(
        tabulate(
            average_stats,
            headers=[
                "Name",
                *["%s per %s" % combo for combo in combinations(stats, 2)]
            ]))
def test_valid_create(app, db, es, test_data, search_url, search_class,
                      content_type):
    """Test VALID record creation request (POST .../records/)."""
    with app.test_client() as client:
        HEADERS = [
            ('Accept', 'application/json'),
            ('Content-Type', content_type)
        ]

        # Create record
        res = client.post(
            search_url, data=json.dumps(test_data[0]), headers=HEADERS)
        assert res.status_code == 201

        # Check that the returned record matches the given data
        data = get_json(res)
        for k in test_data[0].keys():
            assert data['metadata'][k] == test_data[0][k]

        # Recid has been added in control number
        assert data['metadata']['control_number']

        # Check location header
        assert res.headers['Location'] == data['links']['self']

        # Record can be retrieved.
        assert client.get(record_url(data['id'])).status_code == 200

        IndexFlusher(search_class).flush_and_wait()
        # Record shows up in search
        res = client.get(
            search_url,
            query_string={"control_number": data['metadata']['control_number']})
        assert_hits_len(res, 1)
def test_max_result_window_valid_params(app, indexed_records, search_url):
    """Test max_result_window with valid page/from/size parameters."""
    with app.test_client() as client:
        res = client.get(search_url, query_string={'size': 3})
        assert_hits_len(res, 3)

        res = client.get(search_url, query_string={'page': 1, 'size': 3})
        assert_hits_len(res, 3)

        res = client.get(search_url, query_string={'from': 3, 'size': 1})
        assert_hits_len(res, 1)
        data = get_json(res)
        assert 'self' in data['links']
        assert 'next' in data['links']
        assert 'prev' in data['links']
def test_from_parameter_invalid_pagination(app, indexed_records, search_url):
    """Test invalid edge values for "from" parameter pagination."""
    with app.test_client() as client:
        res = client.get(search_url, query_string={'size': 1, 'from': 0})
        data = get_json(res)
        assert res.status_code == 400
        assert data['message'] == 'Invalid pagination parameters.'
        # The exact error message differs between marshmallow versions.
        errors = {(e['field'], e['message']) for e in data['errors']}
        assert errors == {('from', 'Must be at least 1.')} or \
            errors == {('from', 'Must be greater than or equal to 1.')}

        res = client.get(search_url, query_string={'size': 1, 'from': 10001})
        assert res.status_code == 400
        data = get_json(res)
        assert data['message'] == \
            'Maximum number of 10000 results have been reached.'
def test_valid_put_etag(app, es, test_records, content_type, search_url,
                        search_class):
    """Test concurrency control with etags."""
    HEADERS = [
        ('Accept', 'application/json'),
        ('Content-Type', content_type)
    ]
    pid, record = test_records[0]
    record['year'] = 1234
    with app.test_client() as client:
        url = record_url(pid)
        res = client.put(
            url,
            data=json.dumps(record.dumps()),
            headers={
                'Content-Type': 'application/json',
                'If-Match': '"{0}"'.format(record.revision_id)
            })
        assert res.status_code == 200
        assert get_json(client.get(url))['metadata']['year'] == 1234

        IndexFlusher(search_class).flush_and_wait()
        res = client.get(search_url, query_string={"year": 1234})
        assert_hits_len(res, 1)
def test_from_parameter_edges(app, indexed_records, search_url):
    """Test first and last values for "from" parameter pagination."""
    with app.test_client() as client:
        res = client.get(search_url, query_string={'size': 1, 'from': 1})
        assert_hits_len(res, 1)
        data = get_json(res)
        assert 'self' in data['links']
        assert 'next' in data['links']
        assert 'prev' not in data['links']

        res = client.get(search_url, query_string={'size': 1, 'from': 4})
        assert_hits_len(res, 1)
        data = get_json(res)
        assert 'self' in data['links']
        assert 'next' not in data['links']
        assert 'prev' in data['links']
def test_valid_put(app, test_records):
    """Test VALID record update request (PUT .../records/<record_id>)."""
    pid, record = test_records[0]
    record['year'] = 1234
    with app.test_client() as client:
        url = record_url(pid)
        res = client.put(url, data=json.dumps(record.dumps()),
                         headers=HEADERS)
        assert res.status_code == 200
        # Check that the returned record matches the given data
        assert get_json(res)['metadata']['year'] == 1234
        # Retrieve record via get request
        assert get_json(client.get(url))['metadata']['year'] == 1234
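# test_valid_put() above (and the patch tests below) reference a
# module-level HEADERS constant that is not defined in this section.
# A minimal sketch of what it is assumed to contain, mirroring the tests
# that build their own HEADERS list inline:
HEADERS = [
    ('Accept', 'application/json'),
    ('Content-Type', 'application/json'),
]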
def total_stat_sent(stat="Messages", period="Year"):
    """ Graph all of a stat sent by YOU """
    res = defaultdict(int)
    for person, path in friends.ALL_FRIENDS:
        message_json = get_json(path)
        messages = message_json.get("messages", [])
        name = message_json.get("participants")[0]
        data = get_all_stats(messages)
        message_data = data[stat][period][friends.MY_NAME]
        for date, count in message_data.items():
            res[date] += count

    res_list = sorted([(date, count) for date, count in res.items()])
    # Drop the final (possibly still in-progress) period from the plot.
    dates = [elem[0] for elem in res_list[:-1]]
    counts = [elem[1] for elem in res_list[:-1]]
    bar = plt.bar(dates, counts, width=width_dict[period])
    ax = plt.subplot(111)
    ax.xaxis_date()
    plt.ylabel('# of %s' % stat)
    plt.title("Total %s Sent %s per %s" % (stat, friends.MY_NAME, period))
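# total_stat_sent() prepares the axes but never renders them; the caller is
# presumably expected to finish with matplotlib's show():
total_stat_sent(stat="Characters", period="Month")
plt.show()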
def get_cv(org_id):
    """
    Get the version of the Content Views.

    There should only ever be ONE version of the Default Org View.
    It should be v1.0 with id=1, but we're verifying here just in case.
    """
    # Query API to get all content views for our org
    cvs = helpers.get_json(helpers.KATELLO_API + "organizations/" +
                           str(org_id) + "/content_views/")
    for cv_result in cvs['results']:
        if cv_result['name'] == "Default Organization View":
            msg = "CV Name: " + cv_result['name']
            helpers.log_msg(msg, 'DEBUG')

            # Find the current version of the view in the env we are coming from
            for ver in cv_result['versions']:
                msg = "  Env ID:     " + str(ver['environment_ids'])
                helpers.log_msg(msg, 'DEBUG')
                msg = "  Version:    " + str(ver['version'])
                helpers.log_msg(msg, 'DEBUG')
                msg = "  Version ID: " + str(ver['id'])
                helpers.log_msg(msg, 'DEBUG')

            # There will only ever be one DOV
            return cv_result['id']
def generate_averages(paths=friends.ALL_FRIEND_PATHS):
    """ Analyze combinations of stats such as "Characters per Words"
    across all friends in paths """
    stats = ["Characters", "Words", "Messages", "Clusters"]
    average_stats = []
    for path in paths:
        message_json = get_json(path)
        messages = message_json.get("messages", [])
        participant = message_json.get("participants")[0]['name']
        data = get_all_stats(messages)
        for sender in data["Characters"]["Month"]:
            if sender == "total":
                continue
            sender_averages = []
            for small_stat, big_stat in combinations(stats, 2):
                sender_averages.append(
                    sum(data[small_stat]["Year"][sender].values()) /
                    sum(data[big_stat]["Year"][sender].values()))
            if sender == friends.MY_NAME:
                if ANONYMOUS:
                    sender = "%s + %s" % (friends.MY_NAME,
                                          nh.hash_by_name(participant))
                else:
                    sender = "%s + %s" % (friends.MY_NAME, participant)
            average_stats.append([sender, *sender_averages])
    average_stats.sort(key=lambda x: x[3], reverse=True)
    print(
        tabulate(
            average_stats,
            headers=[
                "Name",
                *["%s per %s" % combo for combo in combinations(stats, 2)]
            ]))
def check_running_tasks():
    """
    Check for any currently running Sync or Export tasks.

    Exits script if any Synchronize or Export tasks are found in a
    running state.
    """
    tasks = helpers.get_json(
        helpers.FOREMAN_API + "tasks/")

    # From the list of tasks, look for any running export or sync jobs.
    # If we have any we exit, as we can't export in this state.
    for task_result in tasks['results']:
        if task_result['state'] == 'running':
            if task_result['humanized']['action'] == 'Export':
                msg = "Unable to export - an Export task is already running"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)
            if task_result['humanized']['action'] == 'Synchronize':
                msg = "Unable to export - a Sync task is currently running"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)
        if task_result['state'] == 'paused':
            if task_result['humanized']['action'] == 'Export':
                msg = "Unable to export - an Export task is paused. Please resolve this issue first"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)
            if task_result['humanized']['action'] == 'Synchronize':
                msg = "Unable to export - a Sync task is paused. Resume any paused sync tasks."
                helpers.log_msg(msg, 'ERROR')
                sys.exit(-1)

    check_incomplete_sync()
def check_version_views(version_id):
    """ Check if our version ID belongs to any views, including CCV """
    version_in_use = False
    version_in_ccv = False

    # Extract a list of content views that the CV version belongs to
    viewlist = helpers.get_json(helpers.KATELLO_API +
                                "content_view_versions/" + str(version_id))

    # If the list is not empty we need to return this fact. A CV that belongs
    # to NO versions will be a candidate for cleanup.
    if viewlist['katello_content_views']:
        version_in_use = True
        msg = "Version " + str(viewlist['version']) + \
            " is associated with published CV"
        helpers.log_msg(msg, 'DEBUG')

        # We can go further and see if this is associated with a CCV
        if viewlist['composite_content_view_ids']:
            version_in_ccv = True

    return version_in_use, version_in_ccv
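# Illustrative usage: the two booleans are unpacked together; the
# version_id value here is hypothetical.
in_use, in_ccv = check_version_views(42)
if not in_use:
    print("Version 42 is unused and is a candidate for cleanup")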
def get_cv(org_id):
    """
    Get the version of the Content Views.

    There should only ever be ONE version of the Default Org View.
    It should be v1.0 with id=1, but we're verifying here just in case.
    """
    # Query API to get all content views for our org
    cvs = helpers.get_json(
        helpers.KATELLO_API + "organizations/" + str(org_id) +
        "/content_views/")
    for cv_result in cvs['results']:
        if cv_result['name'] == "Default Organization View":
            msg = "CV Name: " + cv_result['name']
            helpers.log_msg(msg, 'DEBUG')

            # Find the current version of the view in the env we are coming from
            for ver in cv_result['versions']:
                msg = "  Env ID:     " + str(ver['environment_ids'])
                helpers.log_msg(msg, 'DEBUG')
                msg = "  Version:    " + str(ver['version'])
                helpers.log_msg(msg, 'DEBUG')
                msg = "  Version ID: " + str(ver['id'])
                helpers.log_msg(msg, 'DEBUG')

            # There will only ever be one DOV
            return cv_result['id']
def main():
    # latitude/longitude bounds: 180 and 90 degrees respectively
    window = Window((-180, -90, 180, 90), "./img/map.gif")
    window.draw_grid(size=15)

    print(create_heading("ISS Locator"))
    print("GUI displaying the current location of the ISS")
    print("usage:")
    print("- Red pin is your current location")
    print("- Click it to get the next overhead passes")
    print("- Click on the ISS to display its information")

    user = get_json("https://ipinfo.io/")
    user_location = [float(n) for n in user["loc"].split(",")]
    user_locality = f"{user['city']}, {user['region']}, {user['country']}"

    pin = turtle.Turtle("circle", visible=False)
    pin.penup()
    pin.color("red")
    pin.shapesize(0.4)
    # Turtle coordinates are (x, y) = (longitude, latitude), so reverse.
    pin.setposition(user_location[::-1])
    pin.showturtle()

    iss = SpaceStation(window.screen)
    pin.onclick(lambda x, y: iss.get_next_pass(
        *pin.pos()[::-1], output=True, locality=user_locality))

    window.screen.mainloop()
def get_gpg(org_id):
    """Get the GPG keys"""
    # Query API to get all GPG keys for organization
    gpg = helpers.get_json(helpers.KATELLO_API + "organizations/" +
                           str(org_id) + "/gpg_keys/")
    return gpg['results']
def get_content_view_info(cvid):
    """ Return Content View Info for a given CV ID """
    cvinfo = helpers.get_json(helpers.KATELLO_API + "content_views/" +
                              str(cvid))
    return cvinfo
def get_content_view_info(cvid):
    """ Return Content View Info for a given CV ID """
    cvinfo = helpers.get_json(
        helpers.KATELLO_API + "content_views/" + str(cvid))
    return cvinfo
def get_musicians_from_opengraph(facebook_id, oauth_token):
    url_to_get_musicians = ("https://graph.facebook.com/%s/music?access_token=%s"
                            % (facebook_id, oauth_token))
    musicians = get_json(url_to_get_musicians)
    musicians_names = []
    if "data" in musicians.keys():
        # Walk the Graph API's cursor-based pagination until no pages remain.
        while len(musicians["data"]) > 0:
            for music in musicians["data"]:
                if music["category"] == "Musician/band":
                    musicians_names.append(music["name"])
            if "paging" in musicians.keys() and "next" in musicians["paging"].keys():
                musicians = get_json(musicians["paging"]["next"])
            else:
                break
    return musicians_names
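# Illustrative call; both arguments are placeholders, not real credentials.
liked_musicians = get_musicians_from_opengraph("me", "EXAMPLE_ACCESS_TOKEN")
print(liked_musicians)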
def get_version_info(self, package_name, version):
    """Fetch release data for the version :param:`version` of the package
    :param:`package_name` from PyPI."""
    try:
        return get_json(self.json_endpoint.format(package_name=package_name,
                                                  version=version))
    except URLError:
        raise DataUnavailable()
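# self.json_endpoint is not shown in this section. Assuming the class
# targets PyPI's public JSON API, it would plausibly be a template such as:
json_endpoint = "https://pypi.org/pypi/{package_name}/{version}/json"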
def get_cv(org_id, target_env, env_list, prior_list, promote_list):
    """Get the content views"""
    # Find the ID of the environment we are promoting to and from
    if target_env not in env_list:
        msg = "Target environment '" + target_env + "' not found"
        helpers.log_msg(msg, 'ERROR')
        if helpers.MAILOUT:
            helpers.tf.seek(0)
            output = "{}".format(helpers.tf.read())
            helpers.mailout(helpers.MAILSUBJ_FP, output)
        sys.exit(1)
    else:
        target_env_id = env_list[target_env]
        source_env_id = prior_list[target_env_id]

    # Query API to get all content views for our org
    cvs = helpers.get_json(
        helpers.KATELLO_API + "organizations/" + str(org_id) +
        "/content_views/")
    ver_list = {}
    ver_descr = {}
    ver_version = {}

    for cv_result in cvs['results']:
        # We will never promote to/from the DOV
        if cv_result['name'] != "Default Organization View":
            # Handle specific includes and excludes
            if promote_list and cv_result['name'] not in promote_list:
                msg = "Skipping content view '" + cv_result['name'] + "'"
                helpers.log_msg(msg, 'DEBUG')
                continue

            # Get the ID of each Content View
            msg = "Processing content view '" + cv_result['name'] + "'"
            helpers.log_msg(msg, 'DEBUG')

            # Find the current version of the view in the env we are coming from
            for ver in cv_result['versions']:
                msg = "  Found in env_id " + str(ver['environment_ids']) + \
                    " view_id " + str(ver['id'])
                helpers.log_msg(msg, 'DEBUG')
                if source_env_id in ver['environment_ids']:
                    # Extract the name of the source environment so we can
                    # inform the user
                    for key, val in env_list.items():
                        if val == source_env_id:
                            prior_env = key
                    msg = "Found promotable version " + ver['version'] + \
                        " of '" + cv_result['name'] + "' in " + prior_env
                    helpers.log_msg(msg, 'INFO')
                    print(msg)

                    # Create a dictionary of CV IDs and the CV vers ID to promote
                    ver_list[cv_result['id']] = ver['id']
                    ver_descr[cv_result['id']] = cv_result['name']
                    ver_version[cv_result['id']] = ver['version']

    return ver_list, ver_descr, ver_version
def test_valid_patch(app, test_records, test_patch):
    """Test VALID record patch request (PATCH .../records/<record_id>)."""
    pid, record = test_records[0]
    # Check that the patch applies cleanly to the record.
    assert record.patch(test_patch)
    with app.test_client() as client:
        # Check that patch and record do not hold the same value for year.
        url = record_url(pid)
        previous_year = get_json(client.get(url))['metadata']['year']

        # Patch record
        res = client.patch(url, data=json.dumps(test_patch), headers=HEADERS)
        assert res.status_code == 200

        # Check that year changed.
        assert previous_year != get_json(client.get(url))['metadata']['year']
def test_next_in_max_page(app, indexed_records, search_url):
    """Test that following the "next" link past the max result window fails."""
    with app.test_client() as client:
        res = client.get(search_url, query_string=dict(page=1, size=2))
        assert "next" in res.json['links']

        res = client.get(res.json['links']['next'])
        assert res.status_code == 400
        assert 'message' in get_json(res)
def graph_stat(path=friends.BEST_FRIEND, stat="Messages", period="Year"):
    """ graph_stat wrapper that parses message data from a path """
    message_json = get_json(path)
    messages = message_json.get("messages", [])
    data = get_all_stats(messages)
    _graph_stat(data, stat=stat, period=period)
def count_packages(repo_id):
    """Return the number of packages/errata in a repository"""
    result = helpers.get_json(helpers.KATELLO_API + "repositories/" +
                              str(repo_id))
    numpkg = result['content_counts']['rpm']
    numerrata = result['content_counts']['erratum']
    return numpkg, numerrata
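# Illustrative usage: unpack both counts for a repository; repo id 5 is
# hypothetical.
numpkg, numerrata = count_packages(5)
print("Repo 5 holds %s packages and %s errata" % (numpkg, numerrata))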
def test_valid_patch(app, test_records, test_patch):
    """Test VALID record patch request (PATCH .../records/<record_id>)."""
    pid, record = test_records[0]
    # Check that the patch applies cleanly to the record.
    assert record.patch(test_patch)
    with app.test_client() as client:
        # Check that patch and record do not hold the same value for year.
        url = record_url(pid)
        previous_year = get_json(client.get(url))["metadata"]["year"]

        # Patch record
        res = client.patch(url, data=json.dumps(test_patch), headers=HEADERS)
        assert res.status_code == 200

        # Check that year changed.
        assert previous_year != get_json(client.get(url))["metadata"]["year"]
def check_incomplete_sync():
    """
    Check for any sync tasks that are in an Incomplete state.

    These are not paused or locked, but are the orange 100% complete ones
    in the UI.
    """
    repo_list = helpers.get_json(
        helpers.KATELLO_API + "/content_view_versions")

    # Extract the list of repo ids, then check the state of each one.
    incomplete_sync = False
    for repo in repo_list['results']:
        for repo_id in repo['repositories']:
            repo_status = helpers.get_json(
                helpers.KATELLO_API + "/repositories/" + str(repo_id['id']))

            if repo_status['content_type'] == 'yum':
                if repo_status['last_sync'] is None:
                    if repo_status['url'] is None:
                        msg = "Repo ID " + str(repo_id['id']) + " No Sync Configured"
                        #helpers.log_msg(msg, 'DEBUG')
                elif repo_status['last_sync']['state'] == 'stopped':
                    if repo_status['last_sync']['result'] == 'warning':
                        incomplete_sync = True
                        msg = "Repo ID " + str(repo_id['id']) + " Sync Incomplete"
                        helpers.log_msg(msg, 'DEBUG')

    # If we have detected incomplete sync tasks, ask the user if they want
    # to export anyway. This isn't fatal, but *MAY* lead to inconsistent
    # repositories on the disconnected sat.
    if incomplete_sync:
        msg = "Incomplete sync jobs detected"
        helpers.log_msg(msg, 'WARNING')
        if not args.unattended:
            answer = helpers.query_yes_no("Continue with export?", "no")
            if not answer:
                msg = "Export Aborted"
                helpers.log_msg(msg, 'ERROR')
                sys.exit(3)
            else:
                msg = "Export continued by user"
                helpers.log_msg(msg, 'INFO')
        else:
            msg = "Export Aborted"
            helpers.log_msg(msg, 'ERROR')
            sys.exit(3)
def test_invalid_accept(app, indexed_records, search_url):
    """Test invalid accept header."""
    headers = [('Accept', 'application/does_not_exist')]

    with app.test_client() as client:
        res = client.get(search_url, headers=headers)
        assert res.status_code == 406
        data = get_json(res)
        assert 'message' in data
        assert data['status'] == 406
def test_aggregations_info(app, indexed_records, search_url):
    """Test aggregations."""
    with app.test_client() as client:
        # Facets are defined in the "app" fixture.
        res = client.get(search_url)
        data = get_json(res)
        assert "aggregations" in data
        # Len 3 because testrecords.json has three distinct values for "stars".
        assert len(data["aggregations"]["stars"]["buckets"]) == 3
        assert data["aggregations"]["stars"]["buckets"][0] == \
            dict(key=4, doc_count=2)
def count_packages(repo_id):
    """ Return the number of packages/errata in a repository """
    result = helpers.get_json(
        helpers.KATELLO_API + "repositories/" + str(repo_id)
    )
    numpkg = result['content_counts']['rpm']
    numerrata = result['content_counts']['erratum']
    return numpkg, numerrata
def test_item_get(app, test_records):
    """Test record retrieval."""
    with app.test_client() as client:
        pid, record = test_records[0]
        res = client.get(record_url(pid))
        assert res.status_code == 200
        assert res.headers["ETag"] == '"{}"'.format(record.revision_id)

        # Check metadata
        data = get_json(res)
        for k in ["id", "created", "updated", "metadata", "links"]:
            assert k in data
        assert data["id"] == int(pid.pid_value)
        assert data["metadata"] == record.dumps()

        # Check self links (capture the response so the asserts test it)
        res = client.get(to_relative_url(data["links"]["self"]))
        assert res.status_code == 200
        assert data == get_json(res)
def test_item_get(app, test_records):
    """Test record retrieval."""
    with app.test_client() as client:
        pid, record = test_records[0]
        res = client.get(record_url(pid))
        assert res.status_code == 200
        assert res.headers['ETag'] == '"{}"'.format(record.revision_id)

        # Check metadata
        data = get_json(res)
        for k in ['id', 'created', 'updated', 'metadata', 'links']:
            assert k in data
        assert data['id'] == int(pid.pid_value)
        assert data['metadata'] == record.dumps()

        # Check self links (capture the response so the asserts test it)
        res = client.get(to_relative_url(data['links']['self']))
        assert res.status_code == 200
        assert data == get_json(res)
def test_valid_patch(app, test_records, test_patch, content_type):
    """Test VALID record patch request (PATCH .../records/<record_id>)."""
    HEADERS = [
        ('Accept', 'application/json'),
        ('Content-Type', content_type)
    ]
    pid, record = test_records[0]
    # Check that the patch applies cleanly to the record.
    assert record.patch(test_patch)
    with app.test_client() as client:
        # Check that patch and record do not hold the same value for year.
        url = record_url(pid)
        previous_year = get_json(client.get(url))['metadata']['year']

        # Patch record
        res = client.patch(url, data=json.dumps(test_patch), headers=HEADERS)
        assert res.status_code == 200

        # Check that year changed.
        assert previous_year != get_json(client.get(url))['metadata']['year']
def test_delete_deleted(app, test_records):
    """Test deleting a previously deleted record."""
    pid, record = test_records[0]

    with app.test_client() as client:
        res = client.delete(record_url(pid))
        assert res.status_code == 204

        res = client.delete(record_url(pid))
        assert res.status_code == 410
        data = get_json(res)
        assert 'message' in data
        assert data['status'] == 410
def test_pagination(app, indexed_records, search_url):
    """Test pagination."""
    with app.test_client() as client:
        # Limit records
        res = client.get(search_url, query_string=dict(size=1, page=1))
        assert_hits_len(res, 1)
        data = get_json(res)
        assert 'self' in data['links']
        assert 'next' in data['links']
        assert 'prev' not in data['links']

        # Assert next URL before calling it
        next_url = get_json(res)['links']['next']
        parsed_url = parse_url(next_url)
        assert parsed_url['qs']['size'] == ['1']
        assert parsed_url['qs']['page'] == ['2']

        # Access next URL
        res = client.get(to_relative_url(next_url))
        assert_hits_len(res, 1)
        data = get_json(res)
        assert data['links']['self'] == next_url
        assert 'next' in data['links']
        assert 'prev' in data['links']
def test_page_size(app, indexed_records, search_url):
    """Test page and size parameters."""
    with app.test_client() as client:
        # Limit records
        res = client.get(search_url, query_string=dict(page=1, size=2))
        assert_hits_len(res, 2)

        # All records
        res = client.get(search_url, query_string=dict(page=1, size=10))
        assert_hits_len(res, len(indexed_records))

        # Exceed max result window
        res = client.get(search_url, query_string=dict(page=100, size=100))
        assert res.status_code == 400
        assert 'message' in get_json(res)
def test_valid_put(app, es, test_records, content_type, search_url,
                   search_class):
    """Test VALID record update request (PUT .../records/<record_id>)."""
    HEADERS = [
        ('Accept', 'application/json'),
        ('Content-Type', content_type)
    ]
    pid, record = test_records[0]
    record['year'] = 1234
    with app.test_client() as client:
        url = record_url(pid)
        res = client.put(url, data=json.dumps(record.dumps()),
                         headers=HEADERS)
        assert res.status_code == 200
        # Check that the returned record matches the given data
        assert get_json(res)['metadata']['year'] == 1234

        IndexFlusher(search_class).flush_and_wait()
        res = client.get(search_url, query_string={"year": 1234})
        assert_hits_len(res, 1)

        # Retrieve record via get request
        assert get_json(client.get(url))['metadata']['year'] == 1234
def test_pagination(app, indexed_records, search_url):
    """Test pagination."""
    with app.test_client() as client:
        # Limit records
        res = client.get(search_url, query_string=dict(size=1, page=1))
        assert_hits_len(res, 1)
        data = get_json(res)
        assert "self" in data["links"]
        assert "next" in data["links"]
        assert "prev" not in data["links"]

        # Assert next URL before calling it
        next_url = get_json(res)["links"]["next"]
        parsed_url = parse_url(next_url)
        assert parsed_url["qs"]["size"] == ["1"]
        assert parsed_url["qs"]["page"] == ["2"]

        # Access next URL
        res = client.get(to_relative_url(next_url))
        assert_hits_len(res, 1)
        data = get_json(res)
        assert data["links"]["self"] == next_url
        assert "next" in data["links"]
        assert "prev" in data["links"]
def get_cv(org_id, cleanup_list, keep):
    """Get the content views"""
    # Query API to get all content views for our org
    cvs = helpers.get_json(
        helpers.KATELLO_API + "organizations/" + str(org_id) +
        "/content_views/")
    ver_list = collections.OrderedDict()
    ver_descr = collections.OrderedDict()
    ver_keep = collections.OrderedDict()

    # Sort the CVs so that composites are considered first
    cv_results = sorted(cvs['results'], key=lambda k: k[u'composite'],
                        reverse=True)

    for cv_result in cv_results:
        # We will never clean the DOV
        if cv_result['name'] != "Default Organization View":
            # Handle specific includes
            if cleanup_list:
                # The list contains dictionaries as elements. Process each
                # dictionary.
                for cv in cleanup_list:
                    # If the CV name does not appear in our config list, skip
                    if cv['view'] != cv_result['name']:
                        msg = "Skipping " + cv_result['name']
                        helpers.log_msg(msg, 'DEBUG')
                        continue
                    else:
                        msg = "Processing content view '" + \
                            cv_result['name'] + "' " + str(cv_result['id'])
                        helpers.log_msg(msg, 'DEBUG')

                        # Add the next version of the view, and how many
                        # versions to keep
                        ver_list[cv_result['id']] = cv_result['id']
                        ver_descr[cv_result['id']] = cv_result['name']
                        ver_keep[cv_result['id']] = cv['keep']
            # Handle the 'all' option
            else:
                msg = "Processing content view '" + cv_result['name'] + \
                    "' " + str(cv_result['id'])
                helpers.log_msg(msg, 'DEBUG')

                # Add the next version of the view, and how many versions
                # to keep
                ver_list[cv_result['id']] = cv_result['id']
                ver_descr[cv_result['id']] = cv_result['name']
                ver_keep[cv_result['id']] = keep

    return ver_list, ver_descr, ver_keep
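# Illustrative cleanup_list structure, inferred from the 'view'/'keep' keys
# accessed above; the names, counts, and org id are hypothetical.
cleanup_list = [
    {'view': 'RHEL7-Base', 'keep': 3},
    {'view': 'CCV-Web', 'keep': 1},
]
ver_list, ver_descr, ver_keep = get_cv(1, cleanup_list, 5)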
def test_old_signature_backward_compatibility(app, test_records):
    """Check that the old links_factory signature is still supported.

    This old signature was links_factory(pid), without "record" and
    "**kwargs" parameters.
    """
    with app.test_client() as client:
        pid, record = test_records[0]
        res = client.get(record_url(pid))
        assert res.status_code == 200
        # Check metadata
        data = get_json(res)
        assert data['links']['test_link'] == 'http://old_links_factory.com'
def test_json_result_serializer(app, indexed_records, test_records,
                                search_url):
    """JSON result."""
    with app.test_client() as client:
        # Get a query with only one record
        res = client.get(search_url, query_string={"q": "year:2015"})
        assert_hits_len(res, 1)
        assert res.status_code == 200

        # Check serialization of record
        record = get_json(res)["hits"]["hits"][0]

        for k in ["id", "created", "updated", "metadata", "links"]:
            assert k in record

        pid, db_record = test_records[0]
        assert record["id"] == int(pid.pid_value)
        assert record["metadata"] == db_record.dumps()