def test_export_blocks_job(tmpdir, start_block, end_block, batch_size, resource_group, web3_provider_type):
    """Run ExportBlocksJob over a block range and diff the CSV output
    against the expected fixtures in the resource group."""
    blocks_output_file = str(tmpdir.join("actual_blocks.csv"))
    transactions_output_file = str(tmpdir.join("actual_transactions.csv"))

    job = ExportBlocksJob(
        start_block=start_block,
        end_block=end_block,
        batch_size=batch_size,
        batch_web3_provider=ThreadLocalProxy(lambda: get_web3_provider(
            web3_provider_type,
            lambda file: read_resource(resource_group, file),
            batch=True,
        )),
        max_workers=5,
        item_exporter=blocks_and_transactions_item_exporter(
            blocks_output_file, transactions_output_file),
        # Both paths are always set above; the flags mirror them for clarity.
        export_blocks=blocks_output_file is not None,
        export_transactions=transactions_output_file is not None,
    )
    job.run()

    # Order of exported rows is not guaranteed, so compare ignoring order.
    compare_lines_ignore_order(
        read_resource(resource_group, "expected_blocks.csv"),
        read_file(blocks_output_file),
    )
    compare_lines_ignore_order(
        read_resource(resource_group, "expected_transactions.csv"),
        read_file(transactions_output_file),
    )
def test_get_user_view(bodhi_container, db_container):
    """Test ``/users/{name}`` path"""
    # Look up the submitter of the most recent update directly in the DB.
    query_users = ("SELECT "
                   " users.name as username "
                   "FROM updates "
                   "JOIN users ON updates.user_id = users.id "
                   "ORDER BY date_submitted DESC LIMIT 1")
    db_ip = db_container.get_IPv4s()[0]
    conn = psycopg2.connect(
        "dbname=bodhi2 user=postgres host={}".format(db_ip))
    with conn:
        with conn.cursor() as curs:
            curs.execute(query_users)
            username = curs.fetchone()[0]
    conn.close()

    # GET on the user page for the user with the latest update.
    with bodhi_container.http_client(port="8080") as c:
        headers = {'Accept': 'text/html'}
        http_response = c.get(f"/users/{username}", headers=headers)
    try:
        assert http_response.ok
        assert f"{username}'s latest updates" in http_response.text
        assert f"{username}'s latest buildroot overrides" in http_response.text
        assert "Feedback received on updates" in http_response.text
        assert "Feedback sent on updates" in http_response.text
    except AssertionError:
        # Dump the response and server error log to aid debugging.
        print(http_response)
        print(http_response.text)
        with read_file(bodhi_container, "/httpdir/errorlog") as log:
            print(log.read())
        raise
def _clone_define(self, filebase):
    """Take the valid output xml and attempt to define it on the
    connection to ensure we don't get any errors"""
    outfile = os.path.join(clonexml_dir, filebase + "-out.xml")
    xml = utils.read_file(outfile)
    testconn = utils.URIs.open_testdriver_cached()
    utils.test_create(testconn, xml)
def _image2XMLhelper(self, image_xml, output_xmls, qemu=False):
    """Parse a virt-image XML, build a guest for each expected output file,
    and diff the generated guest XML against the expected output.

    :param image_xml: image XML filename relative to ``self.basedir``
    :param output_xmls: one expected-output filename, or a list of them
    :param qemu: if True use the qemu connection/caps and "qemu" guest type,
        otherwise the default connection and "xen"
    """
    image2guestdir = self.basedir + "image2guest/"
    image = virtinst.ImageParser.parse_file(self.basedir + image_xml)
    # Accept either a single filename or a list of filenames.
    # isinstance() is the idiomatic type check (PEP 8), not `type(x) is`.
    if not isinstance(output_xmls, list):
        output_xmls = [output_xmls]

    # Conditional expressions replace the fragile `cond and a or b` idiom,
    # which silently falls through to `b` whenever `a` is falsy.
    conn = self.qemuconn if qemu else self.conn
    caps = self.qemucaps if qemu else self.caps
    gtype = "qemu" if qemu else "xen"

    for idx, fname in enumerate(output_xmls):
        inst = virtinst.ImageInstaller(image, caps, boot_index=idx, conn=conn)
        utils.set_conn(conn)
        if inst.is_hvm():
            g = utils.get_basic_fullyvirt_guest(typ=gtype)
        else:
            g = utils.get_basic_paravirt_guest()
        g.installer = inst
        g._prepare_install(None)
        actual_out = g.get_xml_config(install=False)

        expect_file = os.path.join(image2guestdir + fname)
        expect_out = utils.read_file(expect_file)
        # Expected fixtures embed the working directory as a placeholder.
        expect_out = expect_out.replace("REPLACEME", os.getcwd())
        utils.diff_compare(actual_out, expect_file, expect_out=expect_out)
        utils.reset_conn()
def _clone(self, filebase, disks=None, force_list=None, skip_list=None,
           compare=True, conn=None, clone_disks_file=None):
    """Helper for comparing clone input/output from 2 xml files"""
    infile = os.path.join(clonexml_dir, filebase + "-in.xml")
    in_content = utils.read_file(infile)
    if not conn:
        conn = utils.URIs.open_testdriver_cached()

    cloneobj = Cloner(conn)
    cloneobj.original_xml = in_content
    # Cloner accumulates targets through repeated property assignment.
    for force in force_list or []:
        cloneobj.force_target = force
    for skip in skip_list or []:
        cloneobj.skip_target = skip
    cloneobj = self._default_clone_values(cloneobj, disks)

    if not compare:
        cloneobj.setup_original()
        cloneobj.setup_clone()
        return
    self._clone_compare(cloneobj, filebase,
                        clone_disks_file=clone_disks_file)
    self._clone_define(filebase)
def test_app_auth_with_valid_pubkey_by_multipart_form(self):
    """Auth with a valid private key posted as multipart form data succeeds
    and the websocket greets us with the expected banner."""
    url = self.get_url('/')
    client = self.get_http_client()
    response = yield client.fetch(url)
    self.assertEqual(response.code, 200)

    privatekey = read_file(make_tests_data_path('user_rsa_key'))
    files = [('privatekey', 'user_rsa_key', privatekey)]
    content_type, body = encode_multipart_formdata(self.body_dict.items(),
                                                   files)
    headers = {
        'Content-Type': content_type,
        'content-length': str(len(body))
    }
    response = yield client.fetch(url, method='POST', headers=headers,
                                  body=body)
    data = json.loads(to_str(response.body))
    # Successful auth: no error status, a session id and an encoding.
    self.assertIsNone(data['status'])
    self.assertIsNotNone(data['id'])
    self.assertIsNotNone(data['encoding'])

    ws_url = url.replace('http', 'ws') + 'ws?id=' + data['id']
    ws = yield tornado.websocket.websocket_connect(ws_url)
    msg = yield ws.read_message()
    self.assertEqual(to_str(msg, data['encoding']), banner)
    ws.close()
def test_get_releases_view(bodhi_container, db_container):
    """Test ``/releases`` path"""
    # Fetch releases from DB
    query = """SELECT long_name FROM releases"""
    db_ip = db_container.get_IPv4s()[0]
    conn = psycopg2.connect(
        "dbname=bodhi2 user=postgres host={}".format(db_ip))
    with conn:
        with conn.cursor() as curs:
            curs.execute(query)
            # BUG FIX: the original wrapped this comprehension in
            # `for record in curs:`, which consumed the first row into
            # `record` and built the list from the remaining rows only.
            # A cursor is a one-shot iterator; iterate it exactly once.
            expected_releases = [row[0] for row in curs]
    conn.close()
    # GET on /releases
    with bodhi_container.http_client(port="8080") as c:
        headers = {'Accept': 'text/html'}
        # Plain string: the original used an f-string with no placeholders.
        http_response = c.get("/releases", headers=headers)
    try:
        assert http_response.ok
        # Every release known to the DB must be rendered on the page.
        for release_long_name in expected_releases:
            assert release_long_name in http_response.text
    except AssertionError:
        print(http_response)
        print(http_response.text)
        with read_file(bodhi_container, "/httpdir/errorlog") as log:
            print(log.read())
        raise
def test_get_root(bodhi_container, db_container):
    """Test ``/`` path"""
    # Only pending/current releases are expected on the front page.
    query = ("SELECT long_name "
             "FROM releases "
             "WHERE state = 'pending' OR state = 'current'")
    db_ip = db_container.get_IPv4s()[0]
    conn = psycopg2.connect(
        "dbname=bodhi2 user=postgres host={}".format(db_ip))
    with conn:
        with conn.cursor() as curs:
            curs.execute(query)
            expected_releases = [row[0] for row in curs]
    conn.close()

    # GET on the front page.
    with bodhi_container.http_client(port="8080") as c:
        headers = {'Accept': 'text/html'}
        http_response = c.get("/", headers=headers)
    try:
        assert http_response.ok
        assert "Fedora Update System" in http_response.text
        for release_long_name in expected_releases:
            assert release_long_name in http_response.text
    except AssertionError:
        # Dump response and server error log before re-raising.
        print(http_response)
        print(http_response.text)
        with read_file(bodhi_container, "/httpdir/errorlog") as log:
            print(log.read())
        raise
def unauthorized_oauth_user(app, db_session):
    """Fixture: create a user from the unauthorized-users resource file and
    return its username/user_id pair."""
    users = dict(json.loads(utils.read_file(
        'resources/unauthorized_users.json'
    )))
    user_id, username = utils.create_user(users, db_session, is_admin=True)
    return Dict(username=username, user_id=user_id)
def test_parse_real_html():
    """Parse a captured real-world listing page and sanity-check the result."""
    html = read_file('tests/html/vtuber_antenna/list/real.html')
    channels = list(parse_vtubers_list(html))
    # The fixture page is known to contain exactly 1142 channels.
    assert len(channels) == 1142
    assert all(isinstance(ch, YoutubeChannel) for ch in channels)
def test_create_row():
    """Load product metadata and merge in the cumulative bridge data from
    the Excel fixture."""
    with open("test_data/metadata.csv") as meta_file:
        productdata = process_meta_blob(meta_file)
    raw_xlsx = read_file("test_data/flow-carb10.xlsx")
    sheet = excel_raw_file_to_sheet(raw_xlsx)
    bridge = sheet_to_bridge_dict(sheet)
    productdata[0]["cumulative"] = bridge["cumulative"]
def test_get_pkey_obj_with_plain_key(self):
    """An unencrypted ed25519 key loads regardless of password; corrupt
    key data raises ValueError."""
    fname = 'test_ed25519.key'
    cls = paramiko.Ed25519Key
    key = read_file(os.path.join(base_dir, 'tests', fname))

    # Password is ignored for an unencrypted key.
    self.assertIsInstance(IndexHandler.get_pkey_obj(key, None), cls)
    self.assertIsInstance(IndexHandler.get_pkey_obj(key, 'iginored'), cls)

    with self.assertRaises(ValueError):
        IndexHandler.get_pkey_obj('x' + key, None)
def test_app_with_user_pass2fa_with_wrong_pkey_correct_passwords(self):
    """With password+TOTP correct, a wrong private key does not block auth."""
    url = self.get_url('/')
    privatekey = read_file(make_tests_data_path('user_rsa_key'))
    self.body_dict.update(
        username='******',
        password='******',
        privatekey=privatekey,
        totp='passcode',
    )
    response = yield self.async_post(url, self.body_dict)
    data = json.loads(to_str(response.body))
    self.assert_status_none(data)
def test_app_with_user_pkey2fa_with_empty_passcode(self):
    """An empty TOTP passcode is rejected with a verification-code error."""
    url = self.get_url('/')
    privatekey = read_file(make_tests_data_path('user_rsa_key'))
    self.body_dict.update(
        username='******',
        password='******',
        privatekey=privatekey,
        totp='',
    )
    response = yield self.async_post(url, self.body_dict)
    data = json.loads(to_str(response.body))
    self.assert_status_in('Need a verification code', data)
def app(tmpdir, request):
    """Fixture: boot an indexd subprocess, seed test data/config, and
    return the initialized Flask app; teardown stops indexd and removes
    the sqlite files."""
    port = 8000
    dictionary_setup(_app)
    # Reload settings so sqlite is re-initialized for every unit test.
    reload(default_settings)
    # Start from fresh database files.
    for filename in ['auth.sq3', 'index.sq3', 'alias.sq3']:
        if os.path.exists(filename):
            os.remove(filename)

    indexd_app = get_indexd_app()
    indexd_init(*INDEX_CLIENT['auth'])
    indexd = Process(target=indexd_app.run, args=['localhost', port])
    indexd.start()
    wait_for_indexd_alive(port)

    gencode_json = tmpdir.mkdir("slicing").join("test_gencode.json")
    gencode_json.write(
        json.dumps({
            'a_gene': ['chr1', None, 200],
            'b_gene': ['chr1', 150, 300],
            'c_gene': ['chr1', 200, None],
            'd_gene': ['chr1', None, None],
        }))

    def teardown():
        # Mirror setup: drop sqlite files and stop the indexd process.
        for filename in ['auth.sq3', 'index.sq3', 'alias.sq3']:
            if os.path.exists(filename):
                os.remove(filename)
        indexd.terminate()
        wait_for_indexd_not_alive(port)

    _app.config.from_object("sheepdog.test_settings")
    _app.config["PATH_TO_SCHEMA_DIR"] = PATH_TO_SCHEMA_DIR
    request.addfinalizer(teardown)
    app_init(_app)
    _app.logger.setLevel(os.environ.get("GDC_LOG_LEVEL", "WARNING"))
    _app.jwt_public_keys = {
        _app.config['USER_API']: {
            'key-test': utils.read_file('./integration/resources/keys/test_public_key.pem')
        }
    }
    _app.auth = ArboristClient()
    return _app
def test_get_packages_json(bodhi_container, db_container):
    """Test ``/packages`` path"""
    # Find the package attached to the most recently submitted update.
    query_updates = ("SELECT "
                     " id "
                     "FROM updates "
                     "ORDER BY date_submitted DESC LIMIT 1")
    query_packages = ("SELECT "
                      " packages.name, "
                      " packages.type "
                      "FROM builds "
                      "JOIN packages ON builds.package_id = packages.id "
                      "WHERE update_id = %s LIMIT 1")
    db_ip = db_container.get_IPv4s()[0]
    conn = psycopg2.connect(
        "dbname=bodhi2 user=postgres host={}".format(db_ip))
    with conn:
        with conn.cursor() as curs:
            curs.execute(query_updates)
            update_id = curs.fetchone()[0]
            curs.execute(query_packages, (update_id, ))
            package_name, package_type = curs.fetchone()
    conn.close()

    # GET /packages filtered by that package name.
    with bodhi_container.http_client(port="8080") as c:
        http_response = c.get(f"/packages/?name={package_name}")
    expected_json = {
        "packages": [{
            "name": package_name,
            "requirements": None,
            "type": package_type
        }],
        "page": 1,
        "pages": 1,
        "rows_per_page": 20,
        "total": 1,
    }
    try:
        assert http_response.ok
        assert expected_json == http_response.json()
    except AssertionError:
        print(http_response)
        print(http_response.text)
        with read_file(bodhi_container, "/httpdir/errorlog") as log:
            print(log.read())
        raise
def test_get_specific_pkey_with_plain_key(self):
    """get_specific_pkey loads a plain RSA key with or without a password
    and returns None for corrupt key data."""
    fname = 'test_rsa.key'
    cls = paramiko.RSAKey
    key = read_file(os.path.join(base_dir, 'tests', fname))

    self.assertIsInstance(IndexHandler.get_specific_pkey(cls, key, None), cls)
    self.assertIsInstance(
        IndexHandler.get_specific_pkey(cls, key, 'iginored'), cls)
    # Corrupt key data yields None rather than raising.
    self.assertIsNone(IndexHandler.get_specific_pkey(cls, 'x' + key, None))
def test_get_pkey_obj_with_encrypted_key(self):
    """An encrypted ed25519 key needs the right password; a wrong password
    or corrupt key data raises ValueError."""
    fname = 'test_ed25519_password.key'
    password = '******'
    cls = paramiko.Ed25519Key
    key = read_file(os.path.join(base_dir, 'tests', fname))

    self.assertIsInstance(IndexHandler.get_pkey_obj(key, password), cls)
    with self.assertRaises(ValueError):
        IndexHandler.get_pkey_obj(key, 'wrongpass')
    with self.assertRaises(ValueError):
        IndexHandler.get_pkey_obj('x' + key, password)
def test_get_pkey_obj_with_plain_key(self):
    """An unencrypted ed25519 key loads regardless of password; corrupt
    key data raises InvalidException with a descriptive message."""
    fname = 'test_ed25519.key'
    cls = paramiko.Ed25519Key
    key = read_file(make_tests_data_path(fname))
    pkey = IndexHandler.get_pkey_obj(key, None, fname)
    self.assertIsInstance(pkey, cls)
    pkey = IndexHandler.get_pkey_obj(key, 'iginored', fname)
    self.assertIsInstance(pkey, cls)
    with self.assertRaises(InvalidException) as exc:
        pkey = IndexHandler.get_pkey_obj('x' + key, None, fname)
    # BUG FIX: str(exc) stringifies the assertRaises context-manager
    # object, not the raised exception; the message lives on
    # exc.exception (see unittest docs for assertRaises).
    self.assertIn('Invalid private key', str(exc.exception))
def test_get_specific_pkey_with_plain_key(self):
    """get_specific_pkey loads a plain RSA key with or without a password
    and returns None for corrupt key data."""
    fname = 'test_rsa.key'
    cls = paramiko.RSAKey
    key = read_file(make_tests_data_path(fname))

    self.assertIsInstance(IndexHandler.get_specific_pkey(cls, key, None), cls)
    self.assertIsInstance(
        IndexHandler.get_specific_pkey(cls, key, 'iginored'), cls)
    # Corrupt key data yields None rather than raising.
    self.assertIsNone(IndexHandler.get_specific_pkey(cls, 'x' + key, None))
def app(tmpdir, request):
    """Fixture: boot an indexd subprocess, seed test data/config, and
    return the initialized Flask app; teardown stops indexd and removes
    the sqlite files."""
    port = 8000
    dictionary_setup(_app)
    # Reload settings so sqlite is re-initialized for every unit test.
    reload(default_settings)
    # Start from fresh database files.
    for filename in ["auth.sq3", "index.sq3", "alias.sq3"]:
        if os.path.exists(filename):
            os.remove(filename)

    indexd_app = get_indexd_app()
    indexd_init(*SIGNPOST["auth"])
    indexd = Process(target=indexd_app.run, args=["localhost", port])
    indexd.start()
    wait_for_indexd_alive(port)

    gencode_json = tmpdir.mkdir("slicing").join("test_gencode.json")
    gencode_json.write(
        json.dumps({
            "a_gene": ["chr1", None, 200],
            "b_gene": ["chr1", 150, 300],
            "c_gene": ["chr1", 200, None],
            "d_gene": ["chr1", None, None],
        }))

    def teardown():
        # Mirror setup: drop sqlite files and stop the indexd process.
        for filename in ["auth.sq3", "index.sq3", "alias.sq3"]:
            if os.path.exists(filename):
                os.remove(filename)
        indexd.terminate()
        wait_for_indexd_not_alive(port)

    _app.config.from_object("sheepdog.test_settings")
    _app.config["PATH_TO_SCHEMA_DIR"] = PATH_TO_SCHEMA_DIR
    request.addfinalizer(teardown)
    app_init(_app)
    _app.logger.setLevel(os.environ.get("GDC_LOG_LEVEL", "WARNING"))
    _app.jwt_public_keys = {
        _app.config["USER_API"]: {
            "key-test": utils.read_file("./integration/resources/keys/test_public_key.pem")
        }
    }
    return _app
def test_get_specific_pkey_with_encrypted_key(self):
    """An encrypted RSA key loads with the right password, returns None for
    corrupt data, and raises ValueError without a password."""
    fname = 'test_rsa_password.key'
    cls = paramiko.RSAKey
    password = '******'
    key = read_file(os.path.join(base_dir, 'tests', fname))

    self.assertIsInstance(
        IndexHandler.get_specific_pkey(cls, key, password), cls)
    self.assertIsNone(IndexHandler.get_specific_pkey(cls, 'x' + key, None))
    with self.assertRaises(ValueError):
        IndexHandler.get_specific_pkey(cls, key, None)
def test_get_specific_pkey_with_encrypted_key(self):
    """An encrypted RSA key loads with the right password, returns None for
    corrupt data, and raises PasswordRequiredException without a password."""
    fname = 'test_rsa_password.key'
    cls = paramiko.RSAKey
    password = '******'
    key = read_file(make_tests_data_path(fname))

    self.assertIsInstance(
        IndexHandler.get_specific_pkey(cls, key, password), cls)
    self.assertIsNone(IndexHandler.get_specific_pkey(cls, 'x' + key, None))
    with self.assertRaises(paramiko.PasswordRequiredException):
        IndexHandler.get_specific_pkey(cls, key, None)
def test_get_root(bodhi_container):
    """The front page responds OK and shows the site title."""
    # `http_request` returns a standard `requests.Response`.
    http_response = bodhi_container.http_request(path="/", port=8080)
    try:
        assert http_response.ok
        assert "Fedora Update System" in http_response.text
    except AssertionError:
        # Dump response and server error log before re-raising.
        print(http_response)
        print(http_response.text)
        with read_file(bodhi_container, "/httpdir/errorlog") as log:
            print(log.read())
        raise
def test_not_found(self):
    """Looking up an unknown video id returns None."""
    id = PLACE_HOLDER
    response = read_file(
        "tests/api_responses/youtube/videos/list/not_found.json")
    # First mocked response serves API discovery, second the video lookup.
    http = HttpMockSequence([
        ({"status": 200}, api_discovery),
        ({"status": 200}, response),
    ])
    youtube = YouTube(secret=PLACE_HOLDER, http=http)
    self.assertIsNone(youtube.get_video_by_id(id))
def test_get_pkey_obj_with_encrypted_key(self):
    """An encrypted ed25519 key needs the right password; a wrong password
    or corrupt key data raises InvalidException with a descriptive message."""
    fname = 'test_ed25519_password.key'
    password = '******'
    cls = paramiko.Ed25519Key
    key = read_file(make_tests_data_path(fname))
    pkey = IndexHandler.get_pkey_obj(key, password, fname)
    self.assertIsInstance(pkey, cls)
    with self.assertRaises(InvalidException) as exc:
        pkey = IndexHandler.get_pkey_obj(key, 'wrongpass', fname)
    # BUG FIX: str(exc) stringifies the assertRaises context-manager
    # object, not the raised exception; the message lives on
    # exc.exception (see unittest docs for assertRaises).
    self.assertIn('Wrong password', str(exc.exception))
    with self.assertRaises(InvalidException) as exc:
        pkey = IndexHandler.get_pkey_obj('x' + key, password, fname)
    self.assertIn('Invalid private key', str(exc.exception))
def test_keys_endpoint(app, client):
    """
    Test the return value from the ``/jwt/keys`` endpoint against the
    configuration for the app.
    """
    response = client.get('/jwt/keys')
    assert 'keys' in response.json, response.data
    public_keys = response.json.get('keys')
    assert public_keys, response.json
    # Endpoint order must match the configured keypair order exactly.
    pairs = zip(public_keys, JWT_KEYPAIR_FILES.items())
    for (kid, public_key), (settings_kid, (public_key_file, _)) in pairs:
        assert kid == settings_kid
        assert public_key == utils.read_file(public_key_file)
def test_app_auth_with_valid_pubkey_by_urlencoded_form(self):
    """Auth with a valid private key in a urlencoded form succeeds and the
    websocket greets us with the expected banner."""
    url = self.get_url('/')
    privatekey = read_file(make_tests_data_path('user_rsa_key'))
    self.body_dict.update(privatekey=privatekey)
    response = yield self.async_post(url, self.body_dict)
    data = json.loads(to_str(response.body))
    self.assert_status_none(data)

    ws_url = url.replace('http', 'ws') + 'ws?id=' + data['id']
    ws = yield tornado.websocket.websocket_connect(ws_url)
    msg = yield ws.read_message()
    self.assertEqual(to_str(msg, data['encoding']), banner)
    ws.close()
def test_not_found(self):
    """Fetching details for an unknown video returns None."""
    video = YoutubeVideo("https://www.youtube.com/watch?v=invalid_id")
    response = read_file(
        "tests/api_responses/youtube/videos/list/not_found.json")
    # First mocked response serves API discovery, second the detail lookup.
    http = HttpMockSequence([
        ({"status": 200}, api_discovery),
        ({"status": 200}, response),
    ])
    youtube = YouTube(secret=PLACE_HOLDER, http=http)
    self.assertIsNone(youtube.get_video_detail(video))
def test_get_specific_pkey_with_encrypted_key(self):
    """An encrypted RSA key loads with the right password, returns None for
    corrupt data, and raises InvalidValueError without a password."""
    fname = 'test_rsa_password.key'
    cls = paramiko.RSAKey
    password = '******'
    key = read_file(make_tests_data_path(fname))

    self.assertIsInstance(
        IndexHandler.get_specific_pkey(cls, key, password), cls)
    self.assertIsNone(IndexHandler.get_specific_pkey(cls, 'x' + key, None))
    with self.assertRaises(InvalidValueError) as ctx:
        IndexHandler.get_specific_pkey(cls, key, None)
    self.assertIn('Need a password', str(ctx.exception))
def notestCloneGuestLookup(self):
    """Test using a vm name lookup for cloning"""
    for base in clone_files:
        infile = os.path.join(clonexml_dir, base + "-in.xml")
        vm = None
        try:
            vm = conn.defineXML(utils.read_file(infile))
            cloneobj = Cloner(conn)
            cloneobj.original_guest = ORIG_NAME
            cloneobj = self._default_clone_values(cloneobj)
            self._clone_compare(cloneobj, base)
        finally:
            # Always undefine the transient test VM, even on failure.
            if vm:
                vm.undefine()
def _clone_helper(self, filebase, disks=None, force_list=None,
                  skip_list=None, compare=True, useconn=None):
    """Helper for comparing clone input/output from 2 xml files"""
    infile = os.path.join(clonexml_dir, filebase + "-in.xml")
    in_content = utils.read_file(infile)

    cloneobj = Cloner(useconn or conn)
    cloneobj.original_xml = in_content
    # Cloner accumulates targets through repeated property assignment.
    for force in force_list or []:
        cloneobj.force_target = force
    for skip in skip_list or []:
        cloneobj.skip_target = skip
    cloneobj = self._default_clone_values(cloneobj, disks)

    if not compare:
        cloneobj.setup()
        return
    self._clone_compare(cloneobj, filebase)
    self._clone_define(filebase)
def _image2XMLhelper(self, image_xml, output_xmls, qemu=False):
    """Parse a virt-image XML, build a guest for each expected output file,
    and diff the generated install XML against the expected output.

    :param image_xml: image XML filename relative to ``self.basedir``
    :param output_xmls: one expected-output filename, or a list of them
    :param qemu: if True use the qemu connection and "qemu" guest type,
        otherwise the default connection and "xen"
    """
    image2guestdir = self.basedir + "image2guest/"
    image = virtimage.parse_file(self.basedir + image_xml)
    # Accept either a single filename or a list of filenames.
    # isinstance() is the idiomatic type check (PEP 8), not `type(x) is`.
    if not isinstance(output_xmls, list):
        output_xmls = [output_xmls]

    # Conditional expressions replace the fragile `cond and a or b` idiom,
    # which silently falls through to `b` whenever `a` is falsy.
    conn = self.qemuconn if qemu else self.conn
    gtype = "qemu" if qemu else "xen"

    for idx, fname in enumerate(output_xmls):
        inst = virtimage.ImageInstaller(conn, image, boot_index=idx)
        capsguest, capsdomain = inst.get_caps_guest()
        if capsguest.os_type == "hvm":
            g = utils.get_basic_fullyvirt_guest(typ=gtype)
        else:
            g = utils.get_basic_paravirt_guest()
        g.os.os_type = capsguest.os_type
        g.type = capsdomain.hypervisor_type
        g.os.arch = capsguest.arch
        utils.set_conn(conn)
        g.installer = inst
        # Run the dry install for its side effects only; its returned XML
        # was unused in the original (immediately overwritten below).
        g.start_install(return_xml=True, dry=True)
        actual_out = g.get_install_xml(install=False)

        expect_file = os.path.join(image2guestdir + fname)
        expect_out = utils.read_file(expect_file)
        # Expected fixtures embed the working directory as a placeholder.
        expect_out = expect_out.replace("REPLACEME", os.getcwd())
        utils.diff_compare(actual_out, expect_file, expect_out=expect_out)
        utils.reset_conn()
def _clone_define(self, filebase):
    """Take the valid output xml and attempt to define it on the
    connection to ensure we don't get any errors"""
    outfile = os.path.join(clonexml_dir, filebase + "-out.xml")
    xml = utils.read_file(outfile)
    utils.test_create(conn, xml)