def test_list_ipblocks(self):
    ipblocks = self.client.list_ipblocks()

    self.assertGreater(len(ipblocks), 0)
    self.assertGreater(ipblocks['items'][0]['properties']['size'], 0)
    assertRegex(self, ipblocks['items'][0]['id'], self.resource['uuid_match'])
    self.assertIn(ipblocks['items'][0]['properties']['location'],
                  self.resource['locations'])
def test_list(self):
    servers = self.client.list_servers(datacenter_id=self.datacenter['id'])

    self.assertGreater(len(servers), 0)
    self.assertEqual(servers['items'][0]['type'], 'server')
    self.assertTrue(len(servers['items']) > 0)
    assertRegex(self, servers['items'][0]['id'], self.resource['uuid_match'])
def test_panEncryptionAlgo0(self):
    pan_in = '4876010123456789012'
    panEnc, pan = pci.encrypt(pan_in, None, '00', self.depco)
    # self.assertEqual(pan, pan_in.encode('hex'))
    six.assertRegex(self, panEnc, r'00\d+')
    panClr, data = pci.decrypt(panEnc, self.depco)
    self.assertEqual(pan_in, panClr)
def test_unset_http_equiv_2(self):
    htmlmeta_hub.pyramid_helpers.htmlmeta_set('refresh', '15', request=self.request)
    b = htmlmeta_hub.pyramid_helpers.htmlmeta_as_html(request=self.request)
    six.assertRegex(self, b, re_refresh_15)

    htmlmeta_hub.pyramid_helpers.htmlmeta_unset('refresh', request=self.request)
    b = htmlmeta_hub.pyramid_helpers.htmlmeta_as_html(request=self.request)
    self.assertNotRegexpMatches(b, re_refresh_15)
def assert_status_of_phase(self, output, status, phase, test_name, xfail=None):
    """Asserts that 'output' contains a line showing the given status for the
    given phase for the given test_name.

    'xfail' should have one of the following values:
    - None (the default): assertion passes regardless of whether there is an
      EXPECTED/UNEXPECTED string
    - 'no': The line should end with the phase, with no additional text after that
    - 'expected': After the phase, the line should contain '(EXPECTED FAILURE)'
    - 'unexpected': After the phase, the line should contain '(UNEXPECTED'
    """
    expected = (r'^ *{} +'.format(re.escape(status)) +
                self._test_name_and_phase_regex(test_name, phase))

    if xfail == 'no':
        # There should be no other text after the testname and phase regex
        expected += r' *$'
    elif xfail == 'expected':
        expected += r' *{}'.format(re.escape(test_status.TEST_EXPECTED_FAILURE_COMMENT))
    elif xfail == 'unexpected':
        expected += r' *{}'.format(re.escape(test_status.TEST_UNEXPECTED_FAILURE_COMMENT_START))
    else:
        expect(xfail is None, "Unhandled value of xfail argument")

    expected_re = re.compile(expected, flags=re.MULTILINE)
    six.assertRegex(self, output, expected_re)
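# Hedged illustration (not from the source): composing a MULTILINE status regex
# the same way assert_status_of_phase does, with hypothetical status/test/phase
# values and a hand-written stand-in for _test_name_and_phase_regex.
import re

_status, _test_name, _phase = 'FAIL', 'SMS_D.f10_f10.I2000Clm50BgcCrop', 'RUN'
_expected = (r'^ *{} +'.format(re.escape(_status)) +
             r'{} +{}'.format(re.escape(_test_name), re.escape(_phase)) +
             r' *\(EXPECTED FAILURE\)')  # the xfail == 'expected' branch
_line = '  FAIL SMS_D.f10_f10.I2000Clm50BgcCrop RUN (EXPECTED FAILURE)'
assert re.search(_expected, _line, flags=re.MULTILINE)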
def test_create_complex(self):
    fwrule = FirewallRule(**self.resource['fwrule'])
    nic = NIC(firewall_rules=[fwrule], **self.resource['nic'])
    volume = Volume(image=self.image['id'],
                    image_password='******',
                    ssh_keys=['ssh-rsa AAAAB3NzaC1'],
                    **self.resource['volume'])

    server = Server(
        nics=[nic],
        create_volumes=[volume],
        **self.resource['server'])

    composite_server = self.client.create_server(
        datacenter_id=self.datacenter['id'],
        server=server)
    wait_for_completion(self.client, composite_server, 'create_server',
                        wait_timeout=600)

    composite_server = self.client.get_server(
        datacenter_id=self.datacenter['id'],
        server_id=composite_server['id'])

    assertRegex(self, composite_server['id'], self.resource['uuid_match'])
    self.assertEqual(composite_server['properties']['name'],
                     self.resource['server']['name'])
    self.assertEqual(composite_server['properties']['cores'],
                     self.resource['server']['cores'])
    self.assertEqual(composite_server['properties']['ram'],
                     self.resource['server']['ram'])
    self.assertEqual(composite_server['properties']['availabilityZone'], 'AUTO')
    self.assertIn(composite_server['properties']['vmState'],
                  self.resource['vm_states'])
def test_list_images(self):
    images = self.client.list_images()

    assertRegex(self, images['items'][0]['id'], self.resource['uuid_match'])
    self.assertGreater(len(images), 0)
    self.assertEqual(images['items'][0]['type'], 'image')
    self.assertTrue(len(images['items']) > 0)
def test_node_default_attrs(self):
    tasks = [
        {'id': 'task-A'},
    ]
    dotgraph = self.get_dotgraph_with_tasks(tasks)
    six.assertRegex(self, dotgraph, '"task-A" .*color=yellowgreen.*;')
    six.assertRegex(self, dotgraph, '"task-A" .*style=filled.*;')
def test_generate_backup_passhprase(self):
    """Verify that backup passphrase generation works as expected"""

    exp = r"^([0-9A-Za-z./]{5}-){3}[0-9A-Za-z./]{5}$"
    for _i in range(100):
        bp = BlockDev.crypto_generate_backup_passphrase()
        six.assertRegex(self, bp, exp)
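# For illustration only (not part of the test above): strings of the form
# "xxxxx-xxxxx-xxxxx-xxxxx" drawn from [0-9A-Za-z./] satisfy the pattern;
# the concrete passphrases here are made up.
import re

_exp = r"^([0-9A-Za-z./]{5}-){3}[0-9A-Za-z./]{5}$"
assert re.match(_exp, "Ab1/c-2dE.f-GhI3j-k4Lm5")
assert not re.match(_exp, "short-pass")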
def test_source_methods_with_full_model(self):
    from sherpa.utils.err import IdentifierErr

    ui.load_data('full', self.ascii)
    ui.set_full_model('full', 'powlaw1d.p1')

    # Test Case 1
    try:
        ui.get_source('full')
    except IdentifierErr as e:
        six.assertRegex(
            self, str(e),
            "Convolved model\n.*\n is set for dataset full. You should use get_model instead.",
            str(e))
    try:
        ui.plot_source('full')
    except IdentifierErr as e:
        six.assertRegex(
            self, str(e),
            "Convolved model\n.*\n is set for dataset full. You should use plot_model instead.",
            str(e))

    # Test Case 2
    ui.set_source('full', 'powlaw1d.p2')
    ui.get_source('full')

    # Test Case 3
    ui.load_data('not_full', self.ascii)
    try:
        ui.get_source('not_full')
    except IdentifierErr as e:
        self.assertEqual(
            'source not_full has not been set, consider using set_source() or set_model()',
            str(e))
def testKeyFile(self):
    # Make sure sha512 appears in returned file documents
    resp = self.request('/file/%s' % self.publicFile['_id'])
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['sha512'], self.publicFile['sha512'])

    template = '/file/%s/hashsum_file/%s'

    # Test with bad algo
    resp = self.request(template % (self.publicFile['_id'], 'foo'))
    self.assertStatus(resp, 400)
    six.assertRegex(self, resp.json['message'], '^Invalid value for algo: "foo"')

    # Should work with public file
    resp = self.request(template % (self.publicFile['_id'], 'sha512'), isJson=False)
    self.assertStatusOk(resp)
    respBody = self.getBody(resp)
    self.assertEqual(respBody, '%s\n' % self.publicFile['sha512'])
    self.assertEqual(len(respBody), 129)

    # Should not work with private file
    resp = self.request(template % (self.privateFile['_id'], 'sha512'))
    self.assertStatus(resp, 401)
    six.assertRegex(self, resp.json['message'], '^Read access denied')
def test_encryptlist(self):
    enc_data = best.encryptlist(self.depco,
                                ['67033111201401507', '67033111201401556'],
                                '20AEC80DEC6474265EA3657B8D8BAA0DEE5058',
                                key_variant='01',
                                ksk='01',
                                enc2key='00000000000000201508281410376874')
    for elt in enc_data:
        six.assertRegex(self, elt, r'^[A-F0-9]{32}$')
def test_encrypt_ecb(self):
    e2k, enc_data = best.encrypt(self.depco,
                                 '0102030401020304',
                                 'B426EE161E95AAE7EBE131D4BF63C71F15F2D8',
                                 ksk='01',
                                 enc2key='00000000000000201508281410361909')
    six.assertRegex(self, enc_data, r'^[A-F0-9]{16}')
    six.assertRegex(self, e2k, r'^[A-F0-9]{32}$')
def assert_status_of_phase(self, output, status, phase, test_name):
    """Asserts that 'output' contains a line showing the given status for the
    given phase for the given test_name"""
    expected = re.compile(r'^ *{} +'.format(re.escape(status)) +
                          self._test_name_and_phase_regex(test_name, phase),
                          flags=re.MULTILINE)
    six.assertRegex(self, output, expected)
def test_get_ipblock(self):
    ipblock = self.client.get_ipblock(self.ipblock1['id'])

    assertRegex(self, ipblock['id'], self.resource['uuid_match'])
    self.assertEqual(ipblock['id'], self.ipblock1['id'])
    self.assertEqual(ipblock['properties']['name'],
                     self.resource['ipblock']['name'])
    self.assertEqual(ipblock['properties']['size'],
                     self.resource['ipblock']['size'])
    self.assertEqual(ipblock['properties']['location'],
                     self.resource['ipblock']['location'])
def test_unset_http_equiv_2(self):
    a = htmlmeta_hub.HtmlMetaHub()
    a.set('refresh', '15')
    b = a.as_html()
    six.assertRegex(self, b, re_refresh_15)

    a.unset('refresh')
    b = a.as_html()
    self.assertNotRegexpMatches(b, re_refresh_15)
def test_get_lan_members(self):
    members = self.client.get_lan_members(
        datacenter_id=self.datacenter['id'],
        lan_id=self.lan['id'])

    self.assertGreater(len(members), 0)
    self.assertEqual(members['items'][0]['type'], 'nic')
    self.assertEqual(members['items'][0]['properties']['name'],
                     self.resource['nic']['name'])
    assertRegex(self, members['items'][0]['properties']['mac'],
                self.resource['mac_match'])
def assertQuickReplyLinks(self, output):
    """Assert reply links created by setup_quick_replies are present."""
    link = r'data-qr="%s">\s*%s\s*</a>'
    six.assertRegex(
        self, output.get_data(as_text=True), link % (self.r1, self.sr1)
    )
    six.assertRegex(
        self, output.get_data(as_text=True), link % (self.r2, self.sr2)
    )
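# Hedged illustration (markup and values are hypothetical, not from the app
# under test): the kind of anchor tag the `link` template above is meant to hit.
import re

_link = r'data-qr="%s">\s*%s\s*</a>'
_html = '<a class="quick-reply" data-qr="yes">\n  Yes\n</a>'
assert re.search(_link % ('yes', 'Yes'), _html)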
def test_bad_value(self):
    # A bad value should be ignored and replaced with the default value.
    bad_value = 'wibble'
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        self.options.conventions_override = bad_value
        self.assertFalse(self.options.conventions_override)
        exp_wmsg = 'Attempting to set invalid value {!r}'.format(bad_value)
        six.assertRegex(self, str(w[0].message), exp_wmsg)
def testRunfilesLibrariesFindRunfilesWithoutEnvvars(self):
    for s, t, exe in [
            ("WORKSPACE.mock", "WORKSPACE", False),
            ("bar/BUILD.mock", "bar/BUILD", False),
            ("bar/bar.py", "bar/bar.py", True),
            ("bar/bar-py-data.txt", "bar/bar-py-data.txt", False),
            ("bar/Bar.java", "bar/Bar.java", False),
            ("bar/bar-java-data.txt", "bar/bar-java-data.txt", False),
            ("bar/bar.sh", "bar/bar.sh", True),
            ("bar/bar-sh-data.txt", "bar/bar-sh-data.txt", False),
            ("bar/bar.cc", "bar/bar.cc", False),
            ("bar/bar-cc-data.txt", "bar/bar-cc-data.txt", False),
    ]:
        self.CopyFile(
            self.Rlocation(
                "io_bazel/src/test/py/bazel/testdata/runfiles_test/" + s),
            t, exe)

    exit_code, stdout, stderr = self.RunBazel(["info", "bazel-bin"])
    self.AssertExitCode(exit_code, 0, stderr)
    bazel_bin = stdout[0]

    exit_code, _, stderr = self.RunBazel([
        "build", "--verbose_failures", "//bar:bar-py", "//bar:bar-java",
        "//bar:bar-sh", "//bar:bar-cc"
    ])
    self.AssertExitCode(exit_code, 0, stderr)

    for lang in [("py", "Python", "bar.py"), ("java", "Java", "Bar.java"),
                 ("sh", "Bash", "bar.sh"), ("cc", "C++", "bar.cc")]:
        if test_base.TestBase.IsWindows():
            bin_path = os.path.join(bazel_bin, "bar/bar-%s.exe" % lang[0])
        else:
            bin_path = os.path.join(bazel_bin, "bar/bar-" + lang[0])
        self.assertTrue(os.path.exists(bin_path))

        exit_code, stdout, stderr = self.RunProgram(
            [bin_path],
            env_remove=set([
                "RUNFILES_MANIFEST_FILE",
                "RUNFILES_MANIFEST_ONLY",
                "RUNFILES_DIR",
                "JAVA_RUNFILES",
            ]),
            env_add={"TEST_SRCDIR": "__ignore_me__"})
        self.AssertExitCode(exit_code, 0, stderr)
        if len(stdout) < 2:
            self.fail("stdout(%s): %s" % (lang[0], stdout))
        self.assertEqual(stdout[0], "Hello %s Bar!" % lang[1])
        six.assertRegex(self, stdout[1], "^rloc=.*/bar/bar-%s-data.txt" % lang[0])
        self.assertNotIn("__ignore_me__", stdout[1])

        with open(stdout[1].split("=", 1)[1], "r") as f:
            lines = [l.strip() for l in f.readlines()]
        if len(lines) != 1:
            self.fail("lines(%s): %s" % (lang[0], lines))
        self.assertEqual(lines[0], "data for " + lang[2])
def test_read_only(self):
    opt = OptionsDictionary(read_only=True)
    opt.declare('permanent', 3.0)

    with self.assertRaises(KeyError) as context:
        opt['permanent'] = 4.0

    expected_msg = "Tried to set read-only option 'permanent'."
    assertRegex(self, str(context.exception), expected_msg)
def test_user_agent(self):
    agent = cloudinary.get_user_agent()
    platform = 'MyPlatform/1.2.3 (Test code)'
    six.assertRegex(self, agent, r'CloudinaryPython/\d\.\d+\.\d+')

    temp = cloudinary.USER_PLATFORM
    cloudinary.USER_PLATFORM = platform
    result = cloudinary.get_user_agent()
    cloudinary.USER_PLATFORM = temp  # restore value before assertion

    self.assertEqual(result, platform + ' ' + agent)
def test_encrypt_cbc_variant(self):
    e2k, enc_data = best.encrypt(self.depco,
                                 '0102030401020304',
                                 '20AEC80DEC6474265EA3657B8D8BAA0DEE5058',
                                 ksk='01',
                                 key_variant='01',
                                 ecb=False,
                                 iv='0000000000000000',
                                 enc2key='00000000000000201508281410348350')
    six.assertRegex(self, enc_data, r'^[A-F0-9]{16}$')
    six.assertRegex(self, e2k, r'^[A-F0-9]{32}$')
def test_reserve_ipblock(self):
    ipblock = self.client.reserve_ipblock(IPBlock(**self.resource['ipblock']))

    assertRegex(self, ipblock['id'], self.resource['uuid_match'])
    self.assertEqual(ipblock['properties']['name'],
                     self.resource['ipblock']['name'])
    self.assertEqual(ipblock['properties']['size'],
                     self.resource['ipblock']['size'])
    self.assertEqual(ipblock['properties']['location'],
                     self.resource['ipblock']['location'])

    self.client.delete_ipblock(ipblock['id'])
def test_encrypt_ecb_variant(self):
    e2k, enc_data = best.encrypt(self.depco,
                                 '67033111201401507',
                                 '20AEC80DEC6474265EA3657B8D8BAA0DEE5058',
                                 key_variant='01',
                                 ksk='01',
                                 enc2key='00000000000000201508281410376874')
    six.assertRegex(self, enc_data, r'^[A-F0-9]{32}$')
    six.assertRegex(self, e2k, r'^[A-F0-9]{32}$')
def test_cell_datetime_objects(self):
    future = Future()
    new_value = not future.cell_datetime_objects
    with warnings.catch_warnings(record=True) as warn:
        warnings.simplefilter('always')
        future.cell_datetime_objects = new_value
        self.assertEqual(future.cell_datetime_objects, new_value)
    exp_wmsg = "'Future' property 'cell_datetime_objects' is deprecated"
    six.assertRegex(self, str(warn[0]), exp_wmsg)
def test_print_report(self):
    io = six.StringIO()
    self.h.print_report(unit=self.unit, file=io)
    expect = r'''\AFunctionName UsedBytes AcquiredBytes Occurrence
 +Exp +[0-9.\-e]+.?B +[0-9.\-e]+.?B +[0-9]+
 +ReLU +[0-9.\-e]+.?B +[0-9.\-e]+.?B +[0-9]+$
'''
    actual = io.getvalue()
    six.assertRegex(self, actual, expect)
def test_create_simple(self):
    # Use the server created during server test setup
    assertRegex(self, self.server['id'], self.resource['uuid_match'])
    self.assertEqual(self.server['type'], 'server')
    self.assertEqual(self.server['properties']['name'],
                     self.resource['server']['name'])
    self.assertEqual(self.server['properties']['cores'],
                     self.resource['server']['cores'])
    self.assertEqual(self.server['properties']['ram'],
                     self.resource['server']['ram'])
    self.assertIsNone(self.server['properties']['availabilityZone'])
    self.assertIsNone(self.server['properties']['vmState'])
def test_get(self):
    datacenter = self.client.get_datacenter(
        datacenter_id=self.datacenter['id'])

    assertRegex(self, datacenter['id'], self.resource['uuid_match'])
    self.assertEqual(datacenter['type'], 'datacenter')
    self.assertEqual(datacenter['id'], self.datacenter['id'])
    self.assertEqual(datacenter['properties']['name'],
                     self.resource['datacenter']['name'])
    self.assertEqual(datacenter['properties']['description'],
                     self.resource['datacenter']['description'])
    self.assertEqual(datacenter['properties']['location'],
                     self.resource['datacenter']['location'])
def _check_compliance_level(testCase, response):
    """ Current complies with Level 1 API, so should assert no more. """
    six.assertRegex(
        testCase, response['Link'],
        r'\<http:\/\/library.stanford.edu\/iiif\/image-api\/' +
        r'compliance.html#level[01]\>;rel="compliesTo"',
        "Compliance header missing")
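# For illustration only: a Link header value that satisfies the compliance
# check above (the URL is the one embedded in the regex, the level is hypothetical).
import re

_pattern = (r'\<http:\/\/library.stanford.edu\/iiif\/image-api\/' +
            r'compliance.html#level[01]\>;rel="compliesTo"')
_header = ('<http://library.stanford.edu/iiif/image-api/'
           'compliance.html#level1>;rel="compliesTo"')
assert re.search(_pattern, _header)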
def test_delete_ipblock(self):
    ipblock = self.client.delete_ipblock(self.ipblock2['id'])
    self.assertTrue(ipblock)
    assertRegex(self, ipblock['requestId'], self.resource['uuid_match'])
def testRunfilesLibrariesFindRunfilesWithRunfilesManifestEnvvar(self): for s, t, exe in [ ("WORKSPACE.mock", "WORKSPACE", False), ("bar/BUILD.mock", "bar/BUILD", False), # Note: do not test Python here, because py_binary always needs a # runfiles tree, even on Windows, because it needs __init__.py files in # every directory where there may be importable modules, so Bazel always # needs to create a runfiles tree for py_binary. ("bar/Bar.java", "bar/Bar.java", False), ("bar/bar-java-data.txt", "bar/bar-java-data.txt", False), ("bar/bar.sh", "bar/bar.sh", True), ("bar/bar-sh-data.txt", "bar/bar-sh-data.txt", False), ]: self.CopyFile( self.Rlocation("io_bazel/src/test/py/bazel/testdata/runfiles_test/" + s), t, exe) exit_code, stdout, stderr = self.RunBazel(["info", "bazel-bin"]) self.AssertExitCode(exit_code, 0, stderr) bazel_bin = stdout[0] for lang in [("java", "Java"), ("sh", "Bash")]: # TODO(laszlocsomor): add "cc" when ready. exit_code, _, stderr = self.RunBazel([ "build", "--experimental_enable_runfiles=no", "//bar:bar-" + lang[0] ]) self.AssertExitCode(exit_code, 0, stderr) if test_base.TestBase.IsWindows(): bin_path = os.path.join(bazel_bin, "bar/bar-%s.exe" % lang[0]) else: bin_path = os.path.join(bazel_bin, "bar/bar-" + lang[0]) manifest_path = bin_path + ".runfiles_manifest" self.assertTrue(os.path.exists(bin_path)) self.assertTrue(os.path.exists(manifest_path)) # Create a copy of the runfiles manifest, replacing # "bar/bar-<lang>-data.txt" with a custom file. mock_bar_dep = self.ScratchFile("bar-%s-mockdata.txt" % lang[0], ["mock %s data" % lang[0]]) if test_base.TestBase.IsWindows(): # Runfiles manifests use forward slashes as path separators, even on # Windows. mock_bar_dep = mock_bar_dep.replace("\\", "/") manifest_key = "foo_ws/bar/bar-%s-data.txt" % lang[0] mock_manifest_line = manifest_key + " " + mock_bar_dep with open(manifest_path, "rt") as f: # Only rstrip newlines. Do not rstrip() completely, because that would # remove spaces too. This is necessary in order to have at least one # space in every manifest line. # Some manifest entries don't have any path after this space, namely the # "__init__.py" entries. (Bazel writes such manifests on every # platform). The reason is that these files are never symlinks in the # runfiles tree, Bazel actually creates empty __init__.py files (again # on every platform). However to keep these manifest entries correct, # they need to have a space character. # We could probably strip thses lines completely, but this test doesn't # aim to exercise what would happen in that case. mock_manifest_data = [ mock_manifest_line if line.split(" ", 1)[0] == manifest_key else line.rstrip("\n\r") for line in f ] substitute_manifest = self.ScratchFile( "mock-%s.runfiles/MANIFEST" % lang[0], mock_manifest_data) exit_code, stdout, stderr = self.RunProgram( [bin_path], env_remove=set(["RUNFILES_DIR"]), env_add={ # On Linux/macOS, the Java launcher picks up JAVA_RUNFILES and # ignores RUNFILES_MANIFEST_FILE. "JAVA_RUNFILES": substitute_manifest[:-len("/MANIFEST")], # On Windows, the Java launcher picks up RUNFILES_MANIFEST_FILE. # The C++ runfiles library picks up RUNFILES_MANIFEST_FILE on all # platforms. "RUNFILES_MANIFEST_FILE": substitute_manifest, "RUNFILES_MANIFEST_ONLY": "1", "TEST_SRCDIR": "__ignore_me__", }) self.AssertExitCode(exit_code, 0, stderr) if len(stdout) < 2: self.fail("stdout: %s" % stdout) self.assertEqual(stdout[0], "Hello %s Bar!" 
% lang[1]) six.assertRegex(self, stdout[1], "^rloc=" + mock_bar_dep) self.assertNotIn("__ignore_me__", stdout[1]) with open(stdout[1].split("=", 1)[1], "r") as f: lines = [l.strip() for l in f.readlines()] if len(lines) != 1: self.fail("lines: %s" % lines) self.assertEqual(lines[0], "mock %s data" % lang[0])
def test_unicode(self):
    data_issue = models.DataIssue.objects.first()
    six.assertRegex(self, str(data_issue),
                    r'Data Issue \([0-9 :-]{19}\): test \(me\)')
def testS3AssetstoreAdapter(self): # Delete the default assetstore Assetstore().remove(self.assetstore) s3Regex = (r'^(https://s3.amazonaws.com(:443)?/bucketname/foo/bar|' 'https://bucketname.s3.amazonaws.com(:443)?/foo/bar)') params = { 'name': 'S3 Assetstore', 'type': AssetstoreType.S3, 'bucket': '', 'accessKeyId': 'someKey', 'secret': 'someSecret', 'prefix': '/foo/bar/' } # Validation should fail with empty bucket name resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params) self.assertStatus(resp, 400) self.assertEqual( resp.json, { 'type': 'validation', 'field': 'bucket', 'message': 'Bucket must not be empty.' }) params['bucket'] = 'bucketname' # Validation should fail with a missing bucket resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params) self.assertStatus(resp, 400) self.assertEqual( resp.json, { 'type': 'validation', 'field': 'bucket', 'message': 'Unable to write into bucket "bucketname".' }) # Validation should fail with a bogus service name params['service'] = 'ftp://nowhere' resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params) self.assertStatus(resp, 400) del params['service'] # Create a bucket (mocked using moto), so that we can create an assetstore in it botoParams = makeBotoConnectParams(params['accessKeyId'], params['secret']) client = mock_s3.createBucket(botoParams, 'bucketname') # Create an assetstore resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params) self.assertStatusOk(resp) assetstore = Assetstore().load(resp.json['_id']) # Set the assetstore to current. This is really to test the edit assetstore code. params['current'] = True resp = self.request(path='/assetstore/%s' % assetstore['_id'], method='PUT', user=self.admin, params=params) self.assertStatusOk(resp) # Test init for a single-chunk upload folders = Folder().childFolders(self.admin, 'user') parentFolder = six.next(folders) params = { 'parentType': 'folder', 'parentId': parentFolder['_id'], 'name': 'My File.txt', 'size': 1024, 'mimeType': 'text/plain' } resp = self.request(path='/file', method='POST', user=self.admin, params=params) self.assertStatusOk(resp) self.assertEqual(resp.json['received'], 0) self.assertEqual(resp.json['size'], 1024) self.assertEqual(resp.json['behavior'], 's3') singleChunkUpload = resp.json s3Info = singleChunkUpload['s3'] self.assertEqual(s3Info['chunked'], False) self.assertIsInstance(s3Info['chunkLength'], int) self.assertEqual(s3Info['request']['method'], 'PUT') six.assertRegex(self, s3Info['request']['url'], s3Regex) self.assertEqual(s3Info['request']['headers']['x-amz-acl'], 'private') # Test resume of a single-chunk upload resp = self.request(path='/file/offset', method='GET', user=self.admin, params={'uploadId': resp.json['_id']}) self.assertStatusOk(resp) self.assertEqual(resp.json['method'], 'PUT') self.assertTrue('headers' in resp.json) six.assertRegex(self, resp.json['url'], s3Regex) # Test finalize for a single-chunk upload resp = self.request(path='/file/completion', method='POST', user=self.admin, params={'uploadId': singleChunkUpload['_id']}) self.assertStatusOk(resp) self.assertEqual(resp.json['size'], 1024) self.assertEqual(resp.json['assetstoreId'], str(assetstore['_id'])) self.assertFalse('s3Key' in resp.json) self.assertFalse('relpath' in resp.json) file = File().load(resp.json['_id'], force=True) self.assertTrue('s3Key' in file) six.assertRegex(self, file['relpath'], '^/bucketname/foo/bar/') # Test init for a multi-chunk upload 
params['size'] = 1024 * 1024 * 1024 * 5 resp = self.request(path='/file', method='POST', user=self.admin, params=params) self.assertStatusOk(resp) multiChunkUpload = resp.json s3Info = multiChunkUpload['s3'] self.assertEqual(s3Info['chunked'], True) self.assertIsInstance(s3Info['chunkLength'], int) self.assertEqual(s3Info['request']['method'], 'POST') six.assertRegex(self, s3Info['request']['url'], s3Regex) # Test uploading a chunk resp = self.request(path='/file/chunk', method='POST', user=self.admin, params={ 'uploadId': multiChunkUpload['_id'], 'offset': 0, 'chunk': json.dumps({ 'partNumber': 1, 's3UploadId': 'abcd' }) }) self.assertStatusOk(resp) six.assertRegex(self, resp.json['s3']['request']['url'], s3Regex) self.assertEqual(resp.json['s3']['request']['method'], 'PUT') # We should not be able to call file/offset with multi-chunk upload resp = self.request(path='/file/offset', method='GET', user=self.admin, params={'uploadId': multiChunkUpload['_id']}) self.assertStatus(resp, 400) self.assertEqual( resp.json, { 'type': 'validation', 'message': 'You should not call requestOffset on a chunked ' 'direct-to-S3 upload.' }) # Test finalize for a multi-chunk upload resp = self.request(path='/file/completion', method='POST', user=self.admin, params={'uploadId': multiChunkUpload['_id']}) largeFile = resp.json self.assertStatusOk(resp) six.assertRegex(self, resp.json['s3FinalizeRequest']['url'], s3Regex) self.assertEqual(resp.json['s3FinalizeRequest']['method'], 'POST') # Test init for an empty file (should be no-op) params['size'] = 0 resp = self.request(path='/file', method='POST', user=self.admin, params=params) emptyFile = resp.json self.assertStatusOk(resp) self.assertFalse('behavior' in resp.json) self.assertFalse('s3' in resp.json) # Test download for an empty file resp = self.request(path='/file/%s/download' % emptyFile['_id'], user=self.admin, method='GET', isJson=False) self.assertStatusOk(resp) self.assertEqual(self.getBody(resp), '') self.assertEqual(resp.headers['Content-Length'], 0) self.assertEqual(resp.headers['Content-Disposition'], 'attachment; filename="My File.txt"') # Test download of a non-empty file resp = self.request(path='/file/%s/download' % largeFile['_id'], user=self.admin, method='GET', isJson=False) self.assertStatus(resp, 303) six.assertRegex(self, resp.headers['Location'], s3Regex) # Test download of a non-empty file, with Content-Disposition=inline. # Expect the special S3 header response-content-disposition. 
params = {'contentDisposition': 'inline'} inlineRegex = r'response-content-disposition=inline%3B%20filename%3D%22My%20File.txt%22' resp = self.request(path='/file/%s/download' % largeFile['_id'], user=self.admin, method='GET', isJson=False, params=params) self.assertStatus(resp, 303) six.assertRegex(self, resp.headers['Location'], s3Regex) six.assertRegex(self, resp.headers['Location'], inlineRegex) # Test download as part of a streaming zip @httmock.all_requests def s3_pipe_mock(url, request): if 's3.amazonaws.com' in url.netloc and url.scheme == 'https': return 'dummy file contents' else: raise Exception('Unexpected url %s' % url) with httmock.HTTMock(s3_pipe_mock): resp = self.request('/folder/%s/download' % parentFolder['_id'], method='GET', user=self.admin, isJson=False) self.assertStatusOk(resp) zip = zipfile.ZipFile(io.BytesIO(self.getBody(resp, text=False)), 'r') self.assertTrue(zip.testzip() is None) extracted = zip.read('Public/My File.txt') self.assertEqual(extracted, b'dummy file contents') # Create a "test" key for importing client.put_object(Bucket='bucketname', Key='foo/bar/test', Body=b'') # Attempt to import item directly into user; should fail resp = self.request('/assetstore/%s/import' % assetstore['_id'], method='POST', params={ 'importPath': '/foo/bar', 'destinationType': 'user', 'destinationId': self.admin['_id'] }, user=self.admin) self.assertStatus(resp, 400) self.assertEqual( resp.json['message'], 'Keys cannot be imported directly underneath a user.') # Import existing data from S3 resp = self.request('/folder', method='POST', params={ 'parentType': 'folder', 'parentId': parentFolder['_id'], 'name': 'import destinaton' }, user=self.admin) self.assertStatusOk(resp) importFolder = resp.json resp = self.request('/assetstore/%s/import' % assetstore['_id'], method='POST', params={ 'importPath': '', 'destinationType': 'folder', 'destinationId': importFolder['_id'], }, user=self.admin) self.assertStatusOk(resp) # Data should now appear in the tree resp = self.request('/folder', user=self.admin, params={ 'parentId': importFolder['_id'], 'parentType': 'folder' }) self.assertStatusOk(resp) children = resp.json self.assertEqual(len(children), 1) self.assertEqual(children[0]['name'], 'foo') resp = self.request('/folder', user=self.admin, params={ 'parentId': children[0]['_id'], 'parentType': 'folder' }) self.assertStatusOk(resp) children = resp.json self.assertEqual(len(children), 1) self.assertEqual(children[0]['name'], 'bar') resp = self.request('/item', user=self.admin, params={'folderId': children[0]['_id']}) self.assertStatusOk(resp) self.assertEqual(len(resp.json), 1) item = resp.json[0] self.assertEqual(item['name'], 'test') self.assertEqual(item['size'], 0) resp = self.request('/item/%s/files' % item['_id'], user=self.admin) self.assertStatusOk(resp) self.assertEqual(len(resp.json), 1) self.assertFalse('imported' in resp.json[0]) self.assertFalse('relpath' in resp.json[0]) file = File().load(resp.json[0]['_id'], force=True) self.assertTrue(file['imported']) self.assertFalse('relpath' in file) self.assertEqual(file['size'], 0) self.assertEqual(file['assetstoreId'], assetstore['_id']) self.assertTrue( client.get_object(Bucket='bucketname', Key='foo/bar/test') is not None) # Deleting an imported file should not delete it from S3 with mock.patch('girder.events.daemon.trigger') as daemon: resp = self.request('/item/%s' % str(item['_id']), method='DELETE', user=self.admin) self.assertStatusOk(resp) self.assertEqual(len(daemon.mock_calls), 0) # Create the file key in the moto 
s3 store so that we can test that it gets deleted. file = File().load(largeFile['_id'], user=self.admin) client.create_multipart_upload(Bucket='bucketname', Key=file['s3Key']) client.put_object(Bucket='bucketname', Key=file['s3Key'], Body=b'test') # Test delete for a non-empty file resp = self.request(path='/file/%s' % largeFile['_id'], user=self.admin, method='DELETE') self.assertStatusOk(resp) # The file should be gone now resp = self.request(path='/file/%s/download' % largeFile['_id'], user=self.admin, isJson=False) self.assertStatus(resp, 400) # The actual delete may still be in the event queue, so we want to # check the S3 bucket directly. startTime = time.time() while True: try: client.get_object(Bucket='bucketname', Key=file['s3Key']) except botocore.exceptions.ClientError: break if time.time() - startTime > 15: break # give up and fail time.sleep(0.1) with self.assertRaises(botocore.exceptions.ClientError): client.get_object(Bucket='bucketname', Key=file['s3Key']) resp = self.request(path='/folder/%s' % parentFolder['_id'], method='DELETE', user=self.admin) self.assertStatusOk(resp)
def test_createTestCommands_testsuite(self):
    """The correct create_test commands should be run with a test suite

    This tests that multiple create_test commands are run, one with each
    compiler in the given test suite for the given machine

    This test also checks the stdout and stderr files used for each command

    It also ensures that the cs.status.fails and cs.status files are created
    """
    machine = self._make_machine()
    with mock.patch('ctsm.run_sys_tests.datetime') as mock_date, \
         mock.patch('ctsm.run_sys_tests.get_tests_from_xml') as mock_get_tests:
        mock_date.now.side_effect = self._fake_now
        mock_get_tests.return_value = [{'compiler': 'intel'},
                                       {'compiler': 'pgi'},
                                       {'compiler': 'intel'}]
        run_sys_tests(machine=machine, cime_path=self._cime_path(),
                      suite_name='my_suite')

    all_commands = machine.job_launcher.get_commands()
    self.assertEqual(len(all_commands), 2)
    for command in all_commands:
        six.assertRegex(self, command.cmd,
                        r'--xml-category +{}(\s|$)'.format('my_suite'))
        six.assertRegex(self, command.cmd,
                        r'--xml-machine +{}(\s|$)'.format(self._MACHINE_NAME))
    six.assertRegex(self, all_commands[0].cmd, r'--xml-compiler +intel(\s|$)')
    six.assertRegex(self, all_commands[1].cmd, r'--xml-compiler +pgi(\s|$)')

    expected_testid1 = '{}_int'.format(self._expected_testid())
    expected_testid2 = '{}_pgi'.format(self._expected_testid())
    six.assertRegex(self, all_commands[0].cmd,
                    r'--test-id +{}(\s|$)'.format(expected_testid1))
    six.assertRegex(self, all_commands[1].cmd,
                    r'--test-id +{}(\s|$)'.format(expected_testid2))

    expected_testroot_path = os.path.join(self._scratch, self._expected_testroot())
    self.assertEqual(
        all_commands[0].out,
        os.path.join(expected_testroot_path, 'STDOUT.' + expected_testid1))
    self.assertEqual(
        all_commands[0].err,
        os.path.join(expected_testroot_path, 'STDERR.' + expected_testid1))
    self.assertEqual(
        all_commands[1].out,
        os.path.join(expected_testroot_path, 'STDOUT.' + expected_testid2))
    self.assertEqual(
        all_commands[1].err,
        os.path.join(expected_testroot_path, 'STDERR.' + expected_testid2))

    expected_cs_status = os.path.join(self._scratch,
                                      self._expected_testroot(),
                                      'cs.status')
    self.assertTrue(os.path.isfile(expected_cs_status))
    expected_cs_status = os.path.join(self._scratch,
                                      self._expected_testroot(),
                                      'cs.status.fails')
    self.assertTrue(os.path.isfile(expected_cs_status))
def testUploadDownload(self): localDir = os.path.join(os.path.dirname(__file__), 'testdata') args = [ 'upload', str(self.publicFolder['_id']), localDir, '--parent-type=folder' ] with self.assertRaises(requests.HTTPError): invokeCli(args) with self.assertRaises(requests.HTTPError): invokeCli(['--api-key', '1234'] + args) # Test dry-run and blacklist options ret = invokeCli(args + ['--dry-run', '--blacklist=hello.txt'], username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertIn('Ignoring file hello.txt as it is blacklisted', ret['stdout']) # Test with multiple files in a dry-run ret = invokeCli([ 'upload', str(self.publicFolder['_id']), '--parent-type=folder', os.path.join(localDir, 'hello.txt'), os.path.join(localDir, 'world.txt'), '--dry-run' ], username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertIn('Uploading Item from hello.txt', ret['stdout']) self.assertIn('Uploading Item from world.txt', ret['stdout']) # Actually upload the test data ret = invokeCli(args, username='******', password='******', useApiUrl=True) self.assertEqual(ret['exitVal'], 0) six.assertRegex( self, ret['stdout'], 'Creating Folder from .*tests/cases/py_client/testdata') self.assertIn('Uploading Item from hello.txt', ret['stdout']) subfolder = six.next(Folder().childFolders(parent=self.publicFolder, parentType='folder', limit=1)) self.assertEqual(subfolder['name'], 'testdata') items = list(Folder().childItems(folder=subfolder)) toUpload = list(os.listdir(localDir)) self.assertEqual(len(toUpload), len(items)) downloadDir = os.path.join(os.path.dirname(localDir), '_testDownload') ret = invokeCli(('download', str(subfolder['_id']), downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) for downloaded in os.listdir(downloadDir): if downloaded == '.girder_metadata': continue self.assertIn(downloaded, toUpload) # Download again to same location, we should not get errors ret = invokeCli(('download', str(subfolder['_id']), downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) # Download again to same location, using path, we should not get errors ret = invokeCli( ('download', '/user/mylogin/Public/testdata', downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) # Test uploading with reference queryList = [] @httmock.urlmatch(netloc='localhost', path='/api/v1/file$', method='POST') def checkParams(url, request): # Add query for every file upload request queryList.append(six.moves.urllib.parse.parse_qs(url[3])) with httmock.HTTMock(checkParams): ret = invokeCli(args + ['--reference', 'reference_string'], username='******', password='******') # Test if reference is sent with each file upload fileList = os.listdir(localDir) self.assertTrue(queryList) self.assertTrue(fileList) self.assertEqual(len(queryList), len(fileList)) for query in queryList: self.assertIn('reference', query) self.assertIn('reference_string', query['reference']) # Create a collection and subfolder resp = self.request('/collection', 'POST', user=self.user, params={'name': 'my_collection'}) self.assertStatusOk(resp) resp = self.request('/folder', 'POST', user=self.user, params={ 'parentType': 'collection', 'parentId': resp.json['_id'], 'name': 'my_folder' }) self.assertStatusOk(resp) # Test download of the collection ret = invokeCli(('download', '--parent-type=collection', '/collection/my_collection', downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) 
self.assertTrue(os.path.isdir(os.path.join(downloadDir, 'my_folder'))) shutil.rmtree(downloadDir, ignore_errors=True) # Test download of the collection auto-detecting parent-type ret = invokeCli(('download', '/collection/my_collection', downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertTrue(os.path.isdir(os.path.join(downloadDir, 'my_folder'))) shutil.rmtree(downloadDir, ignore_errors=True) # Test download of a user ret = invokeCli( ('download', '--parent-type=user', '/user/mylogin', downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertTrue( os.path.isfile( os.path.join(downloadDir, 'Public', 'testdata', 'hello.txt'))) shutil.rmtree(downloadDir, ignore_errors=True) # Test download of a user auto-detecting parent-type ret = invokeCli(('download', '/user/mylogin', downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertTrue( os.path.isfile( os.path.join(downloadDir, 'Public', 'testdata', 'hello.txt'))) shutil.rmtree(downloadDir, ignore_errors=True) # Test download of an item items = list(Folder().childItems(folder=subfolder)) item_id = items[0]['_id'] item_name = items[0]['name'] ret = invokeCli( ('download', '--parent-type=item', '%s' % item_id, downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertTrue(os.path.isfile(os.path.join(downloadDir, item_name))) shutil.rmtree(downloadDir, ignore_errors=True) # Test download of a file os.makedirs(downloadDir) items = list(Folder().childItems(folder=subfolder)) file_name, file_doc = next(Item().fileList(items[0], data=False)) ret = invokeCli( ('download', '--parent-type=file', '%s' % file_doc['_id'], os.path.join(downloadDir, file_name)), username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertTrue(os.path.isfile(os.path.join(downloadDir, file_name))) shutil.rmtree(downloadDir, ignore_errors=True) # Test download of an item auto-detecting parent-type ret = invokeCli(('download', '%s' % item_id, downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) self.assertTrue(os.path.isfile(os.path.join(downloadDir, item_name))) shutil.rmtree(downloadDir, ignore_errors=True) def _check_upload(ret): self.assertEqual(ret['exitVal'], 0) six.assertRegex( self, ret['stdout'], 'Creating Folder from .*tests/cases/py_client/testdata') self.assertIn('Uploading Item from hello.txt', ret['stdout']) # Try uploading using API key _check_upload(invokeCli(['--api-key', self.apiKey['key']] + args)) # Try uploading using API key set with GIRDER_API_KEY env. 
variable os.environ["GIRDER_API_KEY"] = self.apiKey['key'] _check_upload(invokeCli(args)) del os.environ["GIRDER_API_KEY"] # Test localsync, it shouldn't touch files on 2nd pass ret = invokeCli(('localsync', str(subfolder['_id']), downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) old_mtimes = {} for fname in os.listdir(downloadDir): filename = os.path.join(downloadDir, fname) old_mtimes[fname] = os.path.getmtime(filename) ret = invokeCli(('localsync', str(subfolder['_id']), downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0) for fname in os.listdir(downloadDir): if fname == '.girder_metadata': continue filename = os.path.join(downloadDir, fname) self.assertEqual(os.path.getmtime(filename), old_mtimes[fname]) # Check that localsync command do not show '--parent-type' option help ret = invokeCli(('localsync', '--help')) self.assertNotIn('--parent-type', ret['stdout']) self.assertEqual(ret['exitVal'], 0) # Check that localsync command still accepts '--parent-type' argument ret = invokeCli( ('localsync', '--parent-type', 'folder', str( subfolder['_id']), downloadDir), username='******', password='******') self.assertEqual(ret['exitVal'], 0)
def assertMatch(self, originalFilename, csvFilename):
    """Assert that the filename in the CSV matches the original filename."""
    regex = self.matchFilenameRegex(csvFilename)
    six.assertRegex(self, originalFilename, regex)
def test_script(self):
    assertRegex(self, self.build('script'),
                re.compile(r'^\s*hello, world!$', re.MULTILINE))
    self.assertExists(output_file('file'))
def test_alias(self):
    output = self.build('hello-world')
    assertRegex(self, output, re.compile(r'^\s*hello$', re.MULTILINE))
    assertRegex(self, output, re.compile(r'^\s*world$', re.MULTILINE))
def test_world(self):
    assertRegex(self, self.build('world'),
                re.compile(r'^\s*world$', re.MULTILINE))
def test_hello(self):
    assertRegex(self, self.build('hello'),
                re.compile(r'^\s*hello$', re.MULTILINE))
def test_repr(self):
    r = '%r' % self.uzbl
    six.assertRegex(self, r, r'<uzbl\(.*\)>')
def test_unique(self):
    output = self.get_output(
        ['-c', 'county', 'examples/realdata/ks_1033_data.csv'])
    six.assertRegex(self, output, r'Unique values:\s+73')
def _check_upload(ret):
    self.assertEqual(ret['exitVal'], 0)
    six.assertRegex(
        self, ret['stdout'],
        'Creating Folder from .*tests/cases/py_client/testdata')
    self.assertIn('Uploading Item from hello.txt', ret['stdout'])
def test_max_length(self):
    output = self.get_output(
        ['-c', 'county', 'examples/realdata/ks_1033_data.csv'])
    six.assertRegex(self, output, r'Longest value:\s+12')
def test_search_reindex(self):
    six.assertRegex(self, execute('find -q keyvault list --reindex'),
                    'az keyvault certificate list-versions')
def runTest(self):
    lines = str(self.g).split('\n')
    self.assertEqual(lines[0], '_ped.Geometry instance --')
    self.assertEqual(lines[1], ' start: 10 end: 109 length: 100')
    six.assertRegex(self, lines[2], '^ device: <_ped.Device object at .*')
def cmake_find_package_frameworks_test(self):
    conanfile = """from conans import ConanFile, tools

class Test(ConanFile):
    name = "Test"
    version = "0.1"

    def package_info(self):
        self.cpp_info.frameworks.append("Foundation")
"""
    client = TestClient()
    client.save({"conanfile.py": conanfile})
    client.run("export . user/channel")

    conanfile = """from conans import ConanFile, tools, CMake

class Consumer(ConanFile):
    name = "consumer"
    version = "0.1"
    requires = "Test/0.1@user/channel"
    generators = "cmake_find_package"
    exports_sources = "CMakeLists.txt"
    settings = "os", "arch", "compiler"

    def build(self):
        cmake = CMake(self)
        cmake.configure()
"""
    cmakelists = """
project(consumer)
cmake_minimum_required(VERSION 3.1)
find_package(Test)
message("Libraries to link: ${Test_LIBS}")
message("Version: ${Test_VERSION}")
message("Frameworks: ${Test_FRAMEWORKS}")
message("Frameworks found: ${Test_FRAMEWORKS_FOUND}")
get_target_property(tmp Test::Test INTERFACE_LINK_LIBRARIES)
message("Target libs: ${tmp}")
"""
    client.save({"conanfile.py": conanfile, "CMakeLists.txt": cmakelists})
    client.run("create . user/channel --build missing")

    self.assertIn("Libraries to link:", client.out)
    self.assertIn('Found Test: 0.1 (found version "0.1")', client.out)
    self.assertIn("Version: 0.1", client.out)
    self.assertIn("Frameworks: Foundation", client.out)
    six.assertRegex(
        self, str(client.out),
        r"Frameworks found: [^\s]*/System/Library/Frameworks/Foundation.framework")
    six.assertRegex(
        self, str(client.out),
        r"Target libs: [^\s]*/System/Library/Frameworks/Foundation.framework;;")
    self.assertNotIn(
        "Foundation.framework not found in package, might be system one",
        client.out)
    if six.PY2:
        self.assertNotRegexpMatches(
            str(client.out), r"Libraries to link: .*Foundation\.framework")
    else:
        self.assertNotRegex(
            str(client.out), r"Libraries to link: .*Foundation\.framework")
def test_createTestCommand_testfileAndExtraArgs(self):
    """The correct create_test command should be run with a testfile and extra arguments

    This test covers three things:
    (1) The use of a testfile argument
    (2) The use of a bunch of optional arguments that are passed along to create_test
    (3) That a cs.status.fails file was created
    """
    machine = self._make_machine(account='myaccount')
    testroot_base = os.path.join(self._scratch, 'my', 'testroot')
    run_sys_tests(machine=machine, cime_path=self._cime_path(),
                  testfile='/path/to/testfile',
                  testid_base='mytestid',
                  testroot_base=testroot_base,
                  compare_name='mycompare',
                  generate_name='mygenerate',
                  baseline_root='myblroot',
                  walltime='3:45:67',
                  queue='runqueue',
                  extra_create_test_args='--some extra --createtest args')

    all_commands = machine.job_launcher.get_commands()
    self.assertEqual(len(all_commands), 1)
    command = all_commands[0].cmd
    six.assertRegex(self, command, r'--test-id +mytestid(\s|$)')
    expected_testroot = os.path.join(testroot_base, 'tests_mytestid')
    six.assertRegex(self, command, r'--test-root +{}(\s|$)'.format(expected_testroot))
    six.assertRegex(self, command, r'--testfile +/path/to/testfile(\s|$)')
    six.assertRegex(self, command, r'--compare +mycompare(\s|$)')
    six.assertRegex(self, command, r'--generate +mygenerate(\s|$)')
    six.assertRegex(self, command, r'--baseline-root +myblroot(\s|$)')
    six.assertRegex(self, command, r'--walltime +3:45:67(\s|$)')
    six.assertRegex(self, command, r'--queue +runqueue(\s|$)')
    six.assertRegex(self, command, r'--project +myaccount(\s|$)')
    six.assertRegex(self, command, r'--some +extra +--createtest +args(\s|$)')

    expected_cs_status = os.path.join(expected_testroot, 'cs.status.fails')
    self.assertTrue(os.path.isfile(expected_cs_status))
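# Hedged illustration of the "--flag +value(\s|$)" idiom used above: it accepts
# the option whether it is followed by further arguments or ends the command
# line (the command strings here are hypothetical).
import re

assert re.search(r'--test-id +mytestid(\s|$)', 'create_test --test-id mytestid --retry 0')
assert re.search(r'--test-id +mytestid(\s|$)', 'create_test --test-id mytestid')
assert not re.search(r'--test-id +mytestid(\s|$)', 'create_test --test-id mytestid2')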
def test_should_match_when_tab_present(self):
    self.exp = self.v.start_of_line().anything().tab().end_of_line().regex()
    six.assertRegex(self, 'One tab only\t', self.exp, 'No tab here!')
def test___init__(self):
    request = self.class_()
    six.assertRegex(self, request["requestreference"], "A[a-z0-9]+")
    self.assertEqual(securetrading.version_info, self.version_info)
def test_delete_snapshot(self):
    snapshot = self.client.delete_snapshot(snapshot_id=self.snapshot2['id'])
    self.assertTrue(snapshot)
    assertRegex(self, snapshot['requestId'], self.resource['uuid_match'])
def test_should_match_when_line_break_and_carriage_return_present(self):
    self.exp = self.v.start_of_line().anything().line_break().anything().end_of_line().regex()
    six.assertRegex(self, 'Marco \r\n Polo', self.exp, 'Give me a break!!')
def test(self):
    with self.assertRaises(AssertionError):
        six.assertRegex(self, 'test', r'^a')
    six.assertRegex(self, 'test', r'^t')
def test_several(self):
    """
    Verify that log functions work consistently in series.

    Given: FuzzLoggerCsv with a virtual file handle.
    When: Calling open_test_case with some test_case_id.
     and: Calling open_test_step with some description.
     and: Calling log_recv with some data.
     and: Calling log_send with some data.
     and: Calling log_info with some description.
     and: Calling log_check with some description.
     and: Calling log_fail with some description.
     and: Calling log_pass with some description.
     and: Calling log_error with some description.
    Then: All methods log as expected.
    """
    # When
    self.logger.open_test_case(self.some_test_case_id,
                               name=self.some_test_case_name,
                               index=self.some_test_case_index)
    self.logger.open_test_step(self.some_test_step_msg)
    self.logger.log_recv(self.some_recv_data)
    self.logger.log_send(self.some_send_data)
    self.logger.log_info(self.some_log_info_msg)
    self.logger.log_check(self.some_log_check_msg)
    self.logger.log_fail(self.some_log_fail_msg)
    self.logger.log_pass(self.some_log_pass_msg)
    self.logger.log_error(self.some_log_error_msg)

    # Then
    self.virtual_file.seek(0)
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape(
                        "open test case,,,Test case " + self.some_test_case_id + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape(
                        "open step,,," + self.some_test_step_msg + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape(
                        "recv," + str(len(self.some_recv_data)) + "," +
                        fuzz_logger_csv.DEFAULT_HEX_TO_STR(self.some_recv_data) + "," +
                        self.some_recv_data.decode() + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape(
                        "send," + str(len(self.some_send_data)) + "," +
                        fuzz_logger_csv.DEFAULT_HEX_TO_STR(self.some_send_data) + "," +
                        self.some_send_data.decode() + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape("info,,," + self.some_log_info_msg + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape("check,,," + self.some_log_check_msg + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape("fail,,," + self.some_log_fail_msg + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape("pass,,," + self.some_log_pass_msg + "\r\n"))
    six.assertRegex(self, self.virtual_file.readline(),
                    LOGGER_PREAMBLE + re.escape("error,,," + self.some_log_error_msg + "\r\n"))
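# Minimal sketch (assumptions, not the project's real constants): each expected
# CSV row above is the module's LOGGER_PREAMBLE regex followed by the escaped
# row text; a placeholder preamble shows how that concatenation matches a line.
import re

_LOGGER_PREAMBLE = r'.*,'  # placeholder; the real value comes from the test module
_row = 'info,,,some description\r\n'
assert re.match(_LOGGER_PREAMBLE + re.escape(_row),
                '2020-01-01 00:00:00.000,' + _row)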
def testVersion(self):
    cmd = [FLAGS.mtt_path, '--no_check_update', 'version']
    outs, _ = _RunCmd(cmd)
    six.assertRegex(self, outs, r'Version: .*')
def test_use_filename(self):
    """Should use the file name of the uploaded file in the public id when use_filename is specified."""
    result = uploader.upload(TEST_IMAGE, use_filename=True, tags=[UNIQUE_TAG])
    six.assertRegex(self, result["public_id"], 'logo_[a-z0-9]{6}')

    result = uploader.upload(TEST_IMAGE, use_filename=True,
                             unique_filename=False, tags=[UNIQUE_TAG])
    self.assertEqual(result["public_id"], 'logo')
def test_upload_preset(self):
    """ should support unsigned uploading using presets """
    preset = api.create_upload_preset(folder="upload_folder", unsigned=True,
                                      tags=[UNIQUE_TAG])
    result = uploader.unsigned_upload(TEST_IMAGE, preset["name"], tags=[UNIQUE_TAG])
    six.assertRegex(self, result["public_id"], r'^upload_folder\/[a-z0-9]+$')
    api.delete_upload_preset(preset["name"])
def testAutoDescribeRoute(self): testRuns = [] registerAccessFlag('my_flag', name='My flag') class AutoDescribe(Resource): def __init__(self): super(AutoDescribe, self).__init__() self.resourceName = 'auto_describe' self.route('GET', ('test', ), self.test) self.route('POST', ('body', ), self.body) self.route('POST', ('json_body', ), self.jsonBody) self.route('POST', ('json_body_required', ), self.jsonBodyRequired) self.route('GET', ('model_param_flags', ), self.hasModelParamFlags) self.route('GET', ('model_param_query', ), self.hasModelQueryParam) self.route('GET', ('json_schema', ), self.hasJsonSchema) self.route('GET', ('missing_arg', ), self.hasMissingArg) @access.public @describe.autoDescribeRoute( describe.Description('test').param( 'b1', '', dataType='boolean', required=False, default=True).param('b2', '', dataType='boolean', required=False).param( 'float', '', dataType='number', required=False, default=1.0).param( 'integer', '', dataType='integer', required=False).param( 'timestamp', '', dataType='dateTime', required=False). param('datestamp', '', dataType='date', required=False).param( 'string', '', enum=['hello', 'world'], strip=True, lower=True).param('upper', '', required=False, upper=True).jsonParam( 'json1', '', required=False, requireArray=True).jsonParam( 'json2', '', required=False, requireObject=True, default={})) def test(self, b1, b2, string, upper, integer, float, timestamp, datestamp, json1, json2, params): testRuns.append({ 'b1': b1, 'b2': b2, 'string': string, 'upper': upper, 'integer': integer, 'float': float, 'timestamp': timestamp, 'datestamp': datestamp, 'json1': json1, 'json2': json2 }) @access.public @describe.autoDescribeRoute( describe.Description('body').param('body', '', required=False, paramType='body')) def body(self, body): testRuns.append({'body': body}) @access.public @describe.autoDescribeRoute( describe.Description('json_body').jsonParam('json_body', '', required=False, paramType='body')) def jsonBody(self, json_body): testRuns.append({'json_body': json_body}) @access.public @describe.autoDescribeRoute( describe.Description('json_body_required').jsonParam( 'json_body', '', required=True, requireObject=True, paramType='body')) def jsonBodyRequired(self, json_body): testRuns.append({'json_body': json_body}) @access.public @describe.autoDescribeRoute( describe.Description('has_model_param_query').modelParam( 'userId', model='user', level=AccessType.READ, paramType='query')) @filtermodel(model='user') def hasModelQueryParam(self, user): return user @access.public @describe.autoDescribeRoute( describe.Description('has_model_param_flags').modelParam( 'userId', model='user', level=AccessType.READ, paramType='query', requiredFlags='my_flag')) def hasModelParamFlags(self, user): return user @access.public @describe.autoDescribeRoute( describe.Description('has_json_schema').jsonParam( 'obj', '', schema={ 'type': 'object', 'required': ['foo', 'bar'] })) def hasJsonSchema(self, obj): return obj @access.public @describe.autoDescribeRoute( describe.Description('has_missing_arg').param('foo', '')) def hasMissingArg(self, params): return params server.root.api.v1.auto_describe = AutoDescribe() def testBad(inputs, expected): resp = self.request('/auto_describe/test', params=inputs) self.assertStatus(resp, 400) self.assertEqual(testRuns, []) self.assertEqual(resp.json['message'], expected) def testOk(inputs, expected): resp = self.request('/auto_describe/test', params=inputs) self.assertStatusOk(resp) self.assertEqual(len(testRuns), 1) self.assertEqual(testRuns[0], 
expected) del testRuns[-1] testBad({}, 'Parameter "string" is required.') testBad({ 'string': 'invalid value' }, 'Invalid value for string: "invalid value". Allowed values: hello, world.' ) testBad({ 'string': 'hello', 'float': 'not a float' }, 'Invalid value for numeric parameter float: not a float.') testBad({ 'string': 'hello', 'integer': '7.5' }, 'Invalid value for integer parameter integer: 7.5.') testBad({ 'string': 'hello', 'timestamp': 'hello world' }, 'Invalid date format for parameter timestamp: hello world.') testBad({ 'string': 'hello', 'datestamp': 'not a date' }, 'Invalid date format for parameter datestamp: not a date.') testBad({ 'string': 'hello', 'json1': json.dumps({'hello': 'world'}) }, 'Parameter json1 must be a JSON array.') testBad({ 'string': 'hello', 'json2': json.dumps(['hello', 'world']) }, 'Parameter json2 must be a JSON object.') testOk({'string': ' WoRlD '}, { 'string': 'world', 'upper': None, 'b1': True, 'b2': None, 'integer': None, 'float': 1., 'json1': None, 'json2': {}, 'timestamp': None, 'datestamp': None }) testOk( { 'string': ' hello', 'upper': ' hello', 'b1': 'false', 'b2': 'true', 'integer': '3', 'float': '0.25', 'json1': json.dumps([1, 2, 'abc']), 'json2': json.dumps({'hello': 'world'}), 'timestamp': '2017-01-01T11:35:22', 'datestamp': '2017-02-02T11:33:22' }, { 'string': 'hello', 'upper': ' HELLO', 'b1': False, 'b2': True, 'integer': 3, 'float': 0.25, 'json1': [1, 2, 'abc'], 'json2': { 'hello': 'world' }, 'timestamp': datetime.datetime(2017, 1, 1, 11, 35, 22), 'datestamp': datetime.date(2017, 2, 2) }) # Test request body body = 'torso' resp = self.request('/auto_describe/body', method='POST', body=json.dumps(body), type='application/json') self.assertStatusOk(resp) self.assertEqual(len(testRuns), 1) self.assertTrue('body' in testRuns[0]) self.assertTrue(hasattr(testRuns[0]['body'], 'read')) del testRuns[-1] # Test request JSON body (optional) body = {'emmet': 'otter'} resp = self.request('/auto_describe/json_body', method='POST', body=json.dumps(body), type='application/json') self.assertStatusOk(resp) self.assertEqual(len(testRuns), 1) expected = {'json_body': body} self.assertEqual(testRuns[0], expected) del testRuns[-1] # Test request JSON body (optional), omitting body resp = self.request('/auto_describe/json_body', method='POST') self.assertStatusOk(resp) self.assertEqual(len(testRuns), 1) expected = {'json_body': None} self.assertEqual(testRuns[0], expected) del testRuns[-1] # Test request JSON body (required) body = {'emmet': 'otter'} resp = self.request('/auto_describe/json_body_required', method='POST', body=json.dumps(body), type='application/json') self.assertStatusOk(resp) self.assertEqual(len(testRuns), 1) expected = {'json_body': body} self.assertEqual(testRuns[0], expected) del testRuns[-1] # Test request JSON body (required), omitting body resp = self.request('/auto_describe/json_body_required', method='POST') self.assertStatus(resp, 400) # Test request JSON body (required), pass list body = [{'emmet': 'otter'}] resp = self.request('/auto_describe/json_body_required', method='POST', body=json.dumps(body), type='application/json') self.assertStatus(resp, 400) # Test omission of required modelParam resp = self.request('/auto_describe/model_param_query') self.assertStatus(resp, 400) self.assertEqual(resp.json['message'], 'Parameter "userId" is required.') resp = self.request('/auto_describe/model_param_query', params={'userId': None}) self.assertStatus(resp, 400) self.assertEqual(resp.json['message'], 'Invalid ObjectId: None') user = 
User().createUser(firstName='admin', lastName='admin', email='*****@*****.**', login='******', password='******') resp = self.request('/auto_describe/model_param_query', user=user, params={'userId': user['_id']}) self.assertStatusOk(resp) # Test requiredFlags in modelParam resp = self.request('/auto_describe/model_param_flags', params={'userId': user['_id']}) self.assertStatus(resp, 401) six.assertRegex(self, resp.json['message'], '^Access denied for user') resp = self.request('/auto_describe/json_schema', params={'obj': json.dumps([])}) self.assertStatus(resp, 400) six.assertRegex( self, resp.json['message'], r"^Invalid JSON object for parameter obj: \[\] is not of type 'object'" ) resp = self.request('/auto_describe/json_schema', params={'obj': json.dumps({})}) self.assertStatus(resp, 400) six.assertRegex( self, resp.json['message'], r"^Invalid JSON object for parameter obj: 'foo' is a required property" ) obj = {'foo': 1, 'bar': 2} resp = self.request('/auto_describe/json_schema', params={'obj': json.dumps(obj)}) self.assertStatusOk(resp) self.assertEqual(resp.json, obj) # Test missing arg in wrapped function, should fall through to params dict resp = self.request('/auto_describe/missing_arg', params={'foo': 'bar'}) self.assertStatusOk(resp) self.assertEqual(resp.json, {'foo': 'bar'})
def _AssertRunfilesLibraryInBazelToolsRepo(self, family, lang_name): for s, t, exe in [ ("WORKSPACE.mock", "WORKSPACE", False), ("foo/BUILD.mock", "foo/BUILD", False), ("foo/foo.py", "foo/foo.py", True), ("foo/Foo.java", "foo/Foo.java", False), ("foo/foo.sh", "foo/foo.sh", True), ("foo/datadep/hello.txt", "foo/datadep/hello.txt", False), ("bar/BUILD.mock", "bar/BUILD", False), ("bar/bar.py", "bar/bar.py", True), ("bar/bar-py-data.txt", "bar/bar-py-data.txt", False), ("bar/Bar.java", "bar/Bar.java", False), ("bar/bar-java-data.txt", "bar/bar-java-data.txt", False), ("bar/bar.sh", "bar/bar.sh", True), ("bar/bar-sh-data.txt", "bar/bar-sh-data.txt", False)]: self.CopyFile( self.Rlocation("io_bazel/src/test/py/bazel/testdata/runfiles_test/" + s), t, exe) exit_code, stdout, stderr = self.RunBazel(["info", "bazel-bin"]) self.AssertExitCode(exit_code, 0, stderr) bazel_bin = stdout[0] exit_code, _, stderr = self.RunBazel(["build", "//foo:runfiles-" + family]) self.AssertExitCode(exit_code, 0, stderr) if test_base.TestBase.IsWindows(): bin_path = os.path.join(bazel_bin, "foo/runfiles-%s.exe" % family) else: bin_path = os.path.join(bazel_bin, "foo/runfiles-" + family) self.assertTrue(os.path.exists(bin_path)) exit_code, stdout, stderr = self.RunProgram( [bin_path], env_add={"TEST_SRCDIR": "__ignore_me__"}) self.AssertExitCode(exit_code, 0, stderr) if len(stdout) != 8: self.fail("stdout: %s" % stdout) self.assertEqual(stdout[0], "Hello %s Foo!" % lang_name) six.assertRegex(self, stdout[1], "^rloc=.*/foo/datadep/hello.txt") self.assertNotIn("__ignore_me__", stdout[1]) with open(stdout[1].split("=", 1)[1], "r") as f: lines = [l.strip() for l in f.readlines()] if len(lines) != 1: self.fail("lines: %s" % lines) self.assertEqual(lines[0], "world") i = 2 for lang in [("py", "Python", "bar.py"), ("java", "Java", "Bar.java"), ("sh", "Bash", "bar.sh")]: self.assertEqual(stdout[i], "Hello %s Bar!" % lang[1]) six.assertRegex(self, stdout[i + 1], "^rloc=.*/bar/bar-%s-data.txt" % lang[0]) self.assertNotIn("__ignore_me__", stdout[i + 1]) with open(stdout[i + 1].split("=", 1)[1], "r") as f: lines = [l.strip() for l in f.readlines()] if len(lines) != 1: self.fail("lines(%s): %s" % (lang[0], lines)) self.assertEqual(lines[0], "data for " + lang[2]) i += 2