def test1(self): """ it should return a data if a valid path was given """ ### make requset with a vaild path ### request = tornado.httpclient.HTTPRequest(\ url=self.get_url('/document/admin/administrator/pages'),\ method="GET", ) # wait for response self.http_client.fetch(request, self.stop) response = self.wait() # get data data = json.loads(response.body) # response is a json assert_equals(type(data), dict) assert_is_not_none(data['resp']) assert_is_not_none(data['cache']) # response is okay assert_equals(response.reason, 'OK') # there where no errors assert_is_none(response.error)
def test_has_required_lazy():
    """Lazy loading defers a marshaller's module imports until first use."""
    m = hdf5storage.Marshallers.TypeMarshaller()
    m.required_parent_modules = ['json']
    m.required_modules = ['json']
    m.python_type_strings = ['ellipsis']
    m.types = ["<type '" + s + "'>" for s in m.python_type_strings]
    # The required modules must not be loaded yet.
    for name in m.required_modules:
        assert_not_in(name, sys.modules)
    mc = hdf5storage.MarshallerCollection(lazy_loading=True,
                                          marshallers=[m])
    # Constructing the collection must not trigger the imports.
    for name in m.required_modules:
        assert_not_in(name, sys.modules)
    assert mc._has_required_modules[-1]
    assert_false(mc._imported_required_modules[-1])
    # Looking the marshaller up performs the deferred imports.
    mback, has_modules = mc.get_marshaller_for_type_string(
        m.python_type_strings[0])
    assert_is_not_none(mback)
    assert has_modules
    assert mc._has_required_modules[-1]
    assert mc._imported_required_modules[-1]
    for name in m.required_modules:
        assert_in(name, sys.modules)
    # Do it again, but this time the modules are already loaded so that
    # flag should be set from the start.
    mc = hdf5storage.MarshallerCollection(lazy_loading=True,
                                          marshallers=[m])
    assert mc._has_required_modules[-1]
    assert mc._imported_required_modules[-1]
    mback, has_modules = mc.get_marshaller_for_type_string(
        m.python_type_strings[0])
    assert_is_not_none(mback)
    assert has_modules
    assert mc._has_required_modules[-1]
    assert mc._imported_required_modules[-1]
def test_teams():
    """Round-trip a Team through store/get and look it up by name."""
    name = "My Uniquely Named Team " + str(uuid.uuid4())
    team = syn.store(Team(name=name, description="A fake team for testing..."))
    schedule_for_cleanup(team)
    assert_equals(team, syn.getTeam(team.id))
    # The current user must appear among the team's members.
    profile = syn.getUserProfile()
    found = None
    for member in syn.getTeamMembers(team):
        if member.member.ownerId == profile.ownerId:
            found = member
            break
    assert_is_not_none(found, "Couldn't find user {} in team".format(profile.userName))
    # needs to be retried 'cause appending to the search index is asynchronous
    tries = 10
    found_team = None
    while tries > 0:
        try:
            found_team = syn.getTeam(name)
            break
        except ValueError:
            tries -= 1
            if tries > 0:
                time.sleep(1)
    assert_equals(team, found_team)
def test_register_site_command():
    """Registering a site populates the client's oxd_id."""
    client = Client(config_location)
    # Force a fresh registration by clearing any preset id.
    client.oxd_id = None
    assert_is_none(client.oxd_id)
    client.register_site()
    assert_is_not_none(client.oxd_id)
def test_access_restrictions(self):
    """Unmet access restrictions allow metadata fetch but block download."""
    bundle = {
        'annotations': {
            'etag': 'cbda8e02-a83e-4435-96d0-0af4d3684a90',
            'id': 'syn1000002',
            'stringAnnotations': {}},
        'entity': {
            'concreteType': 'org.sagebionetworks.repo.model.FileEntity',
            'createdBy': 'Miles Dewey Davis',
            'entityType': 'org.sagebionetworks.repo.model.FileEntity',
            'etag': 'cbda8e02-a83e-4435-96d0-0af4d3684a90',
            'id': 'syn1000002',
            'name': 'so_what.mp3',
            'parentId': 'syn1000001',
            'versionLabel': '1',
            'versionNumber': 1,
            'dataFileHandleId': '42'},
        'entityType': 'org.sagebionetworks.repo.model.FileEntity',
        'fileHandles': [{'id': '42'}],
        'restrictionInformation': {'hasUnmetAccessRequirement': True}
    }
    with patch.object(syn, '_getEntityBundle', return_value=bundle):
        # Metadata-only fetch succeeds but no file path is populated.
        entity = syn.get('syn1000002', downloadFile=False)
        assert_is_not_none(entity)
        assert_is_none(entity.path)

        # Downloading the file is the default, but is an error if we have
        # unmet access requirements.
        assert_raises(synapseclient.exceptions.SynapseUnmetAccessRestrictions,
                      syn.get, 'syn1000002', downloadFile=True)
def test_synStore_sftpIntegration():
    """Creates a File Entity on an sftp server and adds the external url."""
    filepath = utils.make_bogus_binary_file(1 * MB - 777771)
    try:
        file = syn.store(File(filepath, parent=project))
        file2 = syn.get(file)
        assert file.externalURL == file2.externalURL and urlparse(file2.externalURL).scheme == "sftp"

        tmpdir = tempfile.mkdtemp()
        schedule_for_cleanup(tmpdir)

        # Exercise the filename override.
        file2.fileNameOverride = "whats_new_in_baltimore.data"
        file2 = syn.store(file2)
        # TODO We haven't defined how filename override interacts with
        # TODO previously cached files so, side-step that for now by
        # TODO making sure the file is not in the cache!
        syn.cache.remove(file2.dataFileHandleId, delete=True)

        file3 = syn.get(file, downloadLocation=tmpdir)
        assert os.path.basename(file3.path) == file2.fileNameOverride

        # The download must carry an MD5, à la SYNPY-185.
        assert_is_not_none(file3.md5)
        fh = syn._getFileHandle(file3.dataFileHandleId)
        assert_is_not_none(fh["contentMd5"])
        assert_equals(file3.md5, fh["contentMd5"])
    finally:
        # Best-effort cleanup of the local scratch file.
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_which(self):
    """`which` finds real executables and returns None for bogus ones."""
    python_path = which.which('python')
    assert_is_not_none(python_path, 'python is not discovered in the'
                                    ' executable path')
    bogus_path = which.which('bogus')
    assert_is_none(bogus_path, "bogus is found in the executable path"
                               " at: %s" % bogus_path)
def __test_save_article(self):
    """Post three articles and remember their URLs for later checks."""
    # First article: verify the full response payload.
    self.skr_article_data['title'] = compare_title
    self.skr_article_data['content'] = compare_content
    payload = json.dumps(self.skr_article_data)
    response = test_app.post('/api/v1/article', data=payload,
                             content_type='application/json')
    tools.assert_equals(response.status_code, 200)
    json_resp = json.loads(response.data)
    tools.assert_equals(response.status_code, 200)
    tools.assert_is_not_none(json_resp.get('data'))
    tools.assert_is_not_none(json_resp.get('data').get('source'))

    # Second article: only the status code matters.
    self.tech_article_data['title'] = test_title
    self.tech_article_data['content'] = test_content
    payload = json.dumps(self.tech_article_data)
    response = test_app.post('/api/v1/article', data=payload,
                             content_type='application/json')
    tools.assert_equals(response.status_code, 200)

    # Third article.
    self.tesla_article_data['title'] = test_title
    self.tesla_article_data['content'] = test_content
    payload = json.dumps(self.tesla_article_data)
    response = test_app.post('/api/v1/article', data=payload,
                             content_type='application/json')
    tools.assert_equals(response.status_code, 200)

    # Track the URLs so other steps can find the articles.
    self.article_url_list.append(self.skr_article_data['url'])
    self.article_url_list.append(self.tech_article_data['url'])
    self.article_url_list.append(self.tesla_article_data['url'])
def test_sql(self):
    """Exercise the %sql magic with params/multiparams bindings."""
    thing = self.magics.sql('-r -f select * from blah where something = 1')
    nt.assert_is_not_none(thing)  # uhm, not sure what to check...
    lst = [{'x': 'y'}, {'e': 'f'}]
    d = {'a': 'b'}
    dct = {
        'zzz': d,
        'yyy': lst,
    }
    # params and multiparams: -a binds params from the user namespace,
    # -m binds multiparams.
    ret = self.ipydb.execute.return_value
    ret.returns_rows = True
    self.ipython.user_ns = dct
    self.magics.sql('-a zzz -m yyy select * from foo')
    self.ipydb.execute.assert_called_with(
        'select * from foo', params=d, multiparams=lst)
    ret.returns_rows = False
    # BUG FIX: was `ret.rowount = 2` — a typo that set an unused attribute
    # on the mock instead of the `rowcount` the magic actually consults.
    ret.rowcount = 2
    self.magics.sql('-a zzz -m yyy select * from foo')
    # -r returns the raw result object.
    r = self.magics.sql('-r select * from foo')
    nt.assert_equal(ret, r)
def test_correct():
    """Tests the valid overall process."""
    tmp = NamedTemporaryFile()
    # Write a small but complete fault-tree description.
    tmp.write("ValidFaultTree\n\n")
    tmp.write("root := g1 | g2 | g3 | g4 | g7 | e1\n")
    tmp.write("g1 := e2 & g3 & g5\n")
    tmp.write("g2 := h1 & g6\n")
    tmp.write("g3 := (g6 ^ e2)\n")
    tmp.write("g4 := @(2, [g5, e3, e4])\n")
    tmp.write("g5 := ~(e3)\n")
    tmp.write("g6 := (e3 | e4)\n\n")
    tmp.write("g7 := g8\n\n")
    tmp.write("g8 := ~e2 & ~e3\n\n")
    tmp.write("p(e1) = 0.1\n")
    tmp.write("p(e2) = 0.2\n")
    tmp.write("p(e3) = 0.3\n")
    tmp.write("s(h1) = true\n")
    tmp.write("s(h2) = false\n")
    tmp.flush()

    fault_tree = parse_input_file(tmp.name)
    assert_is_not_none(fault_tree)
    # e4 has no probability, so exactly one event stays undefined.
    yield assert_equal, 9, len(fault_tree.gates)
    yield assert_equal, 3, len(fault_tree.basic_events)
    yield assert_equal, 2, len(fault_tree.house_events)
    yield assert_equal, 1, len(fault_tree.undefined_events())

    # The XML serialization must validate against the RelaxNG schema.
    out = NamedTemporaryFile()
    out.write("<?xml version=\"1.0\"?>\n")
    out.write(fault_tree.to_xml())
    out.flush()
    relaxng_doc = etree.parse("../share/input.rng")
    relaxng = etree.RelaxNG(relaxng_doc)
    with open(out.name, "r") as test_file:
        doc = etree.parse(test_file)
        assert_true(relaxng.validate(doc))
def test_cluster_get(self): """ 测试cluster的get接口 """ service = ClusterService('2018/08/15') service.save_to_db() response = test_app.get('/api/v1/cluster?day=20180815') tools.assert_equals(response.status_code, 200) json_resp = json.loads(response.data) tools.assert_equals(response.status_code, 200) tools.assert_is_not_none(json_resp.get('data')) data = json_resp.get('data') tools.assert_equals(len(data), 2) news = data[0]['news'] tools.assert_equals(data[0]['topic']['title'], news[0]['title']) tools.assert_equals(news[0]['title'], news[1]['title']) first_topic = data[0]['topic']['title'] second_topic = data[1]['topic']['title'] # test update cluster, topic unchanged self.skr_article_data['title'] = compare_title self.skr_article_data['content'] = compare_content self.skr_article_data['url'] = 'http://www.skr.net/yeah/' data = json.dumps(self.skr_article_data) response = test_app.post('/api/v1/article', data=data, content_type='application/json') tools.assert_equals(response.status_code, 200) service = ClusterService('2018/08/15') service.save_to_db() response = test_app.get('/api/v1/cluster?day=20180815') tools.assert_equals(response.status_code, 200) json_resp = json.loads(response.data) tools.assert_equals(response.status_code, 200) tools.assert_is_not_none(json_resp.get('data')) data = json_resp.get('data') tools.assert_equals(len(data), 2) tools.assert_equals(first_topic, data[0]['topic']['title']) tools.assert_equals(second_topic, data[1]['topic']['title']) news = data[0]['news'] tools.assert_equals(data[0]['topic']['title'], news[0]['title']) tools.assert_equals(news[0]['title'], news[1]['title']) news = data[1]['news'] tools.assert_equals(data[1]['topic']['title'], news[0]['title']) tools.assert_equals(news[0]['title'], news[1]['title']) # test length of cluster is correct news_count = data[0]['news_count'] tools.assert_equals(news_count, 2) self.__test_send_mail()
def test_doi_config(self):
    """Both DOI account settings must be present in the CKAN config."""
    for key in ("ckanext.doi.account_name", "ckanext.doi.account_password"):
        assert_is_not_none(config.get(key))
def test_simple_origin_matching_on_not_first_origin(self):
    """An origin later in the allow-list still yields a credentials pair."""
    checker = OriginAuthentication()
    client_auth = checker.check_origin_permission('http://localhost:8000', self.dataset)
    # The permission check returns a (something, something) 2-tuple.
    assert_is_not_none(client_auth)
    assert_true(isinstance(client_auth, tuple))
    assert_equal(len(client_auth), 2)
def test_chorus_dois(self, test_data):
    """A CHORUS DOI in the db must resolve with no error and a fulltext URL."""
    doi = test_data
    # because cookies breaks the cache pickling
    # for doi_start in ["10.1109", "10.1161", "10.1093", "10.1007", "10.1039"]:
    #     if doi.startswith(doi_start):
    requests_cache.uninstall_cache()

    my_pub = pub.lookup_product_by_doi(doi)
    if not my_pub:
        logger.info(u"doi {} not in db, skipping".format(doi))
        return
    my_pub.refresh()

    logger.info(u"https://api.unpaywall.org/v2/{}?email=me".format(doi))
    logger.info(u"doi: https://doi.org/{}".format(doi))
    logger.info(u"license: {}".format(my_pub.best_license))
    logger.info(u"evidence: {}".format(my_pub.best_evidence))
    logger.info(u"host: {}".format(my_pub.best_host))
    if my_pub.error:
        logger.info(my_pub.error)

    assert_equals(my_pub.error, "")
    assert_is_not_none(my_pub.fulltext_url)
def test2(self): """ it should return a error if an invalid path was given """ ### make requset with invalid path ### request = tornado.httpclient.HTTPRequest(\ url=self.get_url('/document/admin/administrator/invalid/path'),\ method="GET", ) # wait for response self.http_client.fetch(request, self.stop) response = self.wait() # reason shold be Bad Request assert_equals(response.reason,'Bad Request') # there where errors assert_is_not_none(response.error) # it should return this error messsage assert_equals(response.body,'tornado threw an exception')
def test_ddt_data_name_attribute(): """ Test the ``__name__`` attribute handling of ``data`` items with ``ddt`` """ def hello(): pass class Myint(int): pass class Mytest(object): pass d1 = Myint(1) d1.__name__ = "data1" d2 = Myint(2) data_hello = data(d1, d2)(hello) setattr(Mytest, "test_hello", data_hello) ddt_mytest = ddt(Mytest) assert_is_not_none(getattr(ddt_mytest, "test_hello_1_data1")) assert_is_not_none(getattr(ddt_mytest, "test_hello_2_2"))
def test_elements_all_select_properties(self):
    """A SELECT element persists every field it was created with."""
    page = factories.PageFactory()
    concept = factories.ConceptFactory()
    Element.objects.create(
        display_index=0,
        element_type='SELECT',
        choices='[one, two, three]',
        concept=concept,
        question='test question',
        answer='',
        required=True,
        image='test',
        audio='ping',
        page=page
    )
    element = Element.objects.get(concept=concept)
    assert_equals(element.display_index, 0)
    assert_equals(element.element_type, 'SELECT')
    assert_equals(element.choices, '[one, two, three]')
    assert_equals(element.concept, concept)
    assert_equals(element.question, 'test question')
    assert_equals(element.answer, '')
    assert_equals(element.page, page)
    assert_true(element.required)
    assert_equals(element.image, 'test')
    assert_equals(element.audio, 'ping')
    # BUG FIX: the stray `None` second argument was being passed as the
    # assertion *message* (assert_is_not_none takes (value, msg)); drop it.
    assert_is_not_none(element.last_modified)
    assert_is_not_none(element.created)
def test_customers_create():
    """Creating a customer maps every fixture field onto the resource."""
    fixture = helpers.load_fixture('customers')['create']
    helpers.stub_response(fixture)
    response = helpers.client.customers.create(*fixture['url_params'])
    body = fixture['body']['customers']

    assert_is_instance(response, resources.Customer)
    # Create calls must send an idempotency key.
    assert_is_not_none(responses.calls[-1].request.headers.get('Idempotency-Key'))
    # Every scalar attribute should mirror the fixture body.
    for attr in ('address_line1', 'address_line2', 'address_line3', 'city',
                 'company_name', 'country_code', 'created_at',
                 'danish_identity_number', 'email', 'family_name',
                 'given_name', 'id', 'language', 'metadata', 'phone_number',
                 'postal_code', 'region', 'swedish_identity_number'):
        assert_equal(getattr(response, attr), body.get(attr))
def test_write_batch():
    """Batched writes are atomic, clearable, and accept write options."""
    with tmp_db('write_batch') as db:
        # Prepare a batch with some data
        wb = db.write_batch()
        for i in xrange(1000):
            wb.put(('batch-key-%d' % i).encode('UTF-8'), b'value')

        # Delete a key that was also set in the same (pending) batch
        wb.delete(b'batch-key-2')

        # The DB should not have any data before the batch is written...
        assert_is_none(db.get(b'batch-key-1'))

        # ...but it should have data afterwards
        wb.write()
        assert_is_not_none(db.get(b'batch-key-1'))
        assert_is_none(db.get(b'batch-key-2'))

        # Batches can be cleared
        wb = db.write_batch()
        wb.put(b'this-is-never-saved', b'')
        wb.clear()
        wb.write()
        assert_is_none(db.get(b'this-is-never-saved'))

        # Batches take write options
        wb = db.write_batch(sync=True)
        wb.put(b'batch-key-sync', b'')
        wb.write()
def test_copy_msg(self):
    """copy_msg returns a distinct, non-null copy of the message."""
    original = NetParameter()
    assert_is_not_none(original)
    duplicate = pu.copy_msg(original, NetParameter)
    # The copy must be a new object, not an alias of the original.
    assert_is_not(original, duplicate)
    assert_is_not_none(duplicate)
def test_ddt_data_name_attribute(): """ Test the ``__name__`` attribute handling of ``data`` items with ``ddt`` """ def hello(): pass class myint(int): pass class mytest(object): pass d1 = myint(1) d1.__name__ = 'data1' d2 = myint(2) data_hello = data(d1, d2)(hello) setattr(mytest, 'test_hello', data_hello) ddt_mytest = ddt(mytest) assert_is_not_none(getattr(ddt_mytest, 'test_hello_data1')) assert_is_not_none(getattr(ddt_mytest, 'test_hello_2'))
def i_create_a_local_ensemble_prediction_op(step, data=None, operating_point=None):
    """Create a local ensemble prediction using an operating point."""
    if data is None:
        data = "{}"
    # An operating point is mandatory for this step.
    assert_is_not_none(operating_point)
    input_data = json.loads(data)
    world.local_prediction = world.local_ensemble.predict(
        input_data, operating_point=operating_point)
def i_create_a_local_prediction_op_kind(step, data=None, operating_kind=None):
    """Create a local model prediction using an operating kind."""
    if data is None:
        data = "{}"
    # An operating kind is mandatory for this step.
    assert_is_not_none(operating_kind)
    input_data = json.loads(data)
    world.local_prediction = world.local_model.predict(
        input_data, operating_kind=operating_kind)
def _test_format_change(self, to_format):
    """Saving with a new format swaps the file's extension on disk."""
    controller = self._get_file_controller(MINIMAL_SUITE_PATH)
    assert_is_not_none(controller)
    controller.save_with_new_format(to_format)
    # The original file is gone; a sibling with the new extension exists.
    self._assert_removed(MINIMAL_SUITE_PATH)
    converted_path = os.path.splitext(MINIMAL_SUITE_PATH)[0] + '.' + to_format
    self._assert_serialized(converted_path)
def _check_third_party_result_for_approved_user(application_pk_id):
    """The JUXINLI id-card check must have succeeded for this application."""
    with DBHelper() as db_helper:
        ret = db_helper.get_third_party_result_by_type(
            VerifyThirdPartyTypeEnum.JUXINLI_IDCARD, application_pk_id)
        # Expect a (status, payload) pair marking a successful exchange.
        tools.assert_is_not_none(ret)
        tools.assert_equal(2, len(ret))
        tools.assert_equal(ret[0], "EXCHANGE_SUCCESS")
        tools.assert_equal(ret[1], """{"error_code":"31200","error_msg":"此人不在黑名单","result":"{}"}""")
def test_learning_curve_from_dir(self):
    """A learning curve built from a directory parses train/test keys."""
    curve = LearningCurveFromPath(os.path.split(self.fpath)[0])
    assert_is_not_none(curve)
    train_keys, test_keys = curve.parse()
    assert_list_equal(train_keys,
                      ['NumIters', 'Seconds', 'LearningRate', 'loss'])
    assert_list_equal(test_keys,
                      ['NumIters', 'Seconds', 'LearningRate', 'accuracy', 'loss'])
def test_elements_all_plugin_properties(self):
    """A PLUGIN element persists every field it was created with."""
    page = factories.PageFactory()
    concept = factories.ConceptFactory()
    Element.objects.create(
        display_index=0,
        element_type='PLUGIN',
        concept=concept,
        question='test question',
        answer='',
        required=True,
        image='test',
        audio='ping',
        action='action',
        mime_type='text/javascript',
        page=page
    )
    element = Element.objects.get(concept=concept)
    assert_equals(element.display_index, 0)
    assert_equals(element.element_type, 'PLUGIN')
    assert_equals(element.concept, concept)
    assert_equals(element.question, 'test question')
    assert_equals(element.answer, '')
    assert_equals(element.page, page)
    assert_true(element.required)
    assert_equals(element.image, 'test')
    assert_equals(element.audio, 'ping')
    assert_equals(element.action, 'action')
    assert_equals(element.mime_type, 'text/javascript')
    # BUG FIX: the stray `None` second argument was being passed as the
    # assertion *message* (assert_is_not_none takes (value, msg)); drop it.
    assert_is_not_none(element.last_modified)
    assert_is_not_none(element.created)
def test_single_thread_upload():
    """Multipart upload still works with single-threaded mode forced on."""
    synapseclient.config.single_threaded = True
    try:
        filepath = utils.make_bogus_binary_file(
            multipart_upload_module.MIN_PART_SIZE * 2 + 1)
        assert_is_not_none(multipart_upload(syn, filepath))
    finally:
        # Always restore the global flag for subsequent tests.
        synapseclient.config.single_threaded = False
def test_builtin_init():
    """Inspecting a builtin's __init__ definition should not raise."""
    info = inspector.info(list)
    init_def = info['init_definition']
    # Python < 3.4 can't get init definition from builtins,
    # but still exercise the inspection in case of error-raising bugs.
    if sys.version_info >= (3, 4):
        nt.assert_is_not_none(init_def)
def test_config_entity_api__permissions(self):
    """
    Make sure that users only get ConfigEntity's that match their
    permission settings
    :return:
    """
    permission_configuration = TestConfigEntityPermissions.config_entity_configuration()
    resource_name = 'config_entity'

    # Iterate through the test_configurations and extract a user for each
    # group_key. Make a dict with the user as the key and all the instances
    # from the test_config that the user corresponds to. This gives a
    # lookup of a user to the config_entities that we expect the user to be
    # able to view.
    # Create a user->instances dict by combining our
    # {user1: instances, user2: instances, ...} dicts.
    user_to_expected_instances = merge_dict_list_values(
        *map(
            lambda test_configuration:
                # Combine our [user, instance] pairs into
                # {user1: instances, user2: instances, ...}
                # Remove null keys (owing to groups with no users)
                compact_dict(map_to_dict_with_lists(
                    # Each test_configuration has several groups. For each
                    # group resolve a user and return [user, instance]
                    lambda group_key: [
                        get_first_value_or_none(Group.objects.get(name=group_key).user_set.all()),
                        test_configuration['instance']],
                    test_configuration['groups'].keys())),
            permission_configuration.test_configuration)
    )

    all_instances = set(unique(flatten(user_to_expected_instances.values())))
    for user, instances in user_to_expected_instances.items():
        other_instances = all_instances - set(instances)
        # Fetch all instances with this user and create a lookup so we can
        # test that the resulting instances are present or not present as
        # expected according to the permissions
        response = self.get(resource_name, user=user)
        result_instance_lookup = map_to_dict(
            lambda instance_dict: [int(instance_dict['id']), instance_dict],
            self.deserialize(response)['objects'])

        for instance in instances:
            matching_instance = result_instance_lookup.get(instance.id)
            assert_is_not_none(
                matching_instance,
                "User %s should have view permission to instance %s with id %s and key %s but does not." %
                (user.username,
                 instance,
                 instance.id,
                 permission_configuration.key_class.Fab.remove(
                     permission_configuration.instance_key_lambda(instance))))

        for instance in other_instances:
            # BUG FIX: this loop previously asserted on the stale
            # `matching_instance` left over from the loop above instead of
            # looking up the current instance, so forbidden instances were
            # never actually checked.
            matching_instance = result_instance_lookup.get(instance.id)
            assert_is_none(
                matching_instance,
                "User %s should not have view permission to instance %s with id %s and key %s but does." %
                (user.username,
                 instance,
                 instance.id,
                 permission_configuration.key_class.Fab.remove(
                     permission_configuration.instance_key_lambda(instance))))
def test_Entity():
    """CRUD round-trips for Project, Folder, File, and Link entities."""
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.get(project)
    assert_equals(project.name, project_name)

    # Create and get a Folder
    folder = Folder('Test Folder', parent=project,
                    description='A place to put my junk', foo=1000)
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert_equals(folder.name, 'Test Folder')
    assert_equals(folder.parentId, project.id)
    assert_equals(folder.description, 'A place to put my junk')
    assert_equals(folder.foo[0], 1000)

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert_equals(folder.name, 'Test Folder')
    assert_equals(folder.parentId, project.id)
    assert_equals(folder.description, 'The rejects from the other folder')
    assert_equals(folder.pi[0], 3.14159265359)

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder,
                  description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value',
                  bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15),
                  band=u"Motörhead",
                  lunch=u"すし")
    a_file = syn.store(a_file)
    assert_equals(a_file.path, path)

    a_file = syn.get(a_file)
    assert_equals(a_file.description,
                  u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  u'description= %s' % a_file.description)
    assert_equals(a_file['foo'][0], 'An arbitrary value', u'foo= %s' % a_file['foo'][0])
    assert_equals(a_file['bar'], [33, 44, 55])
    assert_equals(a_file['bday'][0], Datetime(2013, 3, 15))
    assert_equals(a_file.contentType, 'text/flapdoodle', u'contentType= %s' % a_file.contentType)
    assert_equals(a_file['band'][0], u"Motörhead", u'band= %s' % a_file['band'][0])
    assert_equals(a_file['lunch'][0], u"すし", u'lunch= %s' % a_file['lunch'][0])

    a_file = syn.get(a_file)
    assert_true(filecmp.cmp(path, a_file.path))

    # A second File can share the same data file handle.
    b_file = File(name="blah", parent=folder, dataFileHandleId=a_file.dataFileHandleId)
    b_file = syn.store(b_file)
    assert_equals(b_file.dataFileHandleId, a_file.dataFileHandleId)

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.store(a_file, forceVersion=False)
    assert_equals(a_file['foo'][0], 'Another arbitrary chunk of text data')
    assert_equals(a_file['bar'], [33, 44, 55])
    assert_equals(a_file['bday'][0], Datetime(2013, 3, 15))
    assert_equals(a_file.new_key[0], 'A newly created value')
    assert_equals(a_file.path, path)
    assert_equals(a_file.versionNumber, 1, "unexpected version number: " + str(a_file.versionNumber))

    # Test create, store, get Links
    # If version isn't specified, targetVersionNumber should not be set
    link = Link(a_file['id'], parent=project)
    link = syn.store(link)
    assert_equals(link['linksTo']['targetId'], a_file['id'])
    assert_is_none(link['linksTo'].get('targetVersionNumber'))
    assert_equals(link['linksToClassName'], a_file['concreteType'])

    link = Link(a_file['id'], targetVersion=a_file.versionNumber, parent=project)
    link = syn.store(link)
    assert_equals(link['linksTo']['targetId'], a_file['id'])
    assert_equals(link['linksTo']['targetVersionNumber'], a_file.versionNumber)
    assert_equals(link['linksToClassName'], a_file['concreteType'])

    testLink = syn.get(link)
    assert_equals(testLink, link)

    link = syn.get(link, followLink=True)
    assert_equals(link['foo'][0], 'Another arbitrary chunk of text data')
    assert_equals(link['bar'], [33, 44, 55])
    assert_equals(link['bday'][0], Datetime(2013, 3, 15))
    assert_equals(link.new_key[0], 'A newly created value')
    assert_true(utils.equal_paths(link.path, path))
    assert_equals(link.versionNumber, 1, "unexpected version number: " + str(a_file.versionNumber))

    # A Link can also point at a Folder.
    newfolder = Folder('Testing Folder', parent=project)
    newfolder = syn.store(newfolder)
    link = Link(newfolder, parent=folder.id)
    link = syn.store(link)
    assert_equals(link['linksTo']['targetId'], newfolder.id)
    assert_equals(link['linksToClassName'], newfolder['concreteType'])
    assert_is_none(link['linksTo'].get('targetVersionNumber'))

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file.path = new_path
    a_file = syn.store(a_file)
    a_file = syn.get(a_file)
    assert_true(filecmp.cmp(new_path, a_file.path))
    assert_equals(a_file.versionNumber, 2)

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert_true(filecmp.cmp(old_random_data.path, path))

    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)

    # test getting the file from the cache with downloadLocation parameter (SYNPY-330)
    a_file_cached = syn.get(a_file.id, downloadLocation=tmpdir)
    assert_is_not_none(a_file_cached.path)
    assert_equal(os.path.basename(a_file_cached.path), os.path.basename(a_file.path))
def assert_data_type_valid(self, data_type):
    """A concept can be created with the given data type."""
    assert_is_not_none(factories.ConceptFactory(data_type=data_type))
def test_get_reference_by_id():
    """No id yields None; a real id yields a reference."""
    assert_is_none(get_reference_by_id())
    assert_is_not_none(get_reference_by_id(1))
def test_assign_volunteer_show_unavailable_event(self):
    '''test how event shows when volunteer not available'''
    data = self.set_basic_opportunity()
    # A second scheduled slot for the same event item, placed inside the
    # volunteer's unavailability window.
    unavail_sched = SchedEventFactory(
        eventitem=data['current_sched'].eventitem,
        starttime=datetime(2016, 2, 7, 12, 0, 0, 0, pytz.utc),
        max_volunteer=10)
    EventLabelFactory(event=unavail_sched,
                      text=data['context'].conference.conference_slug)
    EventLabelFactory(event=unavail_sched, text="Volunteer")
    unavail_window = VolunteerWindowFactory(
        day__conference=data['context'].conference,
        day__day=date(2016, 2, 7),
        start=time(11),
        end=time(15))
    volunteer = VolunteerFactory(b_conference=data['context'].conference,
                                 submitted=True)
    VolunteerInterestFactory(volunteer=data['volunteer'])
    data['volunteer'].unavailable_windows.add(unavail_window)
    data['volunteer'].save()

    url = reverse(self.view_name,
                  args=[data['volunteer'].pk],
                  urlconf="gbe.urls")
    login_as(self.privileged_user, self)
    response = self.client.get(url)
    nt.assert_equal(response.status_code, 200)
    nt.assert_true('Assign Volunteer to Opportunities' in response.content)
    # event names
    nt.assert_equal(
        response.content.count(str(data['current_sched'].eventitem)),
        2,
        msg="There should be 2 schedule items for current_sched.eventitem")
    nt.assert_in(date_format(unavail_sched.start_time, "SHORT_DATETIME_FORMAT"),
                 response.content,
                 msg="start time for unavail_sched didn't show up")
    nt.assert_in(date_format(unavail_sched.end_time, "TIME_FORMAT"),
                 response.content,
                 msg="end time for unavail_sched didn't show up")
    nt.assert_is_not_none(
        re.search(
            date_format(unavail_sched.start_time, "SHORT_DATETIME_FORMAT"),
            response.content),
        msg="unavail_window shows with unavail_sched is not found")
    nt.assert_equal(
        response.content.count('''<td class="bid-table">10</td>'''),
        2,
        msg="unavail_sched and current_sched should have 10 volunteers")
    nt.assert_true(
        "Not Free<br>" in response.content,
        msg="The volunteer should be not free for unavail_sched event")
    nt.assert_is_not_none(
        re.search('''<td \nclass="bid-table">\s+N\s+</td>''',
                  response.content),
        msg="The unavailable event should be not yet full")
def test_convert_poly(self):
    """GPS EXIF data converts to a WKT polygon string."""
    gps = jobs.detect_exif_gps(self.file_path_gps)
    gps_poly = jobs.make_poly(gps[0], gps[1], 0.00001)
    nt.assert_is_not_none(gps_poly)
    nt.assert_true(gps_poly.startswith('POLYGON('))
def test_get_app(): """App Integration - App Base, Get App""" config = AppConfig(get_valid_config_dict('duo_auth')) app = get_app(config) assert_is_not_none(app)
def test_destination_to_string():
    """Every destination code in range maps to a non-None string."""
    for dest in range(10):
        assert_is_not_none(destination_to_string(dest))
def test_all_minids(identifier):
    ''' Test minids with error handling '''
    log = logging.getLogger('minid')
    try:
        minid_obj = Minid(identifier)
        minid_obj.fetch()
        payload = minid_obj.to_json_ld()
        assert_is_not_none(minid_obj)
        assert_is_not_none(payload)
        assert_is_instance(payload, dict)
        log.info({'identifier': identifier, 'json_ld': payload})
    except Identifier404 as err:
        # test json response
        err_json = err.json_response()
        err_html = err.html_response()
        assert_is_not_none(err_json)
        assert_is_instance(err_json, Response)
        assert_equal(err_json.status, '404 NOT FOUND')
        # dumped json should be bytes
        assert_is_not_none(err_json.response[0])
        # test html
        assert_is_not_none(err_html)
        log.error({
            'identifier': identifier,
            'error': err_json.status,
            'message': err_json.response[0].decode('utf-8'),
            'html': err_html
        })
    except OutOfPath400 as err:
        # test json response
        err_json = err.json_response()
        err_html = err.html_response()
        assert_is_not_none(err_json)
        assert_is_instance(err_json, Response)
        assert_equal(err_json.status, '400 BAD REQUEST')
        # dumped json should be bytes
        assert_is_instance(err_json.response, list)
        assert_true(err_json.response[0] != b'')
        log.error({
            'identifier': identifier,
            'error': err_json.status,
            'message': err_json.response[0].decode('utf-8'),
            'html': err_html
        })
        # test html
        assert_is_not_none(err_html)
def test_get_value():
    """Fetching the component value for var_name yields something."""
    assert_is_not_none(component.get_value(var_name))
def test_generates_string_output(self):
    """Protocol generation for a user and procedure returns a value."""
    protocol = generators.ProtocolBuilder.generate(
        factories.UserFactory(), self.procedure.id)
    assert_is_not_none(protocol)
def test_load_default_config():
    """Loading with no explicit path falls back to a default config."""
    assert_is_not_none(load_config(None))
def test_generates_node(self):
    """The criteria generator produces an etree node under a test root."""
    generator = generators.CriteriaNodeGenerator(self.data,
                                                 factories.PageFactory())
    node = generator.get_etree_node(ElementTree.Element('test'))
    assert_is_not_none(node)
def test_generates(self):
    """The node etree element was built by setup."""
    assert_is_not_none(self.node_etree_element)
def test_full_generate(self):
    """The show-if etree element was built by setup."""
    assert_is_not_none(self.show_if_etree_element)
def test_find_default_keywords(self):
    """The namespace exposes the built-in keywords by default."""
    all_kws = self.ns.get_all_keywords([])
    assert_is_not_none(all_kws)
    self.assert_in_keywords(all_kws, 'Should Be Equal')
def assert_element_type_valid(self, element_type):
    """An element can be created with the given element type."""
    assert_is_not_none(factories.ElementFactory(element_type=element_type))
def test_find_user_keyword_name_normalized(self):
    """Keyword lookup is case-insensitive; unknown names return None."""
    assert_is_not_none(
        self.ns.find_user_keyword(
            self.tcf, 'UK Fromresource from rESOURCE with variaBLE'))
    assert_is_none(self.ns.find_user_keyword(self.tcf, 'Copy List'))
def check_format_notice_body_not_none(name, view, notice):
    """Formatting a notice body inside an app context yields a value."""
    app = make()
    with app.app_context():
        assert_is_not_none(mod(name).format_notice_body(view, notice))
def test_query(mock_get):
    """A valid query must return at least one result."""
    results = do_query(mock_get)
    assert_is_not_none(results[0])
def test_version():
    """The package must expose a non-empty version string."""
    version = elm.__version__
    assert_is_not_none(version)
    assert_is_instance(version, str)
    assert_greater(len(version), 0)
def test_user_defined_properties():
    """OutputDispatcher - User Defined Properties"""
    outputs = StreamAlertOutput.get_all_outputs()
    for dispatcher in outputs.values():
        props = dispatcher.get_user_defined_properties()
        # Every output must at least declare a descriptor property.
        assert_is_not_none(props.get('descriptor'))
def test_getWithEntityBundle(self, download_file_mock, get_file_URL_and_metadata_mock):
    """Verify syn._getWithEntityBundle download behavior: explicit download
    locations, default location, re-download to a second directory, and
    preservation of local state for an externalURL file.

    The two mock arguments stub out the actual file download and the file
    handle metadata lookup (side_effects assigned below).
    """
    # Note: one thing that remains unexplained is why the previous version of
    # this test worked if you had a .cacheMap file of the form:
    # {"/Users/chris/.synapseCache/663/-1337/anonymous": "2014-09-15T22:54:57.000Z",
    # "/var/folders/ym/p7cr7rrx4z7fw36sxv04pqh00000gq/T/tmpJ4nz8U": "2014-09-15T23:27:25.000Z"}
    # ...but failed if you didn't.

    # Minimal entity bundle: one FileEntity plus its S3 file handle.
    bundle = {
        'entity': {
            'id': 'syn10101',
            'name': 'anonymous',
            'dataFileHandleId': '-1337',
            'concreteType': 'org.sagebionetworks.repo.model.FileEntity',
            'parentId': 'syn12345'},
        'fileHandles': [{
            'concreteType': 'org.sagebionetworks.repo.model.file.S3FileHandle',
            'fileName': 'anonymous',
            'contentType': 'application/flapdoodle',
            'contentMd5': '1698d26000d60816caab15169efcd23a',
            'id': '-1337'}],
        'annotations': {}}
    fileHandle = bundle['fileHandles'][0]['id']
    cacheDir = syn.cache.get_cache_dir(fileHandle)
    # Make sure the .cacheMap file does not already exist
    cacheMap = os.path.join(cacheDir, '.cacheMap')
    if os.path.exists(cacheMap):
        os.remove(cacheMap)

    def _downloadFileHandle(fileHandleId, objectId, objectType, path, retries=5):
        # Fake download: touch file at path and register it in the cache.
        with open(path, 'a'):
            os.utime(path, None)
        os.path.split(path)  # NOTE(review): result unused — looks like a leftover; confirm
        syn.cache.add(fileHandle, path)
        return path

    def _getFileHandleDownload(fileHandleId, objectId, objectType='FileHandle'):
        # Fake metadata lookup: echo back the bundle's file handle.
        return {'fileHandle': bundle['fileHandles'][0], 'fileHandleId': fileHandleId,
                'preSignedURL': 'http://example.com'}

    download_file_mock.side_effect = _downloadFileHandle
    get_file_URL_and_metadata_mock.side_effect = _getFileHandleDownload

    # 1. ----------------------------------------------------------------------
    # download file to an alternate location
    temp_dir1 = tempfile.mkdtemp()
    e = syn._getWithEntityBundle(entityBundle=bundle,
                                 downloadLocation=temp_dir1,
                                 ifcollision="overwrite.local")
    assert_equal(e.name, bundle["entity"]["name"])
    assert_equal(e.parentId, bundle["entity"]["parentId"])
    # File landed in the requested directory, under the file handle's name.
    assert_equal(utils.normalize_path(os.path.abspath(os.path.dirname(e.path))),
                 utils.normalize_path(temp_dir1))
    assert_equal(bundle["fileHandles"][0]["fileName"], os.path.basename(e.path))
    assert_equal(utils.normalize_path(os.path.abspath(e.path)),
                 utils.normalize_path(os.path.join(temp_dir1,
                                                   bundle["fileHandles"][0]["fileName"])))

    # 2. ----------------------------------------------------------------------
    # get without specifying downloadLocation
    e = syn._getWithEntityBundle(entityBundle=bundle, ifcollision="overwrite.local")
    assert_equal(e.name, bundle["entity"]["name"])
    assert_equal(e.parentId, bundle["entity"]["parentId"])
    assert_in(bundle["fileHandles"][0]["fileName"], e.files)

    # 3. ----------------------------------------------------------------------
    # download to another location
    temp_dir2 = tempfile.mkdtemp()
    assert_not_equals(temp_dir2, temp_dir1)
    e = syn._getWithEntityBundle(entityBundle=bundle,
                                 downloadLocation=temp_dir2,
                                 ifcollision="overwrite.local")
    assert_in(bundle["fileHandles"][0]["fileName"], e.files)
    assert_is_not_none(e.path)
    assert_true(utils.equal_paths(os.path.dirname(e.path), temp_dir2))

    # 4. ----------------------------------------------------------------------
    # test preservation of local state
    url = 'http://foo.com/secretstuff.txt'
    # need to create a bundle with externalURL
    # NOTE(review): dict(bundle) is a SHALLOW copy, so the next line also
    # mutates bundle['fileHandles'][0]; harmless here since this is the last
    # use of bundle — confirm before reordering steps.
    externalURLBundle = dict(bundle)
    externalURLBundle['fileHandles'][0]['externalURL'] = url
    e = File(name='anonymous', parentId="syn12345", synapseStore=False, externalURL=url)
    e.local_state({'zap': 'pow'})
    e = syn._getWithEntityBundle(entityBundle=externalURLBundle, entity=e)
    assert_equal(e.local_state()['zap'], 'pow')
    assert_equal(e.synapseStore, False)
    assert_equal(e.externalURL, url)
def test_generates_pk(self):
    """Creating a Cert should generate a private key alongside it."""
    certificate = Cert("Test")
    nt.assert_is_not_none(certificate.private_key)
def test_get_openvpn_binary_not_none(mock):
    """The binary locator must resolve to the expected absolute path."""
    path = get_openvpn_binary()
    assert_is_not_none(path)
    eq_(path, '/usr/local/sbin/openvpn')
def test_get_openvpn_version_returns_correct_version(mock):
    """Version detection must report the mocked OpenVPN release."""
    version = get_openvpn_version()
    assert_is_not_none(version)
    eq_(version, '2.4.3')
def test_has_output():
    """Listing the output directory must not yield None."""
    assert_is_not_none(listdir(output_dir))
def test_application_routes():
    """Route table sanity: GET '/' is registered and its controller returns '1'."""
    assert_in('get', app._routes)
    # A registered route validates cleanly (no complaint returned).
    assert_is_none(app.validate_route('get', '/'))
    # These verb/path pairs are not registered, so validation reports something.
    for verb, path in (('post', '/'), ('get', '/1')):
        assert_is_not_none(app.validate_route(verb, path))
    controller = app.get_controller('get', '/')[0]
    assert_equal('1', controller())
def test_get_dispatcher_good():
    """StreamAlertOutput - Get Valid Dispatcher"""
    assert_is_not_none(StreamAlertOutput.get_dispatcher('aws-s3'))
def test_default_credentials_configured_credentials_is_not_none():
    """Tests GoogleAuth.credentials gets initialized when default credentials
    are configured"""
    with patch('google.auth.default', return_value=(creds, 'project'),
               autospec=True):
        # Fix: build ONE instance instead of two. The original constructed
        # GoogleAuth() twice, re-running initialization per assertion; both
        # checks should observe the same object.
        auth = GoogleAuth()
        assert_equals(auth.credentials, creds)
        assert_is_not_none(auth.credentials)
def expired(conn, kind, text):
    """Return True when the cached query's mtime plus the refresh interval is in the past."""
    mtime = get_query_mtime_bi_kind_and_text(conn, kind, text)
    assert_is_not_none(mtime)
    deadline = mtime + timedelta(seconds=interval())
    return deadline < datetime.utcnow()