def test_entity_view_add_annotation_columns():
    """Store two annotated folders, then build an EntityViewSchema scoped to
    them with annotation-derived columns enabled (default columns off)."""
    annos1 = {'strAnno': 'str1', 'intAnno': 1, 'floatAnno': 1.1}
    annos2 = {'dateAnno': datetime.now(), 'strAnno': 'str2', 'intAnno': 2}
    folder1 = syn.store(
        Folder(name=str(uuid.uuid4()) + 'test_entity_view_add_annotation_columns_proj1',
               parent=project,
               annotations=annos1))
    folder2 = syn.store(
        Folder(name=str(uuid.uuid4()) + 'test_entity_view_add_annotation_columns_proj2',
               parent=project,
               annotations=annos2))
    for stored in (folder1, folder2):
        schedule_for_cleanup(stored)
    scopeIds = [utils.id_of(stored) for stored in (folder1, folder2)]
    entity_view = EntityViewSchema(name=str(uuid.uuid4()),
                                   scopeIds=scopeIds,
                                   addDefaultViewColumns=False,
                                   addAnnotationColumns=True,
                                   type='project',
                                   parent=project)
    syn.store(entity_view)
def _copyFolder(syn, entity, destinationId, mapping=None, skipCopyAnnotations=False, **kwargs):
    """
    Copies synapse folders

    :param entity:              A synapse ID of a Folder entity

    :param destinationId:       Synapse ID of a project/folder that the folder wants to be copied to

    :param skipCopyAnnotations: Skips copying the annotations
                                Default is False
    """
    source = syn.get(entity)
    if mapping is None:
        mapping = {}
    # Unless the caller opted into updating existing entities, refuse to
    # clobber an identically-named entity at the destination.
    if not kwargs.get('updateExisting', False):
        if syn.findEntityId(source.name, parent=destinationId) is not None:
            raise ValueError('An entity named "%s" already exists in this location. Folder could not be copied'
                             % source.name)
    destination = Folder(name=source.name, parent=destinationId)
    if not skipCopyAnnotations:
        destination.annotations = source.annotations
    destination = syn.store(destination)
    # Recurse into every child of the source folder.
    for child in syn.getChildren(entity):
        _copyRecursive(syn, child['id'], destination.id, mapping,
                       skipCopyAnnotations=skipCopyAnnotations, **kwargs)
    return destination.id
def _copyFolder(syn, entity, destinationId, mapping=None, **kwargs):
    """
    Copies synapse folders

    :param entity:        A synapse ID of a Folder entity

    :param destinationId: Synapse ID of a project/folder that the folder wants to be copied to

    :param excludeTypes:  Accepts a list of entity types (file, table, link) which determines which entity types to not copy.
                          Defaults to an empty list.
    """
    # BUG FIX: the signature previously used `mapping=dict()`, a mutable
    # default shared across every call, so id mappings leaked between copies.
    # Use None as the sentinel and build a fresh dict per invocation.
    if mapping is None:
        mapping = dict()
    oldFolder = syn.get(entity)
    # CHECK: If Folder name already exists, raise value error
    search = syn.query('select name from entity where parentId == "%s"' % destinationId)
    for result in search['results']:
        if result['entity.name'] == oldFolder.name:
            raise ValueError('An item named "%s" already exists in this location. Folder could not be copied' % oldFolder.name)
    newFolder = Folder(name=oldFolder.name, parent=destinationId)
    newFolder.annotations = oldFolder.annotations
    newFolder = syn.store(newFolder)
    # Copy every child; the recursive call records new ids into `mapping`.
    entities = syn.chunkedQuery('select id, name from entity where parentId=="%s"' % entity)
    for ent in entities:
        _copyRecursive(syn, ent['entity.id'], newFolder.id, mapping, **kwargs)
    return newFolder.id
def test_walk():
    """Integration test: synapseutils.walk must yield (dirpath, dirnames, filenames)
    triples for a nested project hierarchy, and an empty generator for a file."""
    walked = []
    firstfile = utils.make_bogus_data_file()
    schedule_for_cleanup(firstfile)
    project_entity = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    folder_entity = syn.store(Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(folder_entity.id)
    second_folder = syn.store(Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(second_folder.id)
    file_entity = syn.store(File(firstfile, parent=project_entity))
    schedule_for_cleanup(file_entity.id)
    walked.append(((project_entity.name, project_entity.id),
                   [(folder_entity.name, folder_entity.id),
                    (second_folder.name, second_folder.id)],
                   [(file_entity.name, file_entity.id)]))
    nested_folder = syn.store(Folder(name=str(uuid.uuid4()), parent=folder_entity))
    schedule_for_cleanup(nested_folder.id)
    secondfile = utils.make_bogus_data_file()
    schedule_for_cleanup(secondfile)
    second_file = syn.store(File(secondfile, parent=nested_folder))
    schedule_for_cleanup(second_file.id)
    thirdfile = utils.make_bogus_data_file()
    schedule_for_cleanup(thirdfile)
    third_file = syn.store(File(thirdfile, parent=second_folder))
    schedule_for_cleanup(third_file.id)
    walked.append(((os.path.join(project_entity.name, folder_entity.name), folder_entity.id),
                   [(nested_folder.name, nested_folder.id)], []))
    walked.append(((os.path.join(os.path.join(project_entity.name, folder_entity.name),
                                 nested_folder.name), nested_folder.id),
                   [], [(second_file.name, second_file.id)]))
    walked.append(((os.path.join(project_entity.name, second_folder.name), second_folder.id),
                   [], [(third_file.name, third_file.id)]))
    temp = synapseutils.walk(syn, project_entity.id)
    temp = list(temp)
    # Must sort the tuples returned, because order matters for the assert
    # Folders are returned in a different ordering depending on the name
    # BUG FIX: previous code wrote `x = x.sort()`, rebinding the loop variable
    # to None -- list.sort() sorts in place and returns None. The in-place
    # sort is all that is needed; also use isinstance() over type() ==.
    for i in walked:
        for x in i:
            if isinstance(x, list):
                x.sort()
    for i in temp:
        for x in i:
            if isinstance(x, list):
                x.sort()
        assert i in walked
    print("CHECK: synapseutils.walk on a file should return empty generator")
    temp = synapseutils.walk(syn, second_file.id)
    assert list(temp) == []
def dockerValidate(submission, syn, user, password):
    """Validate a docker submission.

    Checks that the submission references a docker repository + sha digest that
    can be pulled from the Synapse registry and is under the size limit, then
    ensures per-submission prediction/log folders exist (creating them and
    granting log-folder permissions on first sight).

    :param submission: Synapse evaluation submission bundle
    :param syn:        logged-in Synapse client
    :param user:       registry username
    :param password:   registry password
    :returns: (True, message) tuple on success; raises AssertionError otherwise
    """
    submissionJson = json.loads(submission['entityBundleJSON'])
    assert submissionJson['entity'].get('repositoryName') is not None, "Must submit a docker container"
    dockerRepo = submissionJson['entity']['repositoryName'].replace("docker.synapse.org/", "")
    assert submission.get('dockerDigest') is not None, "Must submit a docker container with a docker sha digest"
    dockerDigest = submission['dockerDigest']
    index_endpoint = 'https://docker.synapse.org'
    # Check if docker is able to be pulled
    dockerRequestURL = '{0}/v2/{1}/manifests/{2}'.format(index_endpoint, dockerRepo, dockerDigest)
    token = getAuthToken(dockerRequestURL, user, password)
    resp = requests.get(dockerRequestURL, headers={'Authorization': 'Bearer %s' % token})
    assert resp.status_code == 200, "Docker image + sha digest must exist"
    # Must check docker image size
    # Synapse docker registry
    dockerSize = sum([layer['size'] for layer in resp.json()['layers']])
    assert dockerSize/1000000000.0 < 1000, "Docker image must be less than a teribyte"
    # should be stateless, if there needs to be code changes to the docker agent
    # BUG FIX: generator.next() is Python-2 only; use the builtin next() which
    # works on both Python 2 and 3.
    preds = synu.walk(syn, CHALLENGE_PREDICTION_FOLDER)
    predFolders = next(preds)[1]
    predSynId = [synId for name, synId in predFolders if str(submission.id) == name]
    logs = synu.walk(syn, CHALLENGE_LOG_FOLDER)
    logsFolders = next(logs)[1]
    logsSynId = [synId for name, synId in logsFolders if str(submission.id) == name]
    if len(predSynId) == 0:
        predFolder = syn.store(Folder(submission.id, parent=CHALLENGE_PREDICTION_FOLDER))
        predFolder = predFolder.id
    else:
        predFolder = predSynId[0]
    if len(logsSynId) == 0:
        logFolder = syn.store(Folder(submission.id, parent=CHALLENGE_LOG_FOLDER))
        logFolder = logFolder.id
        # Grant each contributor access to the freshly-created log folder;
        # admins get full control, everyone else read/download only.
        for participant in submission.contributors:
            if participant['principalId'] in ADMIN_USER_IDS:
                access = ['CREATE', 'READ', 'DOWNLOAD', 'UPDATE', 'DELETE',
                          'CHANGE_PERMISSIONS', 'MODERATE', 'CHANGE_SETTINGS']
            else:
                access = ['READ', 'DOWNLOAD']
            # Comment set permissions out if you don't want to allow participants to see the pred files
            # syn.setPermissions(predFolder, principalId = participant['principalId'], accessType = access)
            syn.setPermissions(logFolder, principalId=participant['principalId'], accessType=access)
    else:
        logFolder = logsSynId[0]
    # Add more message if you want to return the prediction files
    return(True, "Your submission has been validated! As your submission is being ran, please go here: https://www.synapse.org/#!Synapse:%s to check on your log file." % logFolder)
def getParentFolder(syn, project, meta):
    """Return the Synapse ID of the platform folder nested under the acronym
    folder for this dataset, creating either folder if it does not exist."""
    acronym = meta['annotations']['acronym']
    platform = meta['annotations']['platform']
    fid = find_child(syn, project, acronym)
    if fid is None:
        fid = syn.store(Folder(name=acronym, parentId=project)).id
    pid = find_child(syn, fid, platform)
    if pid is None:
        pid = syn.store(Folder(name=platform, parentId=fid)).id
    return pid
def test_entity_view_add_annotation_columns(syn, project, schedule_for_cleanup):
    """Exercise addAnnotationColumns over two annotated scope folders, with the
    deprecated `type` field and with includeEntityTypes."""
    folders = []
    for suffix, annos in (
            ('test_entity_view_add_annotation_columns_proj1',
             {'strAnno': 'str1', 'intAnno': 1, 'floatAnno': 1.1}),
            ('test_entity_view_add_annotation_columns_proj2',
             {'dateAnno': datetime.now(), 'strAnno': 'str2', 'intAnno': 2})):
        stored = syn.store(Folder(name=str(uuid.uuid4()) + suffix,
                                  parent=project,
                                  annotations=annos))
        schedule_for_cleanup(stored)
        folders.append(stored)
    scopeIds = [utils.id_of(f) for f in folders]

    # This test is to ensure that user code which use the deprecated field `type` continue to work
    # TODO: remove this test case in Synapse Python client 2.0
    syn.store(EntityViewSchema(name=str(uuid.uuid4()), scopeIds=scopeIds,
                               addDefaultViewColumns=False, addAnnotationColumns=True,
                               type='project', parent=project))

    # This test is to ensure that user code which use the deprecated field `type` continue to work
    # TODO: remove this test case in Synapse Python client 2.0
    syn.store(EntityViewSchema(name=str(uuid.uuid4()), scopeIds=scopeIds,
                               addDefaultViewColumns=False, addAnnotationColumns=True,
                               type='file', includeEntityTypes=[EntityViewType.PROJECT],
                               parent=project))

    syn.store(EntityViewSchema(name=str(uuid.uuid4()), scopeIds=scopeIds,
                               addDefaultViewColumns=False, addAnnotationColumns=True,
                               includeEntityTypes=[EntityViewType.PROJECT], parent=project))
def test_syncFromSynapse__project_contains_empty_folder(syn):
    """syncFromSynapse must recurse into (and skip) an empty folder, returning
    only the project's file."""
    project = Project(name="the project", parent="whatever", id="syn123")
    file = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")
    lookup = {file.id: file, folder.id: folder}

    def fake_get(entity, *args, **kwargs):
        return lookup[id_of(entity)]

    with patch.object(syn, "getChildren", side_effect=[[folder, file], []]) as patch_syn_get_children, \
            patch.object(syn, "get", side_effect=fake_get) as patch_syn_get:
        assert [file] == synapseutils.syncFromSynapse(syn, project)
        # Children fetched for the project first, then the (empty) folder.
        assert [call(project['id']), call(folder['id'])] == patch_syn_get_children.call_args_list
        patch_syn_get.assert_called_once_with(
            file['id'],
            downloadLocation=None,
            ifcollision='overwrite.local',
            followLink=False,
            downloadFile=True,
        )
def test_syncFromSynapse():
    """This function tests recursive download as defined in syncFromSynapse
    most of the functionality of this function are already tested in the
    tests/integration/test_command_line_client::test_command_get_recursive_and_query

    which means that the only test if for path=None
    """
    # Create a Project with a Folder inside it
    project_entity = syn.store(synapseclient.Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    folder_entity = syn.store(Folder(name=str(uuid.uuid4()), parent=project_entity))

    # Two files in the Folder plus one at the project level
    uploaded_paths = []
    for parent in (folder_entity, folder_entity, project_entity):
        path = utils.make_bogus_data_file()
        uploaded_paths.append(path)
        schedule_for_cleanup(path)
        syn.store(File(path, parent=parent))

    # Test recursive get
    output = synapseutils.syncFromSynapse(syn, project_entity)
    assert_equals(len(output), len(uploaded_paths))
    for f in output:
        assert_in(f.path, uploaded_paths)
def get_or_create_folder(self, dirpath, parent_synid):
    """Ensure every component of `dirpath` exists as a nested Folder under
    `parent_synid`; return the Synapse ID of the deepest folder."""
    current_synid = parent_synid
    for component in dirpath.parts:
        current_synid = self.get_or_create_entity(Folder(component, parent=current_synid))
    return current_synid
def setup(module):
    # Module-level test fixture: creates a project/folder pair, local test
    # files, and the manifest header/rows used by the sync tests below.
    module.syn = integration.syn
    module.project = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(module.project)
    module.folder = syn.store(Folder(name=str(uuid.uuid4()), parent=module.project))
    # Create testfiles for upload
    module.f1 = utils.make_bogus_data_file(n=10)
    module.f2 = utils.make_bogus_data_file(n=10)
    f3 = 'https://www.synapse.org'
    schedule_for_cleanup(module.f1)
    schedule_for_cleanup(module.f2)
    # NOTE(review): the bare names f1/f2/project/folder below resolve only
    # because `module` is this same test module, so `module.f1 = ...` also
    # creates the global `f1` -- confirm setup() lives in the module it
    # configures before moving this code.
    module.header = 'path parent used executed activityName synapseStore foo\n'
    module.row1 = '%s %s %s "%s;https://www.example.com" provName bar\n' % (f1, project.id, f2, f3)
    module.row2 = '%s %s "syn12" "syn123;https://www.example.com" provName2 bar\n' % (f2, folder.id)
    module.row3 = '%s %s "syn12" prov2 False baz\n' % (f3, folder.id)
    module.row4 = '%s %s %s act 2\n' % (f3, project.id, f1)  # Circular reference
    module.row5 = '%s syn12 \n' % (f3)  # Wrong parent
def test_getChildren(syn, schedule_for_cleanup):
    """getChildren returns only the direct children of an entity."""
    # setup a hierarchy for folders
    # PROJECT
    # |     \
    # File  Folder
    #         |
    #        File
    test_project = syn.store(Project(name=str(uuid.uuid1())))
    folder = syn.store(Folder(name="firstFolder", parent=test_project))
    syn.store(File(path="~/doesntMatter.txt",
                   name="file inside folders",
                   parent=folder,
                   synapseStore=False))
    project_file = syn.store(File(path="~/doesntMatterAgain.txt",
                                  name="file inside project",
                                  parent=test_project,
                                  synapseStore=False))
    schedule_for_cleanup(test_project)

    # Only the project-level file and the folder are direct children;
    # the nested file must not appear.
    children_id_set = {child['id'] for child in syn.getChildren(test_project.id)}
    assert {project_file.id, folder.id} == children_id_set
def test_syncFromSynapse__downloadFile_is_false(syn):
    """
    Verify when passing the argument downloadFile is equal to False,
    syncFromSynapse won't download the file to clients' local end.
    """
    project = Project(name="the project", parent="whatever", id="syn123")
    file = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")
    lookup = {file.id: file, folder.id: folder}

    def fake_get(entity, *args, **kwargs):
        return lookup[id_of(entity)]

    with patch.object(syn, "getChildren", side_effect=[[folder, file], []]), \
            patch.object(syn, "get", side_effect=fake_get) as patch_syn_get:
        synapseutils.syncFromSynapse(syn, project, downloadFile=False)
        patch_syn_get.assert_called_once_with(
            file['id'],
            downloadLocation=None,
            ifcollision='overwrite.local',
            followLink=False,
            downloadFile=False,
        )
def test_syncFromSynapse__folder_contains_one_file(syn):
    """A folder containing a single file syncs to a one-element list."""
    folder = Folder(name="the folder", parent="whatever", id="syn123")
    file = File(name="a file", parent=folder, id="syn456")
    with patch.object(syn, "getChildren", return_value=[file]) as patch_syn_get_children, \
            patch.object(syn, "get", return_value=file):
        assert [file] == synapseutils.syncFromSynapse(syn, folder)
        # BUG FIX: `called_with(...)` is not a Mock assertion method -- the
        # mock auto-creates it and the call silently checks nothing. Use the
        # real assert_called_once_with so the expectation is enforced.
        patch_syn_get_children.assert_called_once_with(folder['id'])
def test_get_local_file():
    """Tests synapse.get() with local a local file """
    local_path = utils.make_bogus_data_file()
    schedule_for_cleanup(local_path)
    folder = syn.createEntity(Folder('TestFindFileFolder', parent=project,
                                     description='A place to put my junk'))

    # A local path Synapse has never seen should not resolve
    assert_raises(SynapseError, syn.get, local_path)

    # Once stored, the same local path resolves to the stored entity
    ent_folder = syn.store(File(local_path, parent=folder))
    fetched = syn.get(local_path)
    assert ent_folder.id == fetched.id and ent_folder.versionNumber == fetched.versionNumber

    # Storing the same file in a second location should trigger the
    # multiple-locations warning on get
    syn.store(File(local_path, parent=project))
    syn.get(local_path)

    # limitSearch narrows the lookup to the copy inside the folder
    limited = syn.get(local_path, limitSearch=folder.id)
    assert limited.id == ent_folder.id and limited.versionNumber == ent_folder.versionNumber

    # A limitSearch that excludes every copy raises
    assert_raises(SynapseError, syn.get, local_path, limitSearch='syn1')
def test_dispose(syn_client, syn_test_helper, new_temp_file):
    """dispose() must delete every tracked object and empty the trash queue."""
    project = syn_client.store(Project(name=syn_test_helper.uniq_name()))
    folder = syn_client.store(
        Folder(name=syn_test_helper.uniq_name(prefix='Folder '), parent=project))
    file = syn_client.store(
        File(name=syn_test_helper.uniq_name(prefix='File '), path=new_temp_file, parent=folder))
    syn_objects = [project, folder, file]
    for syn_obj in syn_objects:
        syn_test_helper.dispose_of(syn_obj)
        assert syn_obj in syn_test_helper._trash
    syn_test_helper.dispose()
    assert len(syn_test_helper._trash) == 0
    # Every disposed object must now be gone from Synapse
    for syn_obj in syn_objects:
        with pytest.raises(synapseclient.exceptions.SynapseHTTPError) as ex:
            syn_client.get(syn_obj, downloadFile=False)
        err_str = str(ex.value)
        assert "Not Found" in err_str or "cannot be found" in err_str \
            or "is in trash can" in err_str or "does not exist" in err_str
    # BUG FIX: a bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # only a failed filesystem removal should be ignored here.
    try:
        os.remove(new_temp_file)
    except OSError:
        pass
def setUpSynapseProject(foldersToCreate, syn, pid=None, pname=None):
    '''Creates Synapse project and necessary folders for the dataset.

    :param foldersToCreate: iterable of folder names expected in the project
    :param syn:             logged-in Synapse client
    :param pid:             existing project ID to reuse (created when None)
    :param pname:           name for a newly-created project
    :returns: (project, existingFolders) where existingFolders maps
              folder name -> Synapse ID
    '''
    # Create a set of sub-folders expected for this project
    folderSchemaSet = set(foldersToCreate)
    # Get the project if it exists or create
    # BUG FIX: compare to None with `is`, not `==` (PEP 8; `==` can be
    # hijacked by custom __eq__).
    if pid is None:
        project = syn.store(Project(pname))
    else:
        project = syn.get(pid)
    # print() with a single argument is valid on both Python 2 and 3,
    # unlike the former `print '%s' % ...` statement form.
    print('%s' % project.name)
    existingFolders = getExistingFolders(syn, project.id)
    if len(existingFolders) > 0:
        foldersToCreate = folderSchemaSet.difference(existingFolders.keys())
    # create the folders that don't exist
    for name in foldersToCreate:
        createFolder = syn.store(Folder(name, parent=project.id))
        existingFolders[name] = createFolder.id
    return (project, existingFolders)
def _view_setup(cls):
    # set up a file view: one scope folder containing two (non-stored) files,
    # plus an EntityViewSchema with explicit foo/bar columns over that scope.
    scope_folder = syn.store(
        Folder(name="PartialRowTestFolder" + str(uuid.uuid4()), parent=project))
    for fake_path, file_name in (("~/path/doesnt/matter", "f1"),
                                 ("~/path/doesnt/matter/again", "f2")):
        syn.store(File(fake_path, name=file_name, parent=scope_folder, synapseStore=False))
    view_columns = [
        Column(name='foo', columnType='STRING', maximumSize=1000),
        Column(name='bar', columnType='STRING'),
    ]
    return syn.store(
        EntityViewSchema(name='PartialRowTestViews' + str(uuid.uuid4()),
                         columns=view_columns,
                         addDefaultViewColumns=False,
                         parent=project,
                         scopes=[scope_folder]))
def test_syncFromSynapse__manifest_is_root(
        mock__get_file_entity_provenance_dict, mock_generateManifest, syn):
    """
    Verify manifest argument equal to "root" that pass in to syncFromSynapse, it will create root_manifest file only.
    """
    project = Project(name="the project", parent="whatever", id="syn123")
    file1 = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")
    file2 = File(name="a file2", parent=folder, id="syn789123")
    # Structure of nested project
    # project
    #   |---> file1
    #   |---> folder
    #           |---> file2
    entities = {
        file1.id: file1,
        folder.id: folder,
        file2.id: file2,
    }

    def syn_get_side_effect(entity, *args, **kwargs):
        # Resolve mocked syn.get(...) calls against the in-memory table above.
        return entities[id_of(entity)]

    mock__get_file_entity_provenance_dict.return_value = {}
    # getChildren is consumed in order: project children first, then the
    # folder's children -- the side_effect list encodes that traversal.
    with patch.object(syn, "getChildren", side_effect=[[folder, file1], [file2]]),\
            patch.object(syn, "get", side_effect=syn_get_side_effect) as patch_syn_get:
        synapseutils.syncFromSynapse(syn, project, path="./", downloadFile=False, manifest="root")
        # Both files fetched metadata-only; the nested file's download
        # location reflects its folder under the root path.
        assert patch_syn_get.call_args_list == [
            call(
                file1['id'],
                downloadLocation="./",
                ifcollision='overwrite.local',
                followLink=False,
                downloadFile=False,
            ),
            call(
                file2['id'],
                downloadLocation="./a folder",
                ifcollision='overwrite.local',
                followLink=False,
                downloadFile=False,
            )
        ]
        # manifest="root" => exactly one manifest, covering both files.
        assert mock_generateManifest.call_count == 1

        call_files = mock_generateManifest.call_args_list[0][0][1]
        assert len(call_files) == 2
        assert call_files[0].id == "syn456"
        assert call_files[1].id == "syn789123"
def mutate(self, info, name, permissions, annotations, wiki, folders, posts):
    """Create a Synapse project and attach its permissions, folders, forum
    posts, and wiki; return the wrapped CreateSynProject result."""
    # Build the annotations
    project_annotations = {}
    for annotation in (annotations or []):
        project_annotations[annotation['key']] = annotation['value']

    # Create the Project
    project = Synapse.client().store(
        Project(name=name, annotations=project_annotations)
    )

    # Apply each requested ACL entry
    for permission in (permissions or []):
        access_type = getattr(Synapse, '{0}_PERMS'.format(permission['access']))
        Synapse.client().setPermissions(
            project,
            permission['principal_id'],
            accessType=access_type,
            warn_if_inherits=False
        )

    # Create any requested folders
    for folder_name in (folders or []):
        Synapse.client().store(Folder(name=folder_name, parent=project))

    # Seed the project forum with the requested posts
    if posts:
        forum_id = Synapse.client().restGET(
            '/project/{0}/forum'.format(project.id)).get('id')
        for post in posts:
            Synapse.client().restPOST("/thread", body=json.dumps({
                'forumId': forum_id,
                'title': post['title'],
                'messageMarkdown': post['message_markdown']
            }))

    # Attach the wiki, if any
    if wiki:
        Synapse.client().store(Wiki(title=wiki.title, markdown=wiki.markdown, owner=project))

    return CreateSynProject(syn_project=SynProject.from_project(project))
def test_syncFromSynapse_Links():
    """This function tests recursive download of links as defined in syncFromSynapse
    most of the functionality of this function are already tested in the
    tests/integration/test_command_line_client::test_command_get_recursive_and_query

    which means that the only test if for path=None
    """
    # Hierarchy: project -> folder_entity -> inner_folder_entity
    #            project -> second_folder_entity
    project_entity = syn.store(synapseclient.Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    folder_entity = syn.store(Folder(name=str(uuid.uuid4()), parent=project_entity))
    inner_folder_entity = syn.store(Folder(name=str(uuid.uuid4()), parent=folder_entity))
    second_folder_entity = syn.store(Folder(name=str(uuid.uuid4()), parent=project_entity))

    uploaded_paths = []

    def _store_bogus_file(parent):
        path = utils.make_bogus_data_file()
        uploaded_paths.append(path)
        schedule_for_cleanup(path)
        return syn.store(File(path, parent=parent))

    # Two project-level files, each linked from folder_entity
    for _ in range(2):
        stored = _store_bogus_file(project_entity)
        syn.store(Link(stored.id, parent=folder_entity))

    # One file in the second folder, linked from the inner folder
    stored = _store_bogus_file(second_folder_entity)
    syn.store(Link(stored.id, parent=inner_folder_entity))

    # Recursive get with followLink must resolve every link to its real file
    output = synapseutils.syncFromSynapse(syn, folder_entity, followLink=True)
    assert len(output) == len(uploaded_paths)
    for f in output:
        assert f.path in uploaded_paths
def create_folder(self, **kwargs):
    """Creates a new Folder and adds it to the trash queue."""
    # `prefix` is only meaningful to uniq_name; strip it so it never
    # reaches the Folder constructor.
    prefix = kwargs.pop('prefix', None)
    if 'name' not in kwargs:
        kwargs['name'] = self.uniq_name(prefix=prefix if prefix is not None else '')
    folder = SynapseProxy.client().store(Folder(**kwargs))
    self.dispose_of(folder)
    return folder
def test_move():
    """syn.move fetches without downloading, re-parents, and stores without
    forcing a new version."""
    # Moving a nonexistent local path raises
    assert_raises(SynapseFileNotFoundError, syn.move, "abc", "syn123")

    # NOTE: the same Folder object serves as both the mocked get() result and
    # the expected store() argument; move() mutates its parentId in place.
    expected = Folder(name="folder", parent="syn456")
    expected.parentId = "syn789"

    with patch.object(syn, "get", return_value=expected) as mock_get, \
            patch.object(syn, "store", return_value=expected) as mock_store:
        assert_equal(expected, syn.move("syn123", "syn789"))
        mock_get.assert_called_once_with("syn123", downloadFile=False)
        mock_store.assert_called_once_with(expected, forceVersion=False)
def test_findEntityIdByNameAndParent__with_parent():
    """findEntityId must POST /entity/child with parentId + entityName and
    return the id from the response."""
    entity_name = "Kappa 123"
    parentId = "syn42"
    parent_entity = Folder(name="wwwwwwwwwwwwwwwwwwwwww@@@@@@@@@@@@@@@@",
                           id=parentId, parent="fakeParent")
    expected_id = "syn1234"

    with patch.object(syn, "restPOST", return_value={'id': expected_id}) as mocked_POST:
        found_id = syn.findEntityId(entity_name, parent_entity)
        mocked_POST.assert_called_once_with(
            "/entity/child",
            body=json.dumps({"parentId": parentId, "entityName": entity_name}))
        assert_equal(expected_id, found_id)
async def find_or_create_folder(self, path, synapse_parent, is_remote_only=False):
    """Find the Synapse Folder for a local directory path, creating it (with
    retries) when missing.

    :param path:           local directory path being synced
    :param synapse_parent: parent Synapse entity (None aborts with an error)
    :param is_remote_only: passed through to the CSV log line
    :returns: the Synapse Folder entity, or None on failure
    """
    synapse_folder = None
    if not synapse_parent:
        self.log_error('Parent not found, cannot create folder: {0}'.format(path))
        return synapse_folder

    folder_name = os.path.basename(path)
    bad_name_chars = Utils.get_invalid_synapse_name_chars(folder_name)
    if bad_name_chars:
        self.log_error(
            'Folder name: "{0}" contains invalid characters: "{1}"'.format(path, ''.join(bad_name_chars)))
        return synapse_folder

    full_synapse_path = self.get_synapse_path(folder_name, synapse_parent)
    syn_folder_id = await SynapseProxy.findEntityIdAsync(folder_name, parent=synapse_parent)
    if syn_folder_id:
        synapse_folder = await SynapseProxy.getAsync(syn_folder_id, downloadFile=False)
        self.set_synapse_parent(synapse_folder)
        self.add_processed_path(path)
        self.write_csv_line(path, full_synapse_path, synapse_folder.id, is_remote_only=is_remote_only)
        logging.info('[Folder EXISTS]: {0} -> {1}'.format(path, full_synapse_path))
    else:
        # Retry the store a few times with a short random backoff.
        max_attempts = 5
        attempt_number = 0
        exception = None
        while attempt_number < max_attempts and not synapse_folder:
            try:
                attempt_number += 1
                exception = None
                synapse_folder = await SynapseProxy.storeAsync(
                    Folder(name=folder_name, parent=synapse_parent), forceVersion=False)
            except Exception as ex:
                exception = ex
                self.log_error('[Folder ERROR] {0} -> {1} : {2}'.format(path, full_synapse_path, str(ex)))
                if attempt_number < max_attempts:
                    sleep_time = random.randint(1, 5)
                    logging.info('[Folder RETRY in {0}s] {1} -> {2}'.format(sleep_time, path, full_synapse_path))
                    # BUG FIX: asyncio.sleep() returns a coroutine that must be
                    # awaited -- without `await` the backoff never happened and
                    # the coroutine was silently discarded.
                    await asyncio.sleep(sleep_time)
        if exception:
            self.log_error('[Folder FAILED] {0} -> {1} : {2}'.format(path, full_synapse_path, str(exception)))
        else:
            self.add_processed_path(path)
            self.write_csv_line(path, full_synapse_path, synapse_folder.id, is_remote_only=is_remote_only)
            logging.info('[Folder CREATED] {0} -> {1}'.format(path, full_synapse_path))
            self.set_synapse_parent(synapse_folder)
    return synapse_folder
def createAndSendReport(email, apiKey, recipientIds, parentFolder):
    """Generate a Synapse storage report, file it under a year-named folder,
    and message the recipients with a link.

    :param email:        Synapse account email used to log in
    :param apiKey:       Synapse API key for that account
    :param recipientIds: principal IDs to notify when the report is ready
    :param parentFolder: Synapse ID of the folder holding the yearly subfolders
    """
    syn = synapseclient.Synapse()
    syn.login(email=email, apiKey=apiKey)
    print("Making call to Synapse to generate a storage report")
    response = syn.restPOST("/storageReport/async/start", body="{\n\t\"reportType\":\"ALL_PROJECTS\"\n}")
    status = syn.restGET("/storageReport/async/get/" + response['token'])
    # Poll the async job with exponential backoff until it finishes.
    sleepDuration = 1
    while ('jobState' in status):
        if (status['jobState'] == 'FAILED'):
            # BUG FIX: `status` is a dict, so the former attribute access
            # (status.errorMessage) raised AttributeError on the failure
            # path instead of reporting the job error; index into the dict.
            sys.exit("Job failed. More info:\n" + status['errorMessage'] + "\n" + status['errorDetails'])
        else:
            print("Job is still processing. Sleeping for " + str(sleepDuration) + " seconds...")
            sleep(sleepDuration)
            sleepDuration = sleepDuration * 2
            status = syn.restGET("/storageReport/async/get/" + response['token'])
    print("Job complete. Creating an entity in Synapse with the report file handle.")
    # Put the report in a folder named the current year, 'YYYY'. Create the folder if it doesn't exist.
    childFolder = syn.findEntityId(currentYear, parent=parentFolder)
    if (childFolder is None):
        childFolder = syn.store(Folder(currentYear, parent=parentFolder))
    reportName = "synapse-storage-stats_" + currentDate + ".csv"
    report = File(path=None, name=reportName, parent=childFolder,
                  dataFileHandleId=status['resultsFileHandleId'])
    report = syn.store(report)
    # Change the "downloadAs" name for convenience. Otherwise it will be Job-<job-id>.csv
    synapseutils.changeFileMetaData(syn, report['id'], downloadAs=reportName, contentType="text/csv")
    print("Sending notification to the specified recipients.")
    syn.sendMessage(recipientIds,
                    "Synapse Storage Report for " + currentDate,
                    "<a href=\"https://www.synapse.org/#!Synapse:" + report['id'] +
                    "\">Click here to view the report.</a>",
                    contentType="text/html")
    print("Job complete. Exiting!")
    # BUG FIX: use sys.exit() instead of the site-module builtin exit(),
    # which is intended only for interactive sessions.
    sys.exit(0)
def create_folder_in_synapse(self, path):
    """Create (or, in dry-run mode, fake) the Synapse Folder corresponding to
    a local directory path, register it in the path cache, and return it."""
    full_synapse_path, synapse_parent, folder_name = self.to_synapse_path(path)
    logging.info('Processing Folder: {0}\n -> {1}'.format(path, full_synapse_path))

    synapse_folder = Folder(folder_name, parent=synapse_parent)
    if self._dry_run:
        # Give the folder a fake id so it doesn't blow up when this folder is used as a parent.
        synapse_folder.id = 'syn0'
    else:
        synapse_folder = self._synapse_client.store(synapse_folder, forceVersion=False)

    self.set_synapse_folder(full_synapse_path, synapse_folder)
    return synapse_folder
def test_syncFromSynapse__project_contains_empty_folder():
    """Empty folders are traversed (and fetched) but only files are returned."""
    project = Project(name="the project", parent="whatever", id="syn123")
    file = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")

    # Both the folder and the file are fetched with identical options.
    common_kwargs = dict(downloadLocation=None, ifcollision='overwrite.local', followLink=False)
    with patch.object(syn, "getChildren", side_effect=[[folder, file], []]) as patch_syn_get_children, \
            patch.object(syn, "get", side_effect=[folder, file]) as patch_syn_get:
        assert_equals([file], synapseutils.syncFromSynapse(syn, project))
        assert_list_equal([call(project['id']), call(folder['id'])],
                          patch_syn_get_children.call_args_list)
        assert_list_equal([call(folder['id'], **common_kwargs),
                           call(file['id'], **common_kwargs)],
                          patch_syn_get.call_args_list)
def test_special_characters():
    """Round-trip non-ASCII text in folder names, descriptions, and annotations."""
    description = u'A test for special characters such as Déjà vu, ประเทศไทย, and 中国'
    folder = syn.store(Folder(u'Special Characters Here',
                              parent=project,
                              description=description,
                              hindi_annotation=u'बंदर बट',
                              russian_annotation=u'Обезьяна прикладом',
                              weird_german_thing=u'Völlerei lässt grüßen'))
    assert folder.name == u'Special Characters Here'
    assert folder.parentId == project.id
    assert folder.description == description, u'description= %s' % folder.description
    # Annotation values come back wrapped in single-element lists
    assert folder.weird_german_thing[0] == u'Völlerei lässt grüßen'
    assert folder.hindi_annotation[0] == u'बंदर बट'
    assert folder.russian_annotation[0] == u'Обезьяна прикладом'
def test_setPermissions__default_permissions():
    """With no accessType argument, setPermissions grants READ + DOWNLOAD to
    the principal and stores the updated ACL."""
    entity = Folder(name="folder", parent="syn456", id="syn1")
    principalId = 123
    empty_acl = {'resourceAccess': []}
    expected_acl = {
        'resourceAccess': [{u'accessType': ["READ", "DOWNLOAD"], u'principalId': principalId}]
    }
    with patch.object(syn, "_getBenefactor", return_value=entity), \
            patch.object(syn, "_getACL", return_value=empty_acl), \
            patch.object(syn, "_storeACL", return_value=expected_acl) as patch_store_acl:
        assert_equal(expected_acl, syn.setPermissions(entity, principalId))
        patch_store_acl.assert_called_once_with(entity, expected_acl)
def test_dispose(syn_client, syn_test_helper, temp_file):
    """dispose() must delete every tracked object type (project, folder, file,
    team, wiki, child wiki) and empty the trash queue."""
    project = syn_client.store(Project(name=syn_test_helper.uniq_name()))
    folder = syn_client.store(
        Folder(name=syn_test_helper.uniq_name(prefix='Folder '), parent=project))
    file = syn_client.store(
        File(name=syn_test_helper.uniq_name(prefix='File '), path=temp_file, parent=folder))
    team = syn_client.store(
        Team(name=syn_test_helper.uniq_name(prefix='Team ')))
    wiki = syn_client.store(
        Wiki(title=syn_test_helper.uniq_name(prefix='Wiki '), owner=project))
    wikiChild = syn_client.store(
        Wiki(title=syn_test_helper.uniq_name(prefix='Wiki Child '), owner=project, parentWikiId=wiki.id))
    syn_objects = [project, folder, file, team, wiki, wikiChild]
    for syn_obj in syn_objects:
        syn_test_helper.dispose_of(syn_obj)
        assert syn_obj in syn_test_helper._trash
    syn_test_helper.dispose()
    assert len(syn_test_helper._trash) == 0
    # Every disposed object must now be gone; each type uses its own getter.
    for syn_obj in syn_objects:
        with pytest.raises(synapseclient.core.exceptions.SynapseHTTPError) as ex:
            if isinstance(syn_obj, Wiki):
                syn_client.getWiki(syn_obj)
            elif isinstance(syn_obj, Team):
                syn_client.getTeam(syn_obj.id)
            else:
                syn_client.get(syn_obj, downloadFile=False)
        err_str = str(ex.value)
        assert "Not Found" in err_str or "cannot be found" in err_str \
            or "is in trash can" in err_str or "does not exist" in err_str
    # BUG FIX: a bare `except:` also swallows SystemExit/KeyboardInterrupt;
    # only a failed filesystem removal should be ignored here.
    try:
        os.remove(temp_file)
    except OSError:
        pass
def test_Entity():
    """End-to-end CRUD test for Project, Folder, File and Link entities,
    including unicode fields, file versioning, fileNameOverride and the
    download cache. Relies on module-level `syn`, `schedule_for_cleanup`
    and test utilities."""
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert project.name == project_name

    # Create and get a Folder
    folder = Folder('Test Folder', parent=project, description='A place to put my junk', foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'A place to put my junk'
    # Annotation values round-trip as lists; compare the single element.
    assert folder.foo[0] == 1000

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'The rejects from the other folder'
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder,
                  description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value', bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15),
                  band=u"Motörhead", lunch=u"すし")
    a_file = syn.store(a_file)
    assert a_file.path == path

    a_file = syn.getEntity(a_file)
    assert a_file.description == u'Description with funny characters: Déjà vu, ประเทศไทย, 中国', u'description= %s' % a_file.description
    assert a_file['foo'][0] == 'An arbitrary value', u'foo= %s' % a_file['foo'][0]
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.contentType == 'text/flapdoodle', u'contentType= %s' % a_file.contentType
    assert a_file['band'][0] == u"Motörhead", u'band= %s' % a_file['band'][0]
    assert a_file['lunch'][0] == u"すし", u'lunch= %s' % a_file['lunch'][0]

    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(path, a_file.path)

    # Constructing a File with both a path and a dataFileHandleId must fail.
    assert_raises(ValueError, File, a_file.path, parent=folder, dataFileHandleId=56456)

    # A File built from an existing dataFileHandleId shares that handle.
    b_file = File(name="blah", parent=folder, dataFileHandleId=a_file.dataFileHandleId)
    b_file = syn.store(b_file)
    assert b_file.dataFileHandleId == a_file.dataFileHandleId

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.updateEntity(a_file)
    assert a_file['foo'][0] == 'Another arbitrary chunk of text data'
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.new_key[0] == 'A newly created value'
    assert a_file.path == path
    # Same file contents, so this should still be version 1.
    assert a_file.versionNumber == 1, "unexpected version number: " + str(a_file.versionNumber)

    # Test create, store, get Links
    link = Link(a_file['id'], targetVersion=a_file.versionNumber, parent=project)
    link = syn.store(link)
    assert link['linksTo']['targetId'] == a_file['id']
    assert link['linksTo']['targetVersionNumber'] == a_file.versionNumber
    assert link['linksToClassName'] == a_file['concreteType']

    testLink = syn.get(link)
    assert testLink == link

    # Following the link should yield the target File's annotations and path.
    link = syn.get(link, followLink=True)
    assert link['foo'][0] == 'Another arbitrary chunk of text data'
    assert link['bar'] == [33, 44, 55]
    assert link['bday'][0] == Datetime(2013, 3, 15)
    assert link.new_key[0] == 'A newly created value'
    assert utils.equal_paths(link.path, path)
    assert link.versionNumber == 1, "unexpected version number: " + str(a_file.versionNumber)

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    # New contents bumps the entity to version 2.
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)

    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)

    ## test file name override
    a_file.fileNameOverride = "peaches_en_regalia.zoinks"
    syn.store(a_file)
    ## TODO We haven't defined how filename override interacts with
    ## TODO previously cached files so, side-step that for now by
    ## TODO making sure the file is not in the cache!
    syn.cache.remove(a_file.dataFileHandleId, delete=True)
    a_file_retreived = syn.get(a_file, downloadLocation=tmpdir)
    assert os.path.basename(a_file_retreived.path) == a_file.fileNameOverride, os.path.basename(a_file_retreived.path)

    ## test getting the file from the cache with downloadLocation parameter (SYNPY-330)
    a_file_cached = syn.get(a_file.id, downloadLocation=tmpdir)
    assert a_file_cached.path is not None
    assert os.path.basename(a_file_cached.path) == a_file.fileNameOverride, a_file_cached.path

    print("\n\nList of files in project:\n")
    syn._list(project, recursive=True)
def test_Entity():
    """End-to-end CRUD test for Project, Folder, File and Link entities
    using nose-style assertions, including unicode fields, versioned and
    unversioned Links, file versioning and the download cache. Relies on
    module-level `syn`, `schedule_for_cleanup` and test utilities."""
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert_equals(project.name, project_name)

    # Create and get a Folder
    folder = Folder('Test Folder', parent=project, description='A place to put my junk', foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert_equals(folder.name, 'Test Folder')
    assert_equals(folder.parentId, project.id)
    assert_equals(folder.description, 'A place to put my junk')
    # Annotation values round-trip as lists; compare the single element.
    assert_equals(folder.foo[0], 1000)

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert_equals(folder.name, 'Test Folder')
    assert_equals(folder.parentId, project.id)
    assert_equals(folder.description, 'The rejects from the other folder')
    assert_equals(folder.pi[0], 3.14159265359)

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder,
                  description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value', bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15),
                  band=u"Motörhead", lunch=u"すし")
    a_file = syn.store(a_file)
    assert_equals(a_file.path, path)

    a_file = syn.getEntity(a_file)
    assert_equals(a_file.description,
                  u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  u'description= %s' % a_file.description)
    assert_equals(a_file['foo'][0], 'An arbitrary value', u'foo= %s' % a_file['foo'][0])
    assert_equals(a_file['bar'], [33, 44, 55])
    assert_equals(a_file['bday'][0], Datetime(2013, 3, 15))
    assert_equals(a_file.contentType, 'text/flapdoodle', u'contentType= %s' % a_file.contentType)
    assert_equals(a_file['band'][0], u"Motörhead", u'band= %s' % a_file['band'][0])
    assert_equals(a_file['lunch'][0], u"すし", u'lunch= %s' % a_file['lunch'][0])

    a_file = syn.downloadEntity(a_file)
    assert_true(filecmp.cmp(path, a_file.path))

    # A File built from an existing dataFileHandleId shares that handle.
    b_file = File(name="blah", parent=folder, dataFileHandleId=a_file.dataFileHandleId)
    b_file = syn.store(b_file)
    assert_equals(b_file.dataFileHandleId, a_file.dataFileHandleId)

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.updateEntity(a_file)
    assert_equals(a_file['foo'][0], 'Another arbitrary chunk of text data')
    assert_equals(a_file['bar'], [33, 44, 55])
    assert_equals(a_file['bday'][0], Datetime(2013, 3, 15))
    assert_equals(a_file.new_key[0], 'A newly created value')
    assert_equals(a_file.path, path)
    # Same file contents, so this should still be version 1.
    assert_equals(a_file.versionNumber, 1, "unexpected version number: " + str(a_file.versionNumber))

    # Test create, store, get Links
    # If version isn't specified, targetVersionNumber should not be set
    link = Link(a_file['id'], parent=project)
    link = syn.store(link)
    assert_equals(link['linksTo']['targetId'], a_file['id'])
    assert_is_none(link['linksTo'].get('targetVersionNumber'))
    assert_equals(link['linksToClassName'], a_file['concreteType'])

    # A versioned Link records the target's version number.
    link = Link(a_file['id'], targetVersion=a_file.versionNumber, parent=project)
    link = syn.store(link)
    assert_equals(link['linksTo']['targetId'], a_file['id'])
    assert_equals(link['linksTo']['targetVersionNumber'], a_file.versionNumber)
    assert_equals(link['linksToClassName'], a_file['concreteType'])

    testLink = syn.get(link)
    assert_equals(testLink, link)

    # Following the link should yield the target File's annotations and path.
    link = syn.get(link, followLink=True)
    assert_equals(link['foo'][0], 'Another arbitrary chunk of text data')
    assert_equals(link['bar'], [33, 44, 55])
    assert_equals(link['bday'][0], Datetime(2013, 3, 15))
    assert_equals(link.new_key[0], 'A newly created value')
    assert_true(utils.equal_paths(link.path, path))
    assert_equals(link.versionNumber, 1, "unexpected version number: " + str(a_file.versionNumber))

    # Links can also target non-versionable entities such as Folders.
    newfolder = Folder('Testing Folder', parent=project)
    newfolder = syn.store(newfolder)
    link = Link(newfolder, parent=folder.id)
    link = syn.store(link)
    assert_equals(link['linksTo']['targetId'], newfolder.id)
    assert_equals(link['linksToClassName'], newfolder['concreteType'])
    assert_is_none(link['linksTo'].get('targetVersionNumber'))

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert_true(filecmp.cmp(new_path, a_file.path))
    # New contents bumps the entity to version 2.
    assert_equals(a_file.versionNumber, 2)

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert_true(filecmp.cmp(old_random_data.path, path))

    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)

    # test getting the file from the cache with downloadLocation parameter (SYNPY-330)
    a_file_cached = syn.get(a_file.id, downloadLocation=tmpdir)
    assert_is_not_none(a_file_cached.path)
    assert_equal(os.path.basename(a_file_cached.path), os.path.basename(a_file.path))
def test_Entity():
    """End-to-end CRUD test for Project, Folder and File entities, including
    unicode fields and file versioning. Relies on module-level `syn`,
    `schedule_for_cleanup` and test utilities."""
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert project.name == project_name

    # Create and get a Folder
    folder = Folder("Test Folder", parent=project, description="A place to put my junk", foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert folder.name == "Test Folder"
    assert folder.parentId == project.id
    assert folder.description == "A place to put my junk"
    # Annotation values round-trip as lists; compare the single element.
    assert folder.foo[0] == 1000

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = "The rejects from the other folder"
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == "Test Folder"
    assert folder.parentId == project.id
    assert folder.description == "The rejects from the other folder"
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(
        path,
        parent=folder,
        description="Description with funny characters: Déjà vu, ประเทศไทย, 中国",
        contentType="text/flapdoodle",
        foo="An arbitrary value",
        bar=[33, 44, 55],
        bday=Datetime(2013, 3, 15),
        band="Motörhead",
        lunch="すし",
    )
    a_file = syn.store(a_file)
    assert a_file.path == path

    a_file = syn.getEntity(a_file)
    assert a_file.description == "Description with funny characters: Déjà vu, ประเทศไทย, 中国", (
        "description= %s" % a_file.description
    )
    assert a_file["foo"][0] == "An arbitrary value", "foo= %s" % a_file["foo"][0]
    assert a_file["bar"] == [33, 44, 55]
    assert a_file["bday"][0] == Datetime(2013, 3, 15)
    assert a_file.contentType == "text/flapdoodle", "contentType= %s" % a_file.contentType
    assert a_file["band"][0] == "Motörhead", "band= %s" % a_file["band"][0]
    assert a_file["lunch"][0] == "すし", "lunch= %s" % a_file["lunch"][0]

    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(path, a_file.path)

    # Update the File
    a_file.path = path
    a_file["foo"] = "Another arbitrary chunk of text data"
    a_file["new_key"] = "A newly created value"
    a_file = syn.updateEntity(a_file)
    assert a_file["foo"][0] == "Another arbitrary chunk of text data"
    assert a_file["bar"] == [33, 44, 55]
    assert a_file["bday"][0] == Datetime(2013, 3, 15)
    assert a_file.new_key[0] == "A newly created value"
    assert a_file.path == path
    # Same file contents, so this should still be version 1.
    assert a_file.versionNumber == 1

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    # New contents bumps the entity to version 2.
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)
def test_get_and_store():
    """Test synapse.get and synapse.store in Project, Folder and File"""
    # A scratch project to contain everything created below.
    proj = syn.store(Project(name=str(uuid.uuid4()), description='A bogus test project'))
    schedule_for_cleanup(proj)

    # Store a Folder, then fetch it back and check every field round-tripped.
    stored_folder = syn.store(Folder('Bad stuff', parent=proj,
                                     description='The rejects from the other fauxldurr', pi=3))
    fetched_folder = syn.get(stored_folder.id)
    assert fetched_folder.name == 'Bad stuff'
    assert fetched_folder.parentId == proj.id
    assert fetched_folder.description == 'The rejects from the other fauxldurr'
    assert fetched_folder.pi[0] == 3

    # Mutate the folder, store it, and confirm the updates persisted.
    fetched_folder.pi = 3.14159265359
    fetched_folder.description = 'The rejects from the other folder'
    syn.store(fetched_folder)
    fetched_folder = syn.get(fetched_folder)
    assert fetched_folder.name == 'Bad stuff'
    assert fetched_folder.parentId == proj.id
    assert fetched_folder.description == 'The rejects from the other folder'
    assert fetched_folder.pi[0] == 3.14159265359

    # Upload a File into the folder.
    data_path = utils.make_bogus_data_file()
    schedule_for_cleanup(data_path)
    random_data = syn.store(File(data_path, parent=fetched_folder,
                                 description='Random data', foo=9844))

    # The downloaded copy must match what was uploaded.
    downloaded = syn.downloadEntity(random_data)
    assert filecmp.cmp(data_path, downloaded.path)
    assert random_data.foo[0] == 9844

    # Replace the file contents and an annotation; this creates version 2.
    replacement_path = utils.make_bogus_data_file()
    schedule_for_cleanup(replacement_path)
    random_data.path = replacement_path
    random_data.foo = 1266
    random_data = syn.store(random_data)
    assert random_data.versionNumber == 2

    # Re-fetch and confirm the updates stuck.
    downloaded = syn.get(random_data)
    assert downloaded.path is not None
    assert filecmp.cmp(replacement_path, downloaded.path)
    assert downloaded.foo[0] == 1266
    assert downloaded.versionNumber == 2

    # Version 1 must still be retrievable with the original contents.
    old_random_data = syn.get(random_data.id, version=1)
    assert filecmp.cmp(old_random_data.path, data_path)
def test_Entity():
    """End-to-end CRUD test for Project, Folder and File entities using the
    internal `_createFileEntity` helper, including file versioning. Relies
    on module-level `syn`, `schedule_for_cleanup` and test utilities."""
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert project.name == project_name

    # Create and get a Folder
    folder = Folder('Test Folder', parent=project, description='A place to put my junk', foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'A place to put my junk'
    # Annotation values round-trip as lists; compare the single element.
    assert folder.foo[0] == 1000

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'The rejects from the other folder'
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder,
                  description='Random data for testing',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value', bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15))
    # NOTE(review): uses the private _createFileEntity helper rather than
    # syn.store — presumably exercising the lower-level upload path.
    a_file = syn._createFileEntity(a_file)
    assert a_file.path == path

    a_file = syn.getEntity(a_file)
    assert a_file['foo'][0] == 'An arbitrary value'
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.contentType == 'text/flapdoodle'

    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(path, a_file.path)

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.updateEntity(a_file)
    assert a_file['foo'][0] == 'Another arbitrary chunk of text data'
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.new_key[0] == 'A newly created value'
    assert a_file.path == path
    # Same file contents, so this should still be version 1.
    assert a_file.versionNumber == 1

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    # New contents bumps the entity to version 2.
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)
def test_Entity():
    """End-to-end CRUD test for Project, Folder and File entities, including
    unicode fields and file versioning. Relies on module-level `syn`,
    `schedule_for_cleanup` and test utilities."""
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert project.name == project_name

    # Create and get a Folder
    folder = Folder('Test Folder', parent=project, description='A place to put my junk', foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'A place to put my junk'
    # Annotation values round-trip as lists; compare the single element.
    assert folder.foo[0] == 1000

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'The rejects from the other folder'
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder,
                  description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value', bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15),
                  band=u"Motörhead", lunch=u"すし")
    a_file = syn.store(a_file)
    assert a_file.path == path

    a_file = syn.getEntity(a_file)
    assert a_file.description == u'Description with funny characters: Déjà vu, ประเทศไทย, 中国', u'description= %s' % a_file.description
    assert a_file['foo'][0] == 'An arbitrary value', u'foo= %s' % a_file['foo'][0]
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.contentType == 'text/flapdoodle', u'contentType= %s' % a_file.contentType
    assert a_file['band'][0] == u"Motörhead", u'band= %s' % a_file['band'][0]
    assert a_file['lunch'][0] == u"すし", u'lunch= %s' % a_file['lunch'][0]

    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(path, a_file.path)

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.updateEntity(a_file)
    assert a_file['foo'][0] == 'Another arbitrary chunk of text data'
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.new_key[0] == 'A newly created value'
    assert a_file.path == path
    # Same file contents, so this should still be version 1.
    assert a_file.versionNumber == 1, "unexpected version number: " + str(a_file.versionNumber)

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    # New contents bumps the entity to version 2.
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)