def create_wiki(evaluation, challenge_home_entity, team, leaderboard_columns):
    """
    Create landing page for challenge and a sub-page for a leaderboard.

    Note that, while this code demonstrates programmatic generation of wiki
    markdown including leader board table widget, the typical method for
    creating and editing such content is via the Synapse web portal
    (www.synapse.org).
    """
    # Root wiki page: fill the project-level markdown template with the
    # challenge title, participant team and evaluation queue id.
    wiki = Wiki(owner=challenge_home_entity,
                markdown=CHALLENGE_PROJECT_WIKI.format(
                    title=CHALLENGE_PROJECT_NAME,
                    teamId=team['id'],
                    evalId=evaluation.id))
    wiki = syn.store(wiki)

    # Leaderboard sub-page: embeds a supertable widget for the evaluation.
    supertable = create_supertable_leaderboard(evaluation, leaderboard_columns)
    lb_wiki = Wiki(title="Leaderboard",
                   owner=challenge_home_entity,
                   parentWikiId=wiki.id,
                   markdown=LEADERBOARD_MARKDOWN.format(
                       evaluation_name=evaluation.name,
                       supertable=supertable))
    lb_wiki = syn.store(lb_wiki)

    return (wiki, lb_wiki)
def test_create_or_update_wiki():
    """Re-storing a wiki for an owner that already has one raises (SYNR-631)."""
    # First store succeeds and creates the page.
    syn.store(Wiki(title='This is the title',
                   owner=project,
                   markdown="#Wikis are OK\n\nBlabber jabber blah blah blither blather bonk!"))

    # Storing a second wiki for the same owner currently raises, even with
    # createOrUpdate=True -- see SYNR-631.
    duplicate = Wiki(
        title='This is a different title',
        owner=project,
        markdown="#Wikis are awesome\n\nNew babble boo flabble gibber wiggle sproing!")
    assert_raises(SynapseHTTPError, syn.store, duplicate, createOrUpdate=True)
def setup(self):
    # Fixture: builds a project holding a three-level wiki hierarchy
    # (root -> sub -> sub-sub) plus a second, empty project to copy into.

    # Create a Project
    self.project_entity = syn.store(Project(name=str(uuid.uuid4())))
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    file_entity = syn.store(File(filename, parent=self.project_entity))

    schedule_for_cleanup(self.project_entity.id)
    schedule_for_cleanup(filename)
    schedule_for_cleanup(file_entity.id)

    # Create mock wiki. The bare synIDs in the markdown exist so the
    # copy/update logic can be checked against self.fileMapping below.
    md = """
    This is a test wiki
    =======================

    Blabber jabber blah blah
    boo.
    syn123
    syn456
    """

    wiki = Wiki(owner=self.project_entity, title='A Test Wiki', markdown=md,
                attachments=[attachname])
    wiki = syn.store(wiki)

    # Create a Wiki sub-page whose markdown is just the file's synID.
    subwiki = Wiki(owner=self.project_entity, title='A sub-wiki',
                   markdown='%s' % file_entity.id, parentWikiId=wiki.id)
    self.subwiki = syn.store(subwiki)

    # Sub-sub page exercising internal wiki links of the form
    # #!Synapse:<projectId>/wiki/<wikiId>.
    second_md = """
    Testing internal links
    ======================

    [test](#!Synapse:%s/wiki/%s)

    %s)
    """ % (self.project_entity.id, self.subwiki.id, file_entity.id)

    sub_subwiki = Wiki(owner=self.project_entity, title='A sub-sub-wiki',
                       markdown=second_md, parentWikiId=self.subwiki.id,
                       attachments=[attachname])
    self.sub_subwiki = syn.store(sub_subwiki)

    # Set up the second project
    self.second_project = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(self.second_project.id)

    # Old synID -> new synID mapping used when rewriting copied markdown.
    self.fileMapping = {'syn123': 'syn12345', 'syn456': 'syn45678'}

    self.first_headers = syn.getWikiHeaders(self.project_entity)
def test_dispose(syn_client, syn_test_helper, temp_file):
    """Disposing the helper deletes every Synapse object it tracked.

    Creates one of each supported object type, registers them with the
    helper, disposes, and verifies each is gone on the server.
    """
    project = syn_client.store(Project(name=syn_test_helper.uniq_name()))
    folder = syn_client.store(
        Folder(name=syn_test_helper.uniq_name(prefix='Folder '), parent=project))
    file = syn_client.store(
        File(name=syn_test_helper.uniq_name(prefix='File '), path=temp_file, parent=folder))
    team = syn_client.store(
        Team(name=syn_test_helper.uniq_name(prefix='Team ')))
    wiki = syn_client.store(
        Wiki(title=syn_test_helper.uniq_name(prefix='Wiki '), owner=project))
    wikiChild = syn_client.store(
        Wiki(title=syn_test_helper.uniq_name(prefix='Wiki Child '),
             owner=project, parentWikiId=wiki.id))

    syn_objects = [project, folder, file, team, wiki, wikiChild]

    for syn_obj in syn_objects:
        syn_test_helper.dispose_of(syn_obj)
        assert syn_obj in syn_test_helper._trash

    syn_test_helper.dispose()
    assert len(syn_test_helper._trash) == 0

    # Every tracked object must now be unreachable on Synapse.
    for syn_obj in syn_objects:
        with pytest.raises(
                synapseclient.core.exceptions.SynapseHTTPError) as ex:
            if isinstance(syn_obj, Wiki):
                syn_client.getWiki(syn_obj)
            elif isinstance(syn_obj, Team):
                syn_client.getTeam(syn_obj.id)
            else:
                syn_client.get(syn_obj, downloadFile=False)
        err_str = str(ex.value)
        assert "Not Found" in err_str or "cannot be found" in err_str \
            or "is in trash can" in err_str or "does not exist" in err_str

    # BUG FIX: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt. Only a missing/locked file is expected here.
    try:
        os.remove(temp_file)
    except OSError:
        pass
def test_Wiki__markdown_defined_markdownFile_is_None():
    """Markdown passed directly is stored verbatim on the Wiki object."""
    text = ("Somebody once told me the OS was gonna delete me. "
            "I'm not the largest file on the disk.")

    # method under test
    page = Wiki(owner="doesn't matter", markdown=text)

    assert page.markdown == text
def setup(self):
    """Setting up all variables for tests"""
    # Markdown exercising every form mirrorwiki must remap: a plain wiki
    # link, a URL-encoded wiki link, and a bare synID.
    self.markdown = "test\nsyn123/wiki/2222\nsyn123%2Fwiki%2F2222\nsyn123"
    # Source wiki page id -> destination wiki page id.
    self.wiki_mapping = {'2222': '5555'}
    self.entity = Project(name="foo", id="syn123")
    self.destination = Project(name="test", id="syn555")
    # What self.markdown should look like after remapping.
    self.expected_markdown = (
        "test\nsyn555/wiki/5555\nsyn555%2Fwiki%2F5555\nsyn555")
    # Autospec'd client: no network calls happen during these tests.
    self.syn = mock.create_autospec(Synapse)
    self.entity_wiki = Wiki(markdown=self.markdown, id="2222",
                            owner="syn123",
                            attachmentFileHandleIds=['322', '333'])
    # One preview handle (expected to be filtered out) and one regular one.
    self.filehandles = [{
        'fileHandle': {
            "concreteType": mirrorwiki.PREVIEW_FILE_HANDLE,
            "contentType": "contenttype",
            "fileName": "name"
        }
    }, {
        'fileHandle': {
            "concreteType": "not_preview",
            "contentType": "testing",
            "fileName": "foobar"
        }
    }]
    self.new_filehandle = [{'newFileHandle': {"id": "12356"}}]
    # wiki page mapping {'title': Wiki}
    self.entity_wiki_pages = {'test': self.entity_wiki}
    self.destination_wiki_pages = {'test': self.entity_wiki}
    # wiki headers
    self.entity_wiki_headers = [{"id": "2222", "title": "test"}]
    self.destination_wiki_headers = [{"id": "5555", "title": "test"}]
def create_leaderboard_wiki(intra_cell_type_evaluation,
                            inter_cell_type_evaluation,
                            regression_evaluation):
    # Builds a "Leaderboard" wiki sub-page containing one supertable widget
    # per evaluation queue (two classification queues + one regression).
    # NOTE(review): relies on module-level `project`, `wiki`, `syn`,
    # `classification_leaderboard_columns` and
    # `regression_leaderboard_columns` -- confirm these are defined before
    # this function is called.
    LEADERBOARD_MARKDOWN = """\
## Within Cell Type Classification

{intra_cell_type_classification_supertable}

## Between Cell Type Classification

{inter_cell_type_classification_supertable}

## Between Cell Type Regression

{inter_cell_type_regression_supertable}
"""
    leaderboard_wiki_text = LEADERBOARD_MARKDOWN.format(
        intra_cell_type_classification_supertable=create_supertable_leaderboard(
            intra_cell_type_evaluation, classification_leaderboard_columns),
        inter_cell_type_classification_supertable=create_supertable_leaderboard(
            inter_cell_type_evaluation, classification_leaderboard_columns),
        inter_cell_type_regression_supertable=create_supertable_leaderboard(
            regression_evaluation, regression_leaderboard_columns))
    lb_wiki = Wiki(title="Leaderboard",
                   owner=project,
                   parentWikiId=wiki.id,
                   markdown=leaderboard_wiki_text)
    lb_wiki = syn.store(lb_wiki)
def test_Wiki():
    """Test the construction and accessors of Wiki objects."""
    # A title alone is not enough -- the constructor rejects it.
    with pytest.raises(ValueError):
        Wiki(title='foo')

    # A wiki with title, markdown and owner constructs without error.
    Wiki(title='foobar2', markdown='bar', owner={'id': '5'})
def mutate(self, info, name, permissions, annotations, wiki, folders, posts):
    """Create a Synapse project with optional permissions, folders,
    forum posts and a root wiki page; return the mutation payload."""
    # Build the annotations
    project_annotations = {}
    if annotations:
        for annotation in annotations:
            project_annotations[annotation['key']] = annotation['value']

    # Create the Project
    project = Synapse.client().store(
        Project(name=name, annotations=project_annotations)
    )

    # Add the permissions
    if permissions:
        for permission in permissions:
            principal_id = permission['principal_id']
            access = permission['access']
            # Resolve the access label (e.g. 'ADMIN') to the matching
            # '<ACCESS>_PERMS' constant on the Synapse wrapper class.
            access_type = getattr(Synapse, '{0}_PERMS'.format(access))
            Synapse.client().setPermissions(
                project,
                principal_id,
                accessType=access_type,
                warn_if_inherits=False
            )

    # Add the the folders
    if folders:
        for folder_name in folders:
            Synapse.client().store(Folder(name=folder_name, parent=project))

    # Add the posts
    if posts:
        # The project's forum id is required to attach threads.
        forum_id = Synapse.client().restGET(
            '/project/{0}/forum'.format(project.id)).get('id')
        for post in posts:
            body = {
                'forumId': forum_id,
                'title': post['title'],
                'messageMarkdown': post['message_markdown']
            }
            Synapse.client().restPOST("/thread", body=json.dumps(body))

    # Add the wiki
    if wiki:
        Synapse.client().store(Wiki(title=wiki.title,
                                    markdown=wiki.markdown,
                                    owner=project))

    new_syn_project = SynProject.from_project(project)
    return CreateSynProject(syn_project=new_syn_project)
def test_Wiki__markdown_is_None_markdownFile_defined():
    """When only markdownFile is given, the file is opened and read."""
    path = "/somewhere/over/the/rainbow.txt"

    with patch("synapseclient.wiki.open", mock_open(), create=True) as opened, \
            patch("os.path.isfile", return_value=True):
        # method under test
        Wiki(owner="doesn't matter", markdownFile=path)

        opened.assert_called_once_with(path, 'r')
        opened().read.assert_called_once_with()
def test_create_or_update_wiki():
    """Storing with createOrUpdate=True replaces the owner's existing wiki."""
    # create wiki once
    syn.store(Wiki(
        title='This is the title',
        owner=project,
        markdown="#Wikis are OK\n\nBlabber jabber blah blah blither blather bonk!"))

    # Store again with createOrUpdate=True: the existing page is updated.
    new_title = 'This is a different title'
    replacement = Wiki(
        title=new_title,
        owner=project,
        markdown="#Wikis are awesome\n\nNew babble boo flabble gibber wiggle sproing!")
    wiki = syn.store(replacement, createOrUpdate=True)
    assert_equal(new_title, syn.getWiki(wiki.ownerId)['title'])
def create_wiki(self, **kwargs):
    """Creates a new Wiki and adds it to the trash queue."""
    # 'prefix' is consumed here; it is not a Wiki constructor argument.
    prefix = kwargs.pop('prefix', '')
    if 'title' not in kwargs:
        kwargs['title'] = self.uniq_name(prefix=prefix)
    if 'markdown' not in kwargs:
        kwargs['markdown'] = 'My Wiki {0}'.format(kwargs['title'])

    new_wiki = SynapseProxy.client().store(Wiki(**kwargs))
    self.dispose_of(new_wiki)
    return new_wiki
def test_Wiki__with_markdown_file():
    """Markdown read from a file ends up on the Wiki's markdown attribute."""
    markdown_data = """
    MARK DOWN MARK DOWN MARK DOWN MARK DOWN
    AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
    adkflajsl;kfjasd;lfkjsal;kfajslkfjasdlkfj
    """
    markdown_path = "/somewhere/over/the/rainbow.txt"

    with patch("synapseclient.wiki.open",
               mock_open(read_data=markdown_data), create=True) as opened, \
            patch("os.path.isfile", return_value=True):
        # method under test
        page = Wiki(owner="doesn't matter", markdownFile=markdown_path)

        opened.assert_called_once_with(markdown_path, 'r')
        opened().read.assert_called_once_with()

    assert page.markdown == markdown_data
def test_copyFileHandleAndchangeFileMetadata():
    # Exercises synapseutils.copyFileHandles on a mixed batch (a file entity
    # plus a wiki attachment) and synapseutils.changeFileMetaData.
    project_entity = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    schedule_for_cleanup(attachname)
    file_entity = syn.store(File(filename, parent=project_entity))
    schedule_for_cleanup(file_entity.id)
    wiki = Wiki(owner=project_entity, title='A Test Wiki', markdown="testing",
                attachments=[attachname])
    wiki = syn.store(wiki)
    wikiattachments = syn._getFileHandle(wiki.attachmentFileHandleIds[0])

    # CHECK: Can batch copy two file handles (wiki attachments and file entity)
    copiedFileHandles = synapseutils.copyFileHandles(
        syn,
        [file_entity.dataFileHandleId, wiki.attachmentFileHandleIds[0]],
        [file_entity.concreteType.split(".")[-1], "WikiAttachment"],
        [file_entity.id, wiki.id],
        [file_entity.contentType, wikiattachments['contentType']],
        [file_entity.name, wikiattachments['fileName']])
    assert all([results.get("failureCode") is None
                for results in copiedFileHandles['copyResults']]), \
        "NOT FOUND and UNAUTHORIZED failure codes."

    files = {file_entity.name: {"contentType": file_entity['contentType'],
                                "md5": file_entity['md5']},
             wikiattachments['fileName']: {"contentType": wikiattachments['contentType'],
                                           "md5": wikiattachments['contentMd5']}}
    # Every copied handle must keep the original name, content type and md5.
    for results in copiedFileHandles['copyResults']:
        i = results['newFileHandle']
        assert files.get(i['fileName']) is not None, "Filename has to be the same"
        assert files[i['fileName']]['contentType'] == i['contentType'], "Content type has to be the same"
        assert files[i['fileName']]['md5'] == i['contentMd5'], "Md5 has to be the same"

    assert all([results.get("failureCode") is None
                for results in copiedFileHandles['copyResults']]), \
        "There should not be NOT FOUND and UNAUTHORIZED failure codes."

    # The remaining checks need a second set of test credentials; skip when
    # they are not configured.
    if 'username' not in other_user or 'password' not in other_user:
        sys.stderr.write('\nWarning: no test-authentication configured. '
                         'skipping testing copy function when trying to copy file made by another user.\n')
        return

    syn_other = synapseclient.Synapse(skip_checks=True)
    syn_other.login(other_user['username'], other_user['password'])

    # CHECK: UNAUTHORIZED failure code should be returned
    output = synapseutils.copyFileHandles(
        syn_other,
        [file_entity.dataFileHandleId, wiki.attachmentFileHandleIds[0]],
        [file_entity.concreteType.split(".")[-1], "WikiAttachment"],
        [file_entity.id, wiki.id],
        [file_entity.contentType, wikiattachments['contentType']],
        [file_entity.name, wikiattachments['fileName']])
    assert all([results.get("failureCode") == "UNAUTHORIZED"
                for results in output['copyResults']]), "UNAUTHORIZED codes."

    # CHECK: Changing content type and downloadAs
    new_entity = synapseutils.changeFileMetaData(syn, file_entity,
                                                 contentType="application/x-tar",
                                                 downloadAs="newName.txt")
    schedule_for_cleanup(new_entity.id)
    assert file_entity.md5 == new_entity.md5, "Md5s must be equal after copying"

    fileResult = syn._getFileHandleDownload(new_entity.dataFileHandleId, new_entity.id)
    assert fileResult['fileHandle']['fileName'] == "newName.txt", "Set new file name to be newName.txt"
    assert new_entity.contentType == "application/x-tar", "Set new content type to be application/x-tar"
def get_or_create_wiki(self, **kwargs) -> Wiki:
    """Gets an existing wiki or creates a new one.

    If parentWikiId is specified, a page will always be created. There are
    no restrictions on wiki titles on subwiki pages. Get doesn't work for
    subwiki pages.

    Args:
        Same arguments as synapseclient.Wiki

    Returns:
        Synapse wiki page
    """
    page = self._find_by_obj_or_create(Wiki(**kwargs))
    self.logger.info('{} Wiki {}'.format(self._update_str, page.title))
    return page
def test_wiki_version():
    """Earlier wiki versions stay retrievable via the version parameter."""
    # create a new project to avoid artifacts from previous tests
    fresh_project = syn.store(Project(name=str(uuid.uuid4())))

    page = syn.store(Wiki(
        title='Title version 1',
        owner=fresh_project,
        markdown="##A heading\n\nThis is version 1 of the wiki page!\n"))

    # Mutating and re-storing the page creates a second version.
    page.title = "Title version 2"
    page.markdown = "##A heading\n\nThis is version 2 of the wiki page!\n"
    page = syn.store(page)

    v1 = syn.getWiki(owner=page.ownerId, subpageId=page.id, version=0)
    assert "version 1" in v1.title
    assert "version 1" in v1.markdown

    v2 = syn.getWiki(owner=page.ownerId, subpageId=page.id, version=1)
    assert "version 2" in v2.title
    assert "version 2" in v2.markdown
def copyWiki(syn, entity, destinationId, entitySubPageId=None, destinationSubPageId=None,
             updateLinks=True, updateSynIds=True, entityMap=None):
    """
    Copies wikis and updates internal links

    :param syn:                     A synapse object: syn = synapseclient.login()- Must be logged into synapse

    :param entity:                  A synapse ID of an entity whose wiki you want to copy

    :param destinationId:           Synapse ID of a folder/project that the wiki wants to be copied to

    :param updateLinks:             Update all the internal links.
                                    (e.g. syn1234/wiki/34345 becomes syn3345/wiki/49508)
                                    Defaults to True

    :param updateSynIds:            Update all the synapse ID's referenced in the wikis.
                                    (e.g. syn1234 becomes syn2345)
                                    Defaults to True but needs an entityMap

    :param entityMap:               An entity map {'oldSynId','newSynId'} to update the synapse IDs
                                    referenced in the wiki.
                                    Defaults to None

    :param entitySubPageId:         Can specify subPageId and copy all of its subwikis
                                    Defaults to None, which copies the entire wiki.
                                    subPageId can be found:
                                    https://www.synapse.org/#!Synapse:syn123/wiki/1234
                                    In this case, 1234 is the subPageId.

    :param destinationSubPageId:    Can specify destination subPageId to copy wikis to
                                    Defaults to None

    :returns: A list of Objects with three fields: id, title and parentId.
    """
    # Validate input parameters: sub-page ids must be integer-like strings.
    if entitySubPageId:
        entitySubPageId = str(int(entitySubPageId))
    if destinationSubPageId:
        destinationSubPageId = str(int(destinationSubPageId))

    oldOwn = syn.get(entity, downloadFile=False)
    # getWikiHeaders fails when there is no wiki
    try:
        oldWikiHeaders = syn.getWikiHeaders(oldOwn)
    except SynapseHTTPError as e:
        if e.response.status_code == 404:
            return []
        else:
            raise e

    newOwn = syn.get(destinationId, downloadFile=False)
    wikiIdMap = dict()
    newWikis = dict()
    # If entitySubPageId is given but not destinationSubPageId, set the pageId to "" (will get the root page)
    # A entitySubPage could be copied to a project without any wiki pages, this has to be checked
    newWikiPage = None
    if destinationSubPageId:
        try:
            newWikiPage = syn.getWiki(newOwn, destinationSubPageId)
        except SynapseHTTPError as e:
            if e.response.status_code == 404:
                pass
            else:
                raise e
    if entitySubPageId:
        oldWikiHeaders = _getSubWikiHeaders(oldWikiHeaders, entitySubPageId)

    if not oldWikiHeaders:
        return []

    for wikiHeader in oldWikiHeaders:
        wiki = syn.getWiki(oldOwn, wikiHeader['id'])
        print('Got wiki %s' % wikiHeader['id'])
        if not wiki.get('attachmentFileHandleIds'):
            new_file_handles = []
        else:
            # Fetch metadata for every attachment so the handles can be
            # copied server-side (no local download/re-upload).
            results = [
                syn._getFileHandleDownload(filehandleId, wiki.id, objectType='WikiAttachment')
                for filehandleId in wiki['attachmentFileHandleIds']
            ]
            # Get rid of the previews
            nopreviews = [
                attach['fileHandle'] for attach in results
                if not attach['fileHandle']['isPreview']
            ]
            contentTypes = [attach['contentType'] for attach in nopreviews]
            fileNames = [attach['fileName'] for attach in nopreviews]
            copiedFileHandles = copyFileHandles(
                syn, nopreviews, ["WikiAttachment"] * len(nopreviews),
                [wiki.id] * len(nopreviews), contentTypes, fileNames)
            # Check if failurecodes exist
            for filehandle in copiedFileHandles:
                if filehandle.get("failureCode") is not None:
                    raise ValueError("%s dataFileHandleId: %s"
                                     % (filehandle["failureCode"],
                                        filehandle['originalFileHandleId']))
            new_file_handles = [
                filehandle['newFileHandle']['id']
                for filehandle in copiedFileHandles
            ]

        # for some reason some wikis don't have titles?
        if hasattr(wikiHeader, 'parentId'):
            # Sub-page: parent was copied earlier in this loop, so its new
            # id is already in wikiIdMap.
            newWikiPage = Wiki(owner=newOwn,
                               title=wiki.get('title', ''),
                               markdown=wiki.markdown,
                               fileHandles=new_file_handles,
                               parentWikiId=wikiIdMap[wiki.parentWikiId])
            newWikiPage = syn.store(newWikiPage)
        else:
            if destinationSubPageId is not None and newWikiPage is not None:
                # Root of the copy replaces the contents of an existing
                # destination page instead of creating a new one.
                newWikiPage["attachmentFileHandleIds"] = new_file_handles
                newWikiPage["markdown"] = wiki["markdown"]
                newWikiPage["title"] = wiki.get("title", "")
                # Need to add logic to update titles here
                newWikiPage = syn.store(newWikiPage)
            else:
                newWikiPage = Wiki(owner=newOwn,
                                   title=wiki.get("title", ""),
                                   markdown=wiki.markdown,
                                   fileHandles=new_file_handles,
                                   parentWikiId=destinationSubPageId)
                newWikiPage = syn.store(newWikiPage)
        newWikis[newWikiPage['id']] = newWikiPage
        wikiIdMap[wiki['id']] = newWikiPage['id']

    if updateLinks:
        newWikis = _updateInternalLinks(newWikis, wikiIdMap, entity, destinationId)
    if updateSynIds and entityMap is not None:
        newWikis = _updateSynIds(newWikis, wikiIdMap, entityMap)

    # Persist the (possibly rewritten) markdown of every copied page.
    print("Storing new Wikis\n")
    for oldWikiId in wikiIdMap.keys():
        newWikiId = wikiIdMap[oldWikiId]
        newWikis[newWikiId] = syn.store(newWikis[newWikiId])
        print("\tStored: %s\n" % newWikiId)
    return syn.getWikiHeaders(newOwn)
def test_copyFileHandleAndchangeFileMetadata():
    # Verifies copyFileHandles on a batch of (file entity, wiki attachment)
    # and changeFileMetaData's contentType/downloadAs updates.
    project_entity = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    schedule_for_cleanup(attachname)
    file_entity = syn.store(File(filename, parent=project_entity))
    schedule_for_cleanup(file_entity.id)
    wiki = Wiki(owner=project_entity, title='A Test Wiki', markdown="testing",
                attachments=[attachname])
    wiki = syn.store(wiki)
    wikiattachments = syn._getFileHandle(wiki.attachmentFileHandleIds[0])

    # CHECK: Can batch copy two file handles (wiki attachments and file entity)
    copiedFileHandles = synapseutils.copyFileHandles(
        syn, [file_entity.dataFileHandleId, wiki.attachmentFileHandleIds[0]],
        [file_entity.concreteType.split(".")[-1], "WikiAttachment"],
        [file_entity.id, wiki.id],
        [file_entity.contentType, wikiattachments['contentType']],
        [file_entity.name, wikiattachments['fileName']])
    for results in copiedFileHandles['copyResults']:
        assert_is_none(results.get("failureCode"),
                       "NOT FOUND and UNAUTHORIZED failure codes.")

    files = {
        file_entity.name: {
            "contentType": file_entity['contentType'],
            "md5": file_entity['md5']
        },
        wikiattachments['fileName']: {
            "contentType": wikiattachments['contentType'],
            "md5": wikiattachments['contentMd5']
        }
    }
    # Every copied handle must keep the original name, content type and md5.
    for results in copiedFileHandles['copyResults']:
        i = results['newFileHandle']
        assert_is_not_none(files.get(i['fileName']),
                           "Filename has to be the same")
        assert_equals(files[i['fileName']]['contentType'], i['contentType'],
                      "Content type has to be the same")
        assert_equals(files[i['fileName']]['md5'], i['contentMd5'],
                      "Md5 has to be the same")

    for results in copiedFileHandles['copyResults']:
        assert_is_none(
            results.get("failureCode"),
            "There should not be NOT FOUND and UNAUTHORIZED failure codes.")

    # CHECK: Changing content type and downloadAs
    new_entity = synapseutils.changeFileMetaData(
        syn, file_entity, contentType="application/x-tar",
        downloadAs="newName.txt")
    schedule_for_cleanup(new_entity.id)
    assert_equals(file_entity.md5, new_entity.md5,
                  "Md5s must be equal after copying")

    fileResult = syn._getFileHandleDownload(new_entity.dataFileHandleId,
                                            new_entity.id)
    assert_equals(fileResult['fileHandle']['fileName'], "newName.txt",
                  "Set new file name to be newName.txt")
    assert_equals(new_entity.contentType, "application/x-tar",
                  "Set new content type to be application/x-tar")
def test_copyWiki():
    # End-to-end test of synapseutils.copyWiki: builds a project with a
    # nested wiki tree (plus a wiki on a file), copies it, and verifies the
    # id mapping, attachments and internal-link rewriting.

    # Create a Project
    project_entity = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    folder_entity = syn.store(
        Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(folder_entity.id)
    second_folder = syn.store(
        Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(second_folder.id)
    third_folder = syn.store(
        Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(third_folder.id)

    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    file_entity = syn.store(File(filename, parent=folder_entity))
    nested_folder = syn.store(
        Folder(name=str(uuid.uuid4()), parent=folder_entity))
    second_file = syn.store(File(filename, parent=nested_folder))
    schedule_for_cleanup(file_entity.id)
    schedule_for_cleanup(nested_folder.id)
    schedule_for_cleanup(second_file.id)

    # A wiki attached directly to a file (exercises skipCopyWikiPage).
    fileWiki = Wiki(owner=second_file, title='A Test Wiki', markdown="Test")
    fileWiki = syn.store(fileWiki)

    # Create mock wiki; the embedded synIDs below get remapped on copy.
    md = """
    This is a test wiki
    =======================

    Blabber jabber blah blah
    boo.
    %s
    %s
    """ % (file_entity.id, second_file.id)

    wiki = Wiki(owner=project_entity, title='A Test Wiki', markdown=md,
                attachments=[attachname])
    wiki = syn.store(wiki)

    # Create a Wiki sub-page
    subwiki = Wiki(owner=project_entity, title='A sub-wiki',
                   markdown='%s' % file_entity.id, parentWikiId=wiki.id)
    subwiki = syn.store(subwiki)

    second_md = """
    Testing internal links
    ======================

    [test](#!Synapse:%s/wiki/%s)

    %s)
    """ % (project_entity.id, subwiki.id, second_file.id)

    sub_subwiki = Wiki(owner=project_entity, title='A sub-sub-wiki',
                       markdown=second_md, parentWikiId=subwiki.id,
                       attachments=[attachname])
    sub_subwiki = syn.store(sub_subwiki)

    # Copy wiki to second project
    second_project = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(second_project.id)

    fileMapping = synapseutils.copy(syn, project_entity, second_project.id,
                                    skipCopyWikiPage=True)

    # Test: copyWikiPage = False
    assert_raises(SynapseHTTPError, syn.getWiki, second_project.id)

    first_headers = syn.getWikiHeaders(project_entity)
    second_headers = synapseutils.copyWiki(syn, project_entity.id,
                                           second_project.id,
                                           entityMap=fileMapping)

    mapping = dict()
    # Test: Check that all wikis were copied correctly with the correct mapping
    for index, info in enumerate(second_headers):
        mapping[first_headers[index]['id']] = info['id']
        assert_equals(first_headers[index]['title'], info['title'])
        if info.get('parentId', None) is not None:
            # Check if parent Ids are mapping correctly in the copied Wikis
            assert_equals(info['parentId'],
                          mapping[first_headers[index]['parentId']])

    # Test: Check that all wikis have the correct attachments and have
    # correct internal synapse link/file mapping
    for index, info in enumerate(second_headers):
        # Check if markdown is the correctly mapped
        orig_wikiPage = syn.getWiki(project_entity, first_headers[index]['id'])
        new_wikiPage = syn.getWiki(second_project, info['id'])
        s = orig_wikiPage.markdown
        for oldWikiId in mapping.keys():
            oldProjectAndWikiId = "%s/wiki/%s" % (project_entity.id, oldWikiId)
            newProjectAndWikiId = "%s/wiki/%s" % (second_project.id,
                                                  mapping[oldWikiId])
            s = re.sub(oldProjectAndWikiId, newProjectAndWikiId, s)
        for oldFileId in fileMapping.keys():
            s = re.sub(oldFileId, fileMapping[oldFileId], s)
        assert_equals(s, new_wikiPage.markdown)

        orig_attach = syn.getWikiAttachments(orig_wikiPage)
        new_attach = syn.getWikiAttachments(new_wikiPage)
        # Previews are generated server-side, so exclude them here.
        orig_file = [
            i['fileName'] for i in orig_attach
            if i['concreteType'] != "org.sagebionetworks.repo.model.file.PreviewFileHandle"
        ]
        new_file = [
            i['fileName'] for i in new_attach
            if i['concreteType'] != "org.sagebionetworks.repo.model.file.PreviewFileHandle"
        ]
        # check that attachment file names are the same
        assert_equals(orig_file, new_file)

    # Test: copyWikiPage = True (Default) (Should copy all wikis including
    # wikis on files)
    third_project = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(third_project.id)
    copiedFile = synapseutils.copy(syn, second_file, third_project.id)
    copiedWiki = syn.getWiki(copiedFile[second_file.id])
    assert_equals(copiedWiki.title, fileWiki.title)
    assert_equals(copiedWiki.markdown, fileWiki.markdown)

    # Test: entitySubPageId
    third_header = synapseutils.copyWiki(syn, project_entity.id,
                                         third_project.id,
                                         entitySubPageId=sub_subwiki.id,
                                         destinationSubPageId=None,
                                         updateLinks=False,
                                         updateSynIds=False,
                                         entityMap=fileMapping)
    test_ent_subpage = syn.getWiki(third_project.id, third_header[0]['id'])

    # Test: No internal links updated
    assert_equals(test_ent_subpage.markdown, sub_subwiki.markdown)
    assert_equals(test_ent_subpage.title, sub_subwiki.title)

    # Test: destinationSubPageId
    fourth_header = synapseutils.copyWiki(
        syn, project_entity.id, third_project.id,
        entitySubPageId=subwiki.id,
        destinationSubPageId=test_ent_subpage.id,
        updateLinks=False, updateSynIds=False, entityMap=fileMapping)
    temp = syn.getWiki(third_project.id, fourth_header[0]['id'])
    # There are issues where some title pages are blank. This is an issue
    # that needs to be addressed.
    assert_equals(temp.title, subwiki.title)
    assert_equals(temp.markdown, subwiki.markdown)

    temp = syn.getWiki(third_project.id, fourth_header[1]['id'])
    assert_equals(temp.title, sub_subwiki.title)
    assert_equals(temp.markdown, sub_subwiki.markdown)
def test_wikiAttachment():
    # Covers wiki creation with file-handle and path attachments, sub-pages,
    # retrieval, update, header listing and deletion.

    # Upload a file to be attached to a Wiki
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    schedule_for_cleanup(attachname)
    fileHandle = syn._uploadToFileHandleService(filename)

    # Create and store a Wiki
    # The constructor should accept both file handles and file paths
    md = """
    This is a test wiki
    =======================

    Blabber jabber blah blah
    boo.
    """
    wiki = Wiki(owner=project, title='A Test Wiki', markdown=md,
                fileHandles=[fileHandle['id']],
                attachments=[attachname])
    wiki = syn.store(wiki)

    # Create a Wiki sub-page
    subwiki = Wiki(owner=project, title='A sub-wiki',
                   markdown='nothing', parentWikiId=wiki.id)
    subwiki = syn.store(subwiki)

    # Retrieve the root Wiki from Synapse
    wiki2 = syn.getWiki(project)
    assert wiki == wiki2

    # Retrieve the sub Wiki from Synapse
    wiki2 = syn.getWiki(project, subpageId=subwiki.id)
    assert subwiki == wiki2

    # Try making an update
    wiki['title'] = 'A New Title'
    wiki['markdown'] = wiki['markdown'] + "\nNew stuff here!!!\n"
    wiki = syn.store(wiki)
    assert wiki['title'] == 'A New Title'
    assert wiki['markdown'].endswith("\nNew stuff here!!!\n")

    # Check the Wiki's metadata
    headers = syn.getWikiHeaders(project)
    assert len(headers) == 2
    assert headers[0]['title'] in (wiki['title'], subwiki['title'])

    # # Retrieve the file attachment
    # tmpdir = tempfile.mkdtemp()
    # file_props = syn._downloadWikiAttachment(project, wiki,
    #                                          os.path.basename(filename), dest_dir=tmpdir)
    # path = file_props['path']
    # assert os.path.exists(path)
    # assert filecmp.cmp(original_path, path)

    # Clean up
    # syn._deleteFileHandle(fileHandle)
    syn.delete(wiki)
    syn.delete(subwiki)
    assert_raises(SynapseHTTPError, syn.getWiki, project)
def copyWiki(syn, entity, destinationId, entitySubPageId=None, destinationSubPageId=None,
             updateLinks=True, updateSynIds=True, entityMap=None):
    """
    Copies wikis and updates internal links

    :param syn:                     A synapse object: syn = synapseclient.login()- Must be logged into synapse

    :param entity:                  A synapse ID of an entity whose wiki you want to copy

    :param destinationId:           Synapse ID of a folder/project that the wiki wants to be copied to

    :param updateLinks:             Update all the internal links
                                    Defaults to True

    :param updateSynIds:            Update all the synapse ID's referenced in the wikis
                                    Defaults to True but needs an entityMap

    :param entityMap:               An entity map {'oldSynId','newSynId'} to update the synapse IDs
                                    referenced in the wiki
                                    Defaults to None

    :param entitySubPageId:         Can specify subPageId and copy all of its subwikis
                                    Defaults to None, which copies the entire wiki.
                                    subPageId can be found:
                                    https://www.synapse.org/#!Synapse:syn123/wiki/1234
                                    In this case, 1234 is the subPageId.

    :param destinationSubPageId:    Can specify destination subPageId to copy wikis to
                                    Defaults to None
    """
    oldOwn = syn.get(entity, downloadFile=False)
    # getWikiHeaders fails when there is no wiki
    try:
        oldWh = syn.getWikiHeaders(oldOwn)
        store = True
    except SynapseHTTPError:
        store = False
    if store:
        if entitySubPageId is not None:
            oldWh = _getSubWikiHeaders(oldWh, entitySubPageId, mapping=[])
        newOwn = syn.get(destinationId, downloadFile=False)
        wikiIdMap = dict()
        newWikis = dict()
        for i in oldWh:
            # NOTE(review): this temp file is created but never used or
            # cleaned up below -- appears to be dead code.
            attDir = tempfile.NamedTemporaryFile(prefix='attdir', suffix='')
            # print i['id']
            wiki = syn.getWiki(oldOwn, i.id)
            print('Got wiki %s' % i.id)
            if wiki['attachmentFileHandleIds'] == []:
                attachments = []
            elif wiki['attachmentFileHandleIds'] != []:
                uri = "/entity/%s/wiki/%s/attachmenthandles" % (wiki.ownerId, wiki.id)
                results = syn.restGET(uri)
                file_handles = {fh['id']: fh for fh in results['list']}
                # need to download an re-upload wiki attachments, ug!
                attachments = []
                tempdir = tempfile.gettempdir()
                for fhid in wiki.attachmentFileHandleIds:
                    file_info = syn._downloadWikiAttachment(
                        wiki.ownerId, wiki, file_handles[fhid]['fileName'],
                        destination=tempdir)
                    attachments.append(file_info['path'])
            # for some reason some wikis don't have titles?
            if hasattr(i, 'parentId'):
                # Sub-page: its parent was copied earlier, so the new parent
                # id is already in wikiIdMap.
                wNew = Wiki(owner=newOwn, title=wiki.get('title', ''),
                            markdown=wiki.markdown, attachments=attachments,
                            parentWikiId=wikiIdMap[wiki.parentWikiId])
                wNew = syn.store(wNew)
            else:
                if destinationSubPageId is not None:
                    # Root of the copy replaces an existing destination page.
                    wNew = syn.getWiki(newOwn, destinationSubPageId)
                    wNew.attachments = attachments
                    wNew.markdown = wiki.markdown
                    # Need to add logic to update titles here
                    wNew = syn.store(wNew)
                else:
                    wNew = Wiki(owner=newOwn, title=wiki.get('title', ''),
                                markdown=wiki.markdown, attachments=attachments,
                                parentWikiId=destinationSubPageId)
                    wNew = syn.store(wNew)
            newWikis[wNew.id] = wNew
            wikiIdMap[wiki.id] = wNew.id

        if updateLinks:
            print("Updating internal links:\n")
            for oldWikiId in wikiIdMap.keys():
                # go through each wiki page once more:
                newWikiId = wikiIdMap[oldWikiId]
                newWiki = newWikis[newWikiId]
                print("\tUpdating internal links for Page: %s\n" % newWikiId)
                s = newWiki.markdown
                # in the markdown field, replace all occurrences of
                # entity/wiki/abc with destinationId/wiki/xyz,
                # where wikiIdMap maps abc->xyz
                # replace <entity>/wiki/<oldWikiId> with <destinationId>/wiki/<newWikiId>
                for oldWikiId2 in wikiIdMap.keys():
                    oldProjectAndWikiId = "%s/wiki/%s" % (entity, oldWikiId2)
                    newProjectAndWikiId = "%s/wiki/%s" % (destinationId,
                                                          wikiIdMap[oldWikiId2])
                    s = re.sub(oldProjectAndWikiId, newProjectAndWikiId, s)
                # now replace any last references to entity with destinationId
                s = re.sub(entity, destinationId, s)
                newWikis[newWikiId].markdown = s

        if updateSynIds and entityMap is not None:
            print("Updating Synapse references:\n")
            for oldWikiId in wikiIdMap.keys():
                # go through each wiki page once more:
                newWikiId = wikiIdMap[oldWikiId]
                newWiki = newWikis[newWikiId]
                print('Updated Synapse references for Page: %s\n' % newWikiId)
                s = newWiki.markdown
                for oldSynId in entityMap.keys():
                    # go through each wiki page once more:
                    newSynId = entityMap[oldSynId]
                    s = re.sub(oldSynId, newSynId, s)
                print("Done updating Synpase IDs.\n")
                newWikis[newWikiId].markdown = s

        # Persist the (possibly rewritten) markdown of every copied page.
        print("Storing new Wikis\n")
        for oldWikiId in wikiIdMap.keys():
            newWikiId = wikiIdMap[oldWikiId]
            newWikis[newWikiId] = syn.store(newWikis[newWikiId])
            print("\tStored: %s\n" % newWikiId)
        newWh = syn.getWikiHeaders(newOwn)
        return(newWh)
    else:
        return("no wiki")
def test_wikiAttachment():
    """Integration test: create, read, update, and delete Wikis that carry
    both a pre-uploaded file handle and a path-based attachment."""
    # Two bogus files: one uploaded to S3 as a file handle, one attached by path.
    data_path = utils.make_bogus_data_file()
    attach_path = utils.make_bogus_data_file()
    schedule_for_cleanup(data_path)
    schedule_for_cleanup(attach_path)
    handle = upload_synapse_s3(syn, data_path)

    # Create and store a Wiki.
    # The constructor should accept both file handles and file paths.
    md = """
This is a test wiki
=======================

Blabber jabber blah blah boo.
"""
    root_wiki = syn.store(Wiki(owner=project,
                               title='A Test Wiki',
                               markdown=md,
                               fileHandles=[handle['id']],
                               attachments=[attach_path]))

    # Attach a sub-page beneath the root wiki.
    child_wiki = syn.store(Wiki(owner=project,
                                title='A sub-wiki',
                                markdown='nothing',
                                parentWikiId=root_wiki.id))

    # Round-trip the root wiki. The newer wiki API adds properties
    # (markdownFileHandleId, markdown_path), so compare only the keys
    # present on the object we stored.
    fetched = syn.getWiki(project)
    for key in root_wiki:
        assert_equal(root_wiki[key], fetched[key])

    # Round-trip the sub-page the same way (iterating the root's keys,
    # as the original test did).
    fetched = syn.getWiki(project, subpageId=child_wiki.id)
    for key in root_wiki:
        assert_equal(child_wiki[key], fetched[key])

    # Update title and markdown, then verify the changes persisted.
    root_wiki['title'] = 'A New Title'
    root_wiki['markdown'] = root_wiki['markdown'] + "\nNew stuff here!!!\n"
    root_wiki = syn.store(root_wiki)
    root_wiki = syn.getWiki(project)
    assert root_wiki['title'] == 'A New Title'
    assert root_wiki['markdown'].endswith("\nNew stuff here!!!\n")

    # Header metadata: one entry per page, titles matching what was stored.
    headers = syn.getWikiHeaders(project)
    assert len(headers) == 2
    assert headers[0]['title'] in (root_wiki['title'], child_wiki['title'])

    # Both attachments should be reported on the root wiki.
    handles = syn.getWikiAttachments(root_wiki)
    names = [fh['fileName'] for fh in handles]
    assert all(os.path.basename(p) in names for p in [data_path, attach_path])

    # Deleting both pages leaves the project wiki-less.
    syn.delete(child_wiki)
    syn.delete(root_wiki)
    assert_raises(SynapseHTTPError, syn.getWiki, project)
def mutate(self, info, name, **kwargs):
    """Create a Synapse project along with optional permissions,
    annotations, folders, discussion posts, and a wiki.

    Failures while building the optional pieces are logged and collected
    into ``errors`` rather than aborting the whole mutation; only a
    duplicate project name raises.

    :param info: GraphQL resolve info (unused here).
    :param name: Name for the new Synapse project; must not already exist.
    :param kwargs: Optional ``permissions``, ``annotations``, ``wiki``,
        ``folders``, and ``posts`` inputs.
    :returns: ``CreateSynProject`` carrying the new project and any
        non-fatal errors (or ``None`` when there were none).
    :raises ValueError: if a project with the same name already exists.
    """
    errors = []
    permissions = kwargs.get('permissions', None)
    annotations = kwargs.get('annotations', None)
    wiki = kwargs.get('wiki', None)
    folders = kwargs.get('folders', None)
    posts = kwargs.get('posts', None)

    # Check if a project with the same name already exists.
    project_name_taken = Synapse.client().findEntityId(name) is not None

    if project_name_taken:
        raise ValueError(
            'Another Synapse project with the name: {0} already exists.'.
            format(name))

    # Build the annotations
    project_annotations = {}
    if annotations:
        for annotation in annotations:
            project_annotations[annotation['key']] = annotation['value']

    # Create the Project
    project = Synapse.client().store(
        Project(name=name, annotations=project_annotations))

    # Add the permissions
    if permissions:
        for permission in permissions:
            try:
                principal_id = permission.get('principal_id')
                access = permission.get('access')
                # e.g. access 'ADMIN' resolves to Synapse.ADMIN_PERMS
                access_type = getattr(Synapse, '{0}_PERMS'.format(access))

                try:
                    Synapse.client().setPermissions(project,
                                                    principal_id,
                                                    accessType=access_type,
                                                    warn_if_inherits=False)
                except Exception as syn_ex:
                    logger.exception(
                        'Error setting permission: {0} - {1}'.format(
                            permission, syn_ex))
                    # A foreign-key failure indicates the principal doesn't exist.
                    if 'a foreign key constraint fails' in str(syn_ex):
                        errors.append(
                            'User or Team ID: {0} does not exist.'.format(
                                principal_id))
                    else:
                        errors.append(
                            'Error setting permission for User or Team ID: {0}'
                            .format(principal_id))
            except Exception as ex:
                logger.exception(
                    'Error creating project permissions: {0} - {1}'.format(
                        permission, ex))
                errors.append('Error creating project permissions.')

    # Add the the folders
    if folders:
        for folder_path in folders:
            # '/a/b/c' -> ['a', 'b', 'c']; create each nested folder in turn.
            folder_path_parts = list(filter(None, folder_path.split('/')))
            parent = project
            for folder_name in folder_path_parts:
                try:
                    parent = Synapse.client().store(
                        Folder(name=folder_name, parent=parent))
                except Exception as syn_ex:
                    logger.exception(
                        'Error creating project folder: {0} - {1}'.format(
                            folder_name, syn_ex))
                    errors.append(
                        'Error creating project folder: {0}.'.format(
                            folder_name))

    # Add the posts
    if posts:
        try:
            forum_id = Synapse.client().restGET(
                '/project/{0}/forum'.format(project.id)).get('id')

            for post in posts:
                try:
                    body = {
                        'forumId': forum_id,
                        'title': post.get('title'),
                        'messageMarkdown': post.get('message_markdown')
                    }
                    Synapse.client().restPOST('/thread',
                                              body=json.dumps(body))
                except Exception as syn_ex:
                    logger.exception(
                        'Error creating project post: {0} - {1}'.format(
                            post, syn_ex))
                    errors.append(
                        'Error creating project post: {0}.'.format(
                            post.get('title', None)))
        except Exception as ex:
            # Fix: the format fields were '{1} - {0}', swapping item and
            # exception relative to every other log message in this method.
            logger.exception(
                'Error creating project posts: {0} - {1}'.format(
                    posts, ex))
            # Fix: typo "projects posts" -> "project posts".
            errors.append('Error creating project posts.')

    # Add the wiki
    if wiki:
        try:
            Synapse.client().store(
                Wiki(title=wiki.title, markdown=wiki.markdown, owner=project))
        except Exception as syn_ex:
            logger.exception(
                'Error creating project wiki: {0} - {1}'.format(
                    wiki, syn_ex))
            errors.append('Error creating project wiki.')

    new_syn_project = SynProject.from_project(project)

    return CreateSynProject(syn_project=new_syn_project,
                            errors=(errors if errors else None))
def copyWiki(syn, entity, destinationId, entitySubPageId=None, destinationSubPageId=None, updateLinks=True, updateSynIds=True, entityMap=None):
    """
    Copies wikis and updates internal links

    :param syn:                   A synapse object: syn = synapseclient.login() - Must be logged into synapse

    :param entity:                A synapse ID of an entity whose wiki you want to copy

    :param destinationId:         Synapse ID of a folder/project that the wiki wants to be copied to

    :param entitySubPageId:       Can specify subPageId and copy all of its subwikis.
                                  Defaults to None, which copies the entire wiki.
                                  subPageId can be found: https://www.synapse.org/#!Synapse:syn123/wiki/1234
                                  In this case, 1234 is the subPageId.

    :param destinationSubPageId:  Can specify destination subPageId to copy wikis to.
                                  Defaults to None

    :param updateLinks:           Update all the internal links.
                                  (e.g. syn1234/wiki/34345 becomes syn3345/wiki/49508)
                                  Defaults to True

    :param updateSynIds:          Update all the synapse IDs referenced in the wikis.
                                  (e.g. syn1234 becomes syn2345)
                                  Defaults to True but needs an entityMap

    :param entityMap:             An entity map {'oldSynId': 'newSynId'} to update the synapse IDs
                                  referenced in the wiki.
                                  Defaults to None

    :returns: A list of Objects with three fields: id, title and parentId.
    """
    # Source wikis are read from the staging stack; copies are stored via `syn`.
    stagingSyn = synapseclient.login()
    stagingSyn.setEndpoints(**synapseclient.client.STAGING_ENDPOINTS)

    oldOwn = stagingSyn.get(entity, downloadFile=False)
    # getWikiHeaders raises a 404 when the entity has no wiki at all;
    # treat that as "nothing to copy".
    try:
        oldWh = stagingSyn.getWikiHeaders(oldOwn)
    except SynapseHTTPError as e:
        if e.response.status_code == 404:
            return []
        raise  # bare raise preserves the original traceback

    if entitySubPageId is not None:
        oldWh = _getSubWikiHeaders(oldWh, entitySubPageId)

    newOwn = syn.get(destinationId, downloadFile=False)
    wikiIdMap = dict()
    newWikis = dict()
    for wikiHeader in oldWh:
        wiki = stagingSyn.getWiki(oldOwn, wikiHeader.id)
        print('Got wiki %s' % wikiHeader.id)

        # Download every attachment so it can be re-uploaded with the copy.
        # (The original code also created an unused, never-closed
        # NamedTemporaryFile here; that leak has been removed.)
        attachments = []
        tempdir = tempfile.gettempdir()
        for filehandleId in wiki['attachmentFileHandleIds']:
            result = stagingSyn._getFileHandleDownload(filehandleId, wiki.id, objectType='WikiAttachment')
            file_info = stagingSyn._downloadFileHandle(result['preSignedURL'], tempdir, result['fileHandle'])
            attachments.append(file_info)

        # For some reason some wikis don't have titles, hence wiki.get('title', '')
        if hasattr(wikiHeader, 'parentId'):
            # Sub-page: its parent has already been copied, so its new id
            # is available in wikiIdMap.
            wNew = Wiki(owner=newOwn, title=wiki.get('title', ''),
                        markdown=wiki.markdown, attachments=attachments,
                        parentWikiId=wikiIdMap[wiki.parentWikiId])
            wNew = syn.store(wNew)
        elif destinationSubPageId is not None:
            # Root of the copy lands on an existing destination sub-page.
            wNew = syn.getWiki(newOwn, destinationSubPageId)
            wNew.attachments = attachments
            wNew.markdown = wiki.markdown
            # Need to add logic to update titles here
            wNew = syn.store(wNew)
        else:
            wNew = Wiki(owner=newOwn, title=wiki.get('title', ''),
                        markdown=wiki.markdown, attachments=attachments,
                        parentWikiId=destinationSubPageId)
            wNew = syn.store(wNew)
        newWikis[wNew.id] = wNew
        wikiIdMap[wiki.id] = wNew.id

    if updateLinks:
        newWikis = _updateInternalLinks(newWikis, wikiIdMap, entity, destinationId)

    if updateSynIds and entityMap is not None:
        newWikis = _updateSynIds(newWikis, wikiIdMap, entityMap)

    print("Storing new Wikis\n")
    for oldWikiId in wikiIdMap.keys():
        newWikiId = wikiIdMap[oldWikiId]
        newWikis[newWikiId] = syn.store(newWikis[newWikiId])
        print("\tStored: %s\n" % newWikiId)
    return syn.getWikiHeaders(newOwn)
def test_Wiki__markdown_and_markdownFile_both_defined():
    """Supplying both `markdown` and `markdownFile` must be rejected."""
    with pytest.raises(ValueError):
        Wiki(owner="doesn't matter",
             markdown="asdf",
             markdownFile="~/fakeFile.txt")
#! /usr/bin/env python
# KKD for Sage Bionetworks
# Jan. 25, 2016
# sys.argv[1] = folder ID containing bigwigs to which to add genome browser in wiki

import sys
import synapseclient
from synapseclient import Wiki

syn = synapseclient.login()

results = syn.chunkedQuery('select id,fileType,organism,versionNumber from file where parentId=="%s"' % sys.argv[1])
for result in results:
    if result['file.fileType'][0] == "bigwig":
        temp = syn.get(result['file.id'], downloadFile=False)
        # Fix: organism literal was corrupted to "H**o sapiens", so the
        # human branch could never match.
        if result['file.organism'][0] == "Homo sapiens":
            browser = ''.join([' """${biodalliance13?chr=1&species=HUMAN&viewStart=3025001&viewEnd=3525001&source0=%7B"name"%3A""%2C "entityId"%3A"', result['file.id'], '"%2C "entityVersion"%3A"', str(result['file.versionNumber']), '"%2C "styleType"%3A"default"%2C "styleGlyphType"%3A"HISTOGRAM"%2C "color"%3A"%23808080"%2C "type"%3A"BIGWIG"%2C "height"%3A"120"%7D}""" '])
        elif result['file.organism'][0] == "Mus musculus":
            browser = ''.join([' """${biodalliance13?chr=1&species=MOUSE&viewStart=3025001&viewEnd=3525001&source0=%7B"name"%3A""%2C "entityId"%3A"', result['file.id'], '"%2C "entityVersion"%3A"', str(result['file.versionNumber']), '"%2C "styleType"%3A"default"%2C "styleGlyphType"%3A"HISTOGRAM"%2C "color"%3A"%23808080"%2C "type"%3A"BIGWIG"%2C "height"%3A"120"%7D}""" '])
        else:
            # Fix: for any other organism the original fell through and
            # stored a stale `browser` from a previous iteration (or hit
            # a NameError on the first one). Skip such files instead.
            continue
        wiki = Wiki(owner=temp, markdown=browser)
        wiki = syn.store(wiki)
def test_wiki_with_none_attachments():
    """A Wiki constructed with attachments=None should store without error."""
    client = Synapse(skip_checks=True)
    with patch.object(client, 'restPOST'):
        page = Wiki(owner="syn1", markdown="markdown", attachments=None)
        client.store(page)
def test_Wiki__markdownFile_path_not_exist():
    """A markdownFile path that does not exist must raise ValueError."""
    bogus_path = "/this/is/not/the/file/you/are/looking.for"
    # method under test
    with pytest.raises(ValueError):
        Wiki(owner="doesn't matter", markdownFile=bogus_path)