def test_create_or_update_project():
    name = str(uuid.uuid4())

    project = Project(name, a=1, b=2)
    proj_for_cleanup = syn.store(project)
    schedule_for_cleanup(proj_for_cleanup)

    project = Project(name, b=3, c=4)
    project = syn.store(project)

    assert_equals(project.a, [1])
    assert_equals(project.b, [3])
    assert_equals(project.c, [4])

    project = syn.get(project.id)

    assert_equals(project.a, [1])
    assert_equals(project.b, [3])
    assert_equals(project.c, [4])

    project = Project(name, c=5, d=6)
    try:
        project = syn.store(project, createOrUpdate=False)
    except Exception:
        pass
    else:
        assert False, "Expected an exception from storing an existing project with createOrUpdate=False"
    def setup(self):
        """Setting up all variables for tests"""
        self.markdown = "test\nsyn123/wiki/2222\nsyn123%2Fwiki%2F2222\nsyn123"
        self.wiki_mapping = {'2222': '5555'}
        self.entity = Project(name="foo", id="syn123")
        self.destination = Project(name="test", id="syn555")
        self.expected_markdown = (
            "test\nsyn555/wiki/5555\nsyn555%2Fwiki%2F5555\nsyn555")
        self.syn = mock.create_autospec(Synapse)
        self.entity_wiki = Wiki(markdown=self.markdown,
                                id="2222",
                                owner="syn123",
                                attachmentFileHandleIds=['322', '333'])
        self.filehandles = [{
            'fileHandle': {
                "concreteType": mirrorwiki.PREVIEW_FILE_HANDLE,
                "contentType": "contenttype",
                "fileName": "name"
            }
        }, {
            'fileHandle': {
                "concreteType": "not_preview",
                "contentType": "testing",
                "fileName": "foobar"
            }
        }]
        self.new_filehandle = [{'newFileHandle': {"id": "12356"}}]
        # wiki page mapping {'title': Wiki}
        self.entity_wiki_pages = {'test': self.entity_wiki}
        self.destination_wiki_pages = {'test': self.entity_wiki}

        # wiki headers
        self.entity_wiki_headers = [{"id": "2222", "title": "test"}]
        self.destination_wiki_headers = [{"id": "5555", "title": "test"}]
def test_create_or_update_project():
    name = str(uuid.uuid4())

    project = Project(name, a=1, b=2)
    syn.store(project)

    project = Project(name, b=3, c=4)
    project = syn.store(project)

    assert project.a == [1]
    assert project.b == [3]
    assert project.c == [4]

    project = syn.get(project.id)

    assert project.a == [1]
    assert project.b == [3]
    assert project.c == [4]

    project = Project(name, c=5, d=6)
    try:
        project = syn.store(project, createOrUpdate=False)
    except Exception:
        pass
    else:
        assert False, "Expected an exception from storing an existing project with createOrUpdate=False"
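A pytest-style variant of the createOrUpdate=False check above; this is a sketch, assuming (as the file-level example further down does with assert_raises) that the conflicting store raises a SynapseHTTPError.

import uuid

import pytest
from synapseclient import Project
# import path follows the older client layout referenced elsewhere in these examples
from synapseclient.exceptions import SynapseHTTPError


def test_create_duplicate_project_raises(syn, schedule_for_cleanup):
    """Sketch: storing a second Project with an existing name and
    createOrUpdate=False should fail instead of silently updating."""
    name = str(uuid.uuid4())
    project = syn.store(Project(name))
    schedule_for_cleanup(project)

    # SynapseHTTPError is an assumption here, mirroring the assert_raises
    # usage in the file createOrUpdate example later in this collection
    with pytest.raises(SynapseHTTPError):
        syn.store(Project(name), createOrUpdate=False)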
def testMoveProject(syn, schedule_for_cleanup):
    proj1 = syn.store(Project(name=str(uuid.uuid4()) +
                              "testMoveProject-child"))
    proj2 = syn.store(
        Project(name=str(uuid.uuid4()) + "testMoveProject-newParent"))
    pytest.raises(SynapseHTTPError, syn.move, proj1, proj2)
    schedule_for_cleanup(proj1)
    schedule_for_cleanup(proj2)
    def setup(self):
        # Create a Project
        self.project_entity = syn.store(Project(name=str(uuid.uuid4())))
        filename = utils.make_bogus_data_file()
        attachname = utils.make_bogus_data_file()
        file_entity = syn.store(File(filename, parent=self.project_entity))

        schedule_for_cleanup(self.project_entity.id)
        schedule_for_cleanup(filename)
        schedule_for_cleanup(file_entity.id)

        # Create mock wiki
        md = """
        This is a test wiki
        =======================
    
        Blabber jabber blah blah boo.
        syn123
        syn456
        """

        wiki = Wiki(owner=self.project_entity,
                    title='A Test Wiki',
                    markdown=md,
                    attachments=[attachname])
        wiki = syn.store(wiki)

        # Create a Wiki sub-page
        subwiki = Wiki(owner=self.project_entity,
                       title='A sub-wiki',
                       markdown='%s' % file_entity.id,
                       parentWikiId=wiki.id)
        self.subwiki = syn.store(subwiki)

        second_md = """
        Testing internal links
        ======================
    
        [test](#!Synapse:%s/wiki/%s)
    
        %s)
        """ % (self.project_entity.id, self.subwiki.id, file_entity.id)

        sub_subwiki = Wiki(owner=self.project_entity,
                           title='A sub-sub-wiki',
                           markdown=second_md,
                           parentWikiId=self.subwiki.id,
                           attachments=[attachname])
        self.sub_subwiki = syn.store(sub_subwiki)

        # Set up the second project
        self.second_project = syn.store(Project(name=str(uuid.uuid4())))
        schedule_for_cleanup(self.second_project.id)

        self.fileMapping = {'syn123': 'syn12345', 'syn456': 'syn45678'}

        self.first_headers = syn.getWikiHeaders(self.project_entity)
def test_store_redundantly_named_projects():
    p1 = create_project()

    # If we store a project with the same name, and createOrUpdate==True,
    # it should become an update
    p2 = Project(p1.name)
    p2.updatedThing = 'Yep, sho\'nuf it\'s updated!'
    p2 = syn.store(p2, createOrUpdate=True)

    assert p1.id == p2.id
    assert p2.updatedThing == ['Yep, sho\'nuf it\'s updated!']
def test_syncFromSynapse__project_contains_empty_folder(syn):
    project = Project(name="the project", parent="whatever", id="syn123")
    file = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")

    entities = {
        file.id: file,
        folder.id: folder,
    }

    def syn_get_side_effect(entity, *args, **kwargs):
        return entities[id_of(entity)]

    with patch.object(syn, "getChildren", side_effect=[[folder, file], []]) as patch_syn_get_children,\
            patch.object(syn, "get", side_effect=syn_get_side_effect) as patch_syn_get:

        assert [file] == synapseutils.syncFromSynapse(syn, project)
        expected_get_children_args = [call(project['id']), call(folder['id'])]
        assert expected_get_children_args == patch_syn_get_children.call_args_list
        patch_syn_get.assert_called_once_with(
            file['id'],
            downloadLocation=None,
            ifcollision='overwrite.local',
            followLink=False,
            downloadFile=True,
        )
def test_check_size_each_file(mock_os, syn):
    """
    Verify the check_size_each_file method works correctly
    """

    project_id = "syn123"
    header = 'path\tparent\n'
    path1 = os.path.abspath(os.path.expanduser('~/file1.txt'))
    path2 = 'http://www.synapse.org'
    path3 = os.path.abspath(os.path.expanduser('~/file3.txt'))
    path4 = 'http://www.github.com'

    row1 = f'{path1}\t{project_id}\n'
    row2 = f'{path2}\t{project_id}\n'
    row3 = f'{path3}\t{project_id}\n'
    row4 = f'{path4}\t{project_id}\n'

    manifest = StringIO(header + row1 + row2 + row3 + row4)
    mock_os.path.isfile.side_effect = [True, True, True, False]
    mock_os.path.abspath.side_effect = [path1, path3]
    mock_stat = MagicMock(spec='st_size')
    mock_os.stat.return_value = mock_stat
    mock_stat.st_size = 5

    # mock syn.get() to return a project because the final check is making sure parent is a container
    with patch.object(syn, "get", return_value=Project()):
        sync.readManifestFile(syn, manifest)
        assert mock_os.stat.call_count == 4
Example #10
def submit_workflow(syn, args):
    """
    Submit to challenge
    """

    print(args.CWLfile)
    args.CWLfile = merge(syn, args)
    challenge = args.challenge
    challenge = challenge.lower()
    if challenge not in ("fusion", "isoform"):
        raise ValueError("Must select either 'fusion' or 'isoform'")
    try:
        validate_workflow(syn, args)
        print("\n\n###SUBMITTING MERGED WORKFLOW: %s###\n\n" % args.CWLfile)
        if args.projectId is None:
            print("No projectId is specified, a project is being created")
            project = syn.store(
                Project("SMC-RNA-Challenge %s %s" %
                        (syn.getUserProfile().userName, time.time())))
            args.projectId = project['id']
            print("View your project here: https://www.synapse.org/#!Synapse:%s" % args.projectId)
        CWL = syn.store(File(args.CWLfile, parent=project))
        submission = syn.submit(EVALUATION_QUEUE_ID[challenge],
                                CWL,
                                name=CWL.name,
                                team=args.teamName)
        print "Created submission ID: %s" % submission.id
    except Exception as e:
        print(e)
    ## When you submit, you grant permissions to the Admin team
    give_synapse_permissions(syn, syn.get(args.projectId),
                             CHALLENGE_ADMIN_TEAM_ID)
    print("Administrator access granted to challenge admins")
def test_readManifestFile__synapseStore_values_are_set():

    project_id = "syn123"
    header = 'path\tparent\tsynapseStore\n'
    path1 = os.path.abspath(os.path.expanduser('~/file1.txt'))
    path2 = 'http://www.synapse.org'
    path3 = os.path.abspath(os.path.expanduser('~/file3.txt'))
    path4 = 'http://www.github.com'
    path5 = os.path.abspath(os.path.expanduser('~/file5.txt'))
    path6 = 'http://www.checkoutmymixtapefam.com/fire.mp3'

    row1 = '%s\t%s\tTrue\n' % (path1, project_id)
    row2 = '%s\t%s\tTrue\n' % (path2, project_id)
    row3 = '%s\t%s\tFalse\n' % (path3, project_id)
    row4 = '%s\t%s\tFalse\n' % (path4, project_id)
    row5 = '%s\t%s\t""\n' % (path5, project_id)
    row6 = '%s\t%s\t""\n' % (path6, project_id)

    expected_synapseStore = {
        str(path1): True,
        str(path2): False,
        str(path3): False,
        str(path4): False,
        str(path5): True,
        str(path6): False
    }

    manifest = StringIO(header+row1+row2+row3+row4+row5+row6)
    with patch.object(syn, "get", return_value=Project()),\
         patch.object(os.path, "isfile", return_value=True):  # mocks values for: file1.txt, file3.txt, file5.txt
        manifest_dataframe = synapseutils.sync.readManifestFile(syn, manifest)

        actual_synapseStore = (manifest_dataframe.set_index('path')['synapseStore'].to_dict())
        assert_dict_equal(expected_synapseStore, actual_synapseStore)
def archive(evaluation, stat="VALIDATED", reArchive=False):
    """
    Archive the submissions for the given evaluation queue by copying each
    submission into its own archive project.

    :param evaluation: a Synapse evaluation queue or its ID
    :param stat: submission status to archive (defaults to "VALIDATED")
    :param reArchive: if True, re-archive submissions that already carry an "archived" annotation
    """
    if not isinstance(evaluation, Evaluation):
        evaluation = syn.getEvaluation(evaluation)

    print("\n\nArchiving", evaluation.id, evaluation.name)
    print("-" * 60)
    sys.stdout.flush()

    for submission, status in syn.getSubmissionBundles(evaluation, status=stat):
        ## retrieve file into cache and copy it to destination
        checkIfArchived = [x for x in status.annotations['stringAnnos']
                           if x.get("key") == "archived"]
        if len(checkIfArchived) == 0 or reArchive:
            projectEntity = Project('Archived %s %d %s %s' % (
                submission.name, int(round(time.time() * 1000)),
                submission.id, submission.entityId))
            entity = syn.store(projectEntity)
            adminPriv = ['DELETE', 'DOWNLOAD', 'CREATE', 'READ', 'CHANGE_PERMISSIONS',
                         'UPDATE', 'MODERATE', 'CHANGE_SETTINGS']
            syn.setPermissions(entity, "3324230", adminPriv)
            syn.setPermissions(entity, "3329874", adminPriv)
            syn.setPermissions(entity, "3356007", ["READ", "DOWNLOAD"])
            copied = synu.copy(syn, submission.entityId, entity.id)
            archived = {"archived":entity.id}
            status = update_single_submission_status(status, archived)
            syn.store(status)
def test_check_entity_restrictions():
    current_user_id = int(syn.getUserProfile()['ownerId'])

    #use other user to create a file
    other_syn = synapseclient.login(other_user['username'],
                                    other_user['password'])
    proj = other_syn.store(
        Project(name=str(uuid.uuid4()) + 'test_check_entity_restrictions'))
    a_file = other_syn.store(File('~/idk',
                                  parent=proj,
                                  description='A place to put my junk',
                                  foo=1000,
                                  synapseStore=False),
                             isRestricted=True)

    #no download permissions
    other_syn.setPermissions(proj, syn.username, accessType=['READ'])

    #attempt to get file
    assert_raises(SynapseUnmetAccessRestrictions,
                  syn.get,
                  a_file.id,
                  downloadFile=True)

    other_syn.delete(proj)
    def interaction_func(self, submission, admin):
        """Archives Project Submissions

        Args:
            submission: Submission object
            admin: Specify Synapse userid/team for archive to be
                   shared with
        Returns:
            archive status dict
        """

        project_entity = Project('Archived {} {} {} {}'.format(
            submission.name.replace("&", "+").replace("'", ""),
            int(round(time.time() * 1000)),
            submission.id,
            submission.entityId))
        new_project_entity = self.syn.store(project_entity)
        permissions.set_entity_permissions(self.syn, new_project_entity,
                                           admin, "admin")

        synapseutils.copy(self.syn, submission.entityId,
                          new_project_entity.id)
        archived = {"archived": new_project_entity.id}

        archive_status = {'valid': True,
                          'annotations': archived,
                          'message': "Archived!"}
        return archive_status
Example #15
    async def find_or_create_project(self, project_name_or_id):
        project = await SynapseProxy.find_project_by_name_or_id(project_name_or_id, self.log_error)

        if project:
            logging.info('[Project FOUND] {0}: {1}'.format(project.id, project.name))
            if not self.has_write_permissions(project):
                self.log_error('Script user does not have WRITE permission to Project: {0}'.format(project_name_or_id))
                project = None
        else:
            try:
                bad_name_chars = Utils.get_invalid_synapse_name_chars(project_name_or_id)
                if bad_name_chars:
                    self.log_error(
                        'Project name: "{0}" contains invalid characters: "{1}"'.format(project_name_or_id,
                                                                                        ''.join(bad_name_chars)))
                else:
                    project = await SynapseProxy.storeAsync(Project(project_name_or_id))
                    logging.info('[Project CREATED] {0}: {1}'.format(project.id, project.name))
                    if self._storage_location_id:
                        logging.info('Setting storage location for project: {0}: {1}'.format(project.id, project.name))
                        SynapseProxy.client().setStorageLocation(project, self._storage_location_id)

                    if self._admin_team:
                        logging.info(
                            'Granting admin permissions to team on Project: {0}: {1}'.format(project.id, project.name))
                        self.grant_admin_access(project, self._admin_team.id)
            except Exception as ex:
                self.log_error('Error creating project: {0}, {1}'.format(project_name_or_id, ex))

        if project:
            self.set_synapse_parent(project)

        return project
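Utils.get_invalid_synapse_name_chars is used above but not shown. Here is a plausible standalone sketch, under the assumption that Synapse entity names allow letters, digits, spaces, underscores, hyphens, periods, plus signs, apostrophes, and parentheses; treat the character set as an assumption rather than the library's actual rule.

import re

# Assumed set of characters permitted in Synapse entity names (see note above).
_ALLOWED_NAME_CHARS = re.compile(r"[A-Za-z0-9 _\-.+'()]")


def get_invalid_synapse_name_chars(name):
    """Hypothetical stand-in for Utils.get_invalid_synapse_name_chars:
    return the characters in `name` assumed to be disallowed."""
    return [c for c in name if not _ALLOWED_NAME_CHARS.match(c)]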
def test_syncFromSynapse__downloadFile_is_false(syn):
    """
    Verify that when downloadFile=False is passed, syncFromSynapse does not
    download files to the client's local machine.
    """

    project = Project(name="the project", parent="whatever", id="syn123")
    file = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")

    entities = {
        file.id: file,
        folder.id: folder,
    }

    def syn_get_side_effect(entity, *args, **kwargs):
        return entities[id_of(entity)]

    with patch.object(syn, "getChildren", side_effect=[[folder, file], []]),\
            patch.object(syn, "get", side_effect=syn_get_side_effect) as patch_syn_get:

        synapseutils.syncFromSynapse(syn, project, downloadFile=False)
        patch_syn_get.assert_called_once_with(
            file['id'],
            downloadLocation=None,
            ifcollision='overwrite.local',
            followLink=False,
            downloadFile=False,
        )
Example #17
def setup_module(module):
    print("Python version:", sys.version)

    syn = synapseclient.Synapse(debug=True, skip_checks=True)

    print("Testing against endpoints:")
    print("  " + syn.repoEndpoint)
    print("  " + syn.authEndpoint)
    print("  " + syn.fileHandleEndpoint)
    print("  " + syn.portalEndpoint + "\n")

    syn.login()
    module.syn = syn
    module._to_cleanup = []

    # Make one project for all the tests to use
    project = syn.store(
        Project(name="integration_test_project" + str(uuid.uuid4())))
    schedule_for_cleanup(project)
    module.project = project

    #set the working directory to a temp directory
    module._old_working_directory = os.getcwd()
    working_directory = tempfile.mkdtemp(prefix="someTestFolder")
    schedule_for_cleanup(working_directory)
    os.chdir(working_directory)
Example #18
def test_dispose(syn_client, syn_test_helper, new_temp_file):
    project = syn_client.store(Project(name=syn_test_helper.uniq_name()))

    folder = syn_client.store(
        Folder(name=syn_test_helper.uniq_name(prefix='Folder '),
               parent=project))

    file = syn_client.store(
        File(name=syn_test_helper.uniq_name(prefix='File '),
             path=new_temp_file,
             parent=folder))

    syn_objects = [project, folder, file]

    for syn_obj in syn_objects:
        syn_test_helper.dispose_of(syn_obj)
        assert syn_obj in syn_test_helper._trash

    syn_test_helper.dispose()
    assert len(syn_test_helper._trash) == 0

    for syn_obj in syn_objects:
        with pytest.raises(synapseclient.exceptions.SynapseHTTPError) as ex:
            syn_client.get(syn_obj, downloadFile=False)

        err_str = str(ex.value)
        assert "Not Found" in err_str or "cannot be found" in err_str or "is in trash can" in err_str or "does not exist" in err_str

    try:
        os.remove(new_temp_file)
    except OSError:
        pass
    def start(self):
        if self._dry_run:
            logging.info('~~ Dry Run ~~')

        self.login()

        project = self._synapse_client.get(Project(id=self._synapse_project))
        self.set_synapse_folder(self._synapse_project, project)

        logging.info('Upload to Project: {0} ({1})'.format(
            project.name, project.id))
        logging.info('Upload Directory: {0}'.format(self._local_path))
        logging.info('Upload To: {0}'.format(
            os.path.join(self._synapse_project, (self._remote_path or ''))))
        logging.info('Max Threads: {0}'.format(self._thread_count))

        logging.info('Loading Files...')
        self.load_files()
        logging.info('Total Synapse Folders: {0}'.format(len(self._folders)))
        logging.info('Total Files: {0}'.format(len(self._files)))

        if self._create_manifest_only:
            logging.info('Generating Manifest File...')
            self.create_manifest()
        else:
            logging.info('Uploading Files...')
            self.upload_files()
Example #20
def setup(module):

    module.syn = integration.syn

    module.project = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(module.project)
    module.folder = syn.store(
        Folder(name=str(uuid.uuid4()), parent=module.project))

    # Create testfiles for upload
    module.f1 = utils.make_bogus_data_file(n=10)
    module.f2 = utils.make_bogus_data_file(n=10)
    f3 = 'https://www.synapse.org'

    schedule_for_cleanup(module.f1)
    schedule_for_cleanup(module.f2)

    module.header = 'path\tparent\tused\texecuted\tactivityName\tsynapseStore\tfoo\n'
    module.row1 = '%s\t%s\t%s\t"%s;https://www.example.com"\tprovName\t\tbar\n' % (
        module.f1, module.project.id, module.f2, f3)
    module.row2 = '%s\t%s\t"syn12"\t"syn123;https://www.example.com"\tprovName2\t\tbar\n' % (
        module.f2, module.folder.id)
    module.row3 = '%s\t%s\t"syn12"\t\tprov2\tFalse\tbaz\n' % (f3, module.folder.id)
    module.row4 = '%s\t%s\t%s\t\tact\t\t2\n' % (
        f3, module.project.id, module.f1)  # Circular reference
    module.row5 = '%s\tsyn12\t\t\t\t\t\n' % f3  # Wrong parent
Example #21
def setUpSynapseProject(foldersToCreate, syn, pid=None, pname=None):
    '''Creates Synapse project and necessary folders for the dataset.'''

    # Create a set of sub-folders expected for this project
    folderSchemaSet = set(foldersToCreate)

    # Get the project if it exists or create
    if pid is None:
        project = Project(pname)
        project = syn.store(project)
        existingFolders = {}
    else:
        project = syn.get(pid)
        print('%s' % project.name)

        existingFolders = getExistingFolders(syn, project.id)
        if len(existingFolders) > 0:
            foldersToCreate = folderSchemaSet.difference(
                existingFolders.keys())

    # create the folders that don't exist
    for name in foldersToCreate:
        createFolder = Folder(name, parent=project.id)
        createFolder = syn.store(createFolder)
        existingFolders[name] = createFolder.id
    return (project, existingFolders)
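A hypothetical call to the helper above; the folder names and project name are placeholders.

# Hypothetical usage of setUpSynapseProject(); folder names and the project
# name below are placeholders.
project, folder_ids = setUpSynapseProject(['rawData', 'code', 'results'],
                                          syn, pname='Example Dataset Project')
print(project.id, folder_ids)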
Example #22
def test_readManifest__sync_order_with_home_directory():
    """SYNPY-508"""
    if not pandas_available:
        raise SkipTest("pandas was not found. Skipping test.")

    #row1's file depends on row2's file but is listed first
    file_path1 = '~/file1.txt'
    file_path2 = '~/file2.txt'
    project_id = "syn123"
    header = 'path\tparent\tused\texecuted\tactivityName\tsynapseStore\tfoo\n'
    row1 = '%s\t%s\t%s\t""\tprovActivity1\tTrue\tsomeFooAnnotation1\n' % (
        file_path1, project_id, file_path2)
    row2 = '%s\t%s\t""\t""\tprovActivity2\tTrue\tsomeFooAnnotation2\n' % (
        file_path2, project_id)

    manifest = StringIO(header + row1 + row2)
    # mock syn.get() to return a project because the final check is making sure parent is a container
    # mock isfile() to always return True to avoid having to create files in the home directory
    with patch.object(syn, "get", return_value=Project()),\
         patch.object(os.path, "isfile", side_effect=[True, True, True, False]):
        # side_effect mocks values for: manifest file, file1.txt, file2.txt,
        # and the isfile(project.id) check in syn.get()
        manifest_dataframe = synapseutils.sync.readManifestFile(syn, manifest)
        expected_order = pd.Series([
            os.path.normpath(os.path.expanduser(file_path2)),
            os.path.normpath(os.path.expanduser(file_path1))
        ])
        pdt.assert_series_equal(expected_order,
                                manifest_dataframe.path,
                                check_names=False)
def test_getChildren(syn, schedule_for_cleanup):
    # setup a hierarchy for folders
    # PROJECT
    # |     \
    # File   Folder
    #           |
    #         File
    project_name = str(uuid.uuid1())
    test_project = syn.store(Project(name=project_name))
    folder = syn.store(Folder(name="firstFolder", parent=test_project))
    syn.store(
        File(path="~/doesntMatter.txt",
             name="file inside folders",
             parent=folder,
             synapseStore=False))
    project_file = syn.store(
        File(path="~/doesntMatterAgain.txt",
             name="file inside project",
             parent=test_project,
             synapseStore=False))
    schedule_for_cleanup(test_project)

    expected_id_set = {project_file.id, folder.id}
    children_id_set = {x['id'] for x in syn.getChildren(test_project.id)}
    assert expected_id_set == children_id_set
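getChildren can also be filtered by entity type. The short sketch below reuses the names from the test above and assumes syn.getChildren accepts an includeTypes parameter; treat that parameter as an assumption about the client API.

# Sketch reusing test_project and folder from the test above; includeTypes is
# assumed to restrict results to folders only.
folder_only_ids = {child['id']
                   for child in syn.getChildren(test_project.id, includeTypes=['folder'])}
assert folder_only_ids == {folder.id}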
Example #24
def test_walk():
    walked = []
    firstfile = utils.make_bogus_data_file()
    schedule_for_cleanup(firstfile)
    project_entity = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)
    folder_entity = syn.store(
        Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(folder_entity.id)
    second_folder = syn.store(
        Folder(name=str(uuid.uuid4()), parent=project_entity))
    schedule_for_cleanup(second_folder.id)
    file_entity = syn.store(File(firstfile, parent=project_entity))
    schedule_for_cleanup(file_entity.id)

    walked.append(((project_entity.name, project_entity.id), [
        (folder_entity.name, folder_entity.id),
        (second_folder.name, second_folder.id)
    ], [(file_entity.name, file_entity.id)]))

    nested_folder = syn.store(
        Folder(name=str(uuid.uuid4()), parent=folder_entity))
    schedule_for_cleanup(nested_folder.id)
    secondfile = utils.make_bogus_data_file()
    schedule_for_cleanup(secondfile)
    second_file = syn.store(File(secondfile, parent=nested_folder))
    schedule_for_cleanup(second_file.id)
    thirdfile = utils.make_bogus_data_file()
    schedule_for_cleanup(thirdfile)
    third_file = syn.store(File(thirdfile, parent=second_folder))
    schedule_for_cleanup(third_file.id)

    walked.append(((os.path.join(project_entity.name,
                                 folder_entity.name), folder_entity.id),
                   [(nested_folder.name, nested_folder.id)], []))
    walked.append(
        ((os.path.join(os.path.join(project_entity.name, folder_entity.name),
                       nested_folder.name), nested_folder.id), [],
         [(second_file.name, second_file.id)]))
    walked.append(((os.path.join(project_entity.name, second_folder.name),
                    second_folder.id), [], [(third_file.name, third_file.id)]))

    temp = synapseutils.walk(syn, project_entity.id)
    temp = list(temp)
    # Must sort the lists inside each tuple, because order matters for the assert:
    # folders are returned in a different order depending on their names
    for i in walked:
        for x in i:
            if isinstance(x, list):
                x.sort()
    for i in temp:
        for x in i:
            if isinstance(x, list):
                x.sort()
        assert i in walked

    print("CHECK: synapseutils.walk on a file should return empty generator")
    temp = synapseutils.walk(syn, second_file.id)
    assert list(temp) == []
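For reference, a minimal sketch of consuming synapseutils.walk directly; it reuses project_entity from the test above and relies only on the ((name, id), folders, files) tuple structure that the test asserts on.

# Each yielded item is ((dirpath, dir_id), [(folder_name, folder_id), ...],
# [(file_name, file_id), ...]), matching the tuples built in the test above.
for (dirpath, dir_id), folders, files in synapseutils.walk(syn, project_entity.id):
    print("folder:", dirpath, dir_id)
    for file_name, file_id in files:
        print("    file:", file_name, file_id)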
def test_syncFromSynapse__manifest_is_root(
        mock__get_file_entity_provenance_dict, mock_generateManifest, syn):
    """
    Verify that when the manifest argument passed to syncFromSynapse is "root",
    only the root manifest file is created.
    """

    project = Project(name="the project", parent="whatever", id="syn123")
    file1 = File(name="a file", parent=project, id="syn456")
    folder = Folder(name="a folder", parent=project, id="syn789")
    file2 = File(name="a file2", parent=folder, id="syn789123")

    # Structure of nested project
    # project
    #    |---> file1
    #    |---> folder
    #             |---> file2

    entities = {
        file1.id: file1,
        folder.id: folder,
        file2.id: file2,
    }

    def syn_get_side_effect(entity, *args, **kwargs):
        return entities[id_of(entity)]

    mock__get_file_entity_provenance_dict.return_value = {}

    with patch.object(syn, "getChildren", side_effect=[[folder, file1], [file2]]),\
            patch.object(syn, "get", side_effect=syn_get_side_effect) as patch_syn_get:

        synapseutils.syncFromSynapse(syn,
                                     project,
                                     path="./",
                                     downloadFile=False,
                                     manifest="root")
        assert patch_syn_get.call_args_list == [
            call(
                file1['id'],
                downloadLocation="./",
                ifcollision='overwrite.local',
                followLink=False,
                downloadFile=False,
            ),
            call(
                file2['id'],
                downloadLocation="./a folder",
                ifcollision='overwrite.local',
                followLink=False,
                downloadFile=False,
            )
        ]

        assert mock_generateManifest.call_count == 1

        call_files = mock_generateManifest.call_args_list[0][0][1]
        assert len(call_files) == 2
        assert call_files[0].id == "syn456"
        assert call_files[1].id == "syn789123"
Example #26
def test_setPermissions__default_permissions():
    temp_proj = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(temp_proj)

    syn.setPermissions(temp_proj, other_user['username'])
    permissions = syn.getPermissions(temp_proj, other_user['username'])

    assert_equals(set(['READ', 'DOWNLOAD']), set(permissions))
    def mutate(self,
               info,
               name,
               permissions,
               annotations,
               wiki,
               folders,
               posts):

        # Build the annotations
        project_annotations = {}
        if annotations:
            for annotation in annotations:
                project_annotations[annotation['key']] = annotation['value']

        # Create the Project
        project = Synapse.client().store(
            Project(name=name, annotations=project_annotations)
        )

        # Add the permissions
        if permissions:
            for permission in permissions:
                principal_id = permission['principal_id']
                access = permission['access']
                access_type = getattr(Synapse, '{0}_PERMS'.format(access))

                Synapse.client().setPermissions(
                    project,
                    principal_id,
                    accessType=access_type,
                    warn_if_inherits=False
                )

        # Add the folders
        if folders:
            for folder_name in folders:
                Synapse.client().store(Folder(name=folder_name, parent=project))

        # Add the posts
        if posts:
            forum_id = Synapse.client().restGET(
                '/project/{0}/forum'.format(project.id)).get('id')
            for post in posts:
                body = {
                    'forumId': forum_id,
                    'title': post['title'],
                    'messageMarkdown': post['message_markdown']
                }
                Synapse.client().restPOST("/thread", body=json.dumps(body))

        # Add the wiki
        if wiki:
            Synapse.client().store(Wiki(title=wiki.title, markdown=wiki.markdown, owner=project))

        new_syn_project = SynProject.from_project(project)

        return CreateSynProject(syn_project=new_syn_project)
Example #28
def test_get_entity_owned_by_another_user():
    if 'username' not in other_user or 'password' not in other_user:
        sys.stderr.write(
            '\nWarning: no test-authentication configured. skipping test_get_entity_owned_by_another.\n'
        )
        return

    try:
        syn_other = synapseclient.Synapse(skip_checks=True)
        syn_other.login(other_user['username'], other_user['password'])

        project = Project(name=str(uuid.uuid4()))
        project = syn_other.store(project)

        filepath = utils.make_bogus_data_file()
        a_file = File(filepath,
                      parent=project,
                      description='asdf qwer',
                      foo=1234)
        a_file = syn_other.store(a_file)

        current_user_id = int(syn.getUserProfile()['ownerId'])

        # Update the acl to give the current user read permissions
        syn_other.setPermissions(a_file,
                                 current_user_id,
                                 accessType=['READ', 'DOWNLOAD'],
                                 modify_benefactor=True)

        # Test whether the benefactor's ACL was modified
        assert_equals(set(syn_other.getPermissions(project, current_user_id)),
                      set(['READ', 'DOWNLOAD']))

        # Add a new permission to a user with existing permissions
        # make this change on the entity itself, not its benefactor
        syn_other.setPermissions(a_file,
                                 current_user_id,
                                 accessType=['READ', 'UPDATE', 'DOWNLOAD'],
                                 modify_benefactor=False,
                                 warn_if_inherits=False)
        permissions = syn_other.getPermissions(a_file, current_user_id)
        assert 'READ' in permissions
        assert 'UPDATE' in permissions
        assert len(permissions) == 3

        syn_other.setPermissions(a_file,
                                 current_user_id,
                                 accessType=['READ', 'DOWNLOAD'])
        assert_equals(set(syn_other.getPermissions(a_file, current_user_id)),
                      set(['DOWNLOAD', 'READ']))

        other_users_file = syn.get(a_file.id)
        a_file = syn_other.get(a_file.id)

        assert other_users_file == a_file
    finally:
        syn_other.logout()
Example #29
    def create_project(self, **kwargs):
        """Creates a new Project and adds it to the trash queue."""
        if 'name' not in kwargs:
            kwargs['name'] = self.uniq_name(prefix=kwargs.get('prefix', ''))

        kwargs.pop('prefix', None)

        project = SynapseProxy.client().store(Project(**kwargs))
        self.dispose_of(project)
        return project
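A hypothetical call to the helper above; "helper" stands in for an instance of the class that defines create_project, and the prefix is a placeholder.

# Hypothetical usage of the create_project() test helper; the prefix is a placeholder.
demo_project = helper.create_project(prefix='Demo ')
print(demo_project.id, demo_project.name)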
def test_syncFromSynapse__manifest_value_is_invalid(syn):
    project = Project(name="the project", parent="whatever", id="syn123")
    with pytest.raises(ValueError) as ve:
        synapseutils.syncFromSynapse(syn,
                                     project,
                                     path="./",
                                     downloadFile=False,
                                     manifest="invalid_str")
    assert str(
        ve.value
    ) == 'Value of manifest option should be one of the ("all", "root", "suppress")'
def archive(evaluation, archiveType, destination=None, name=None, query=None):
    """
    Archive the submissions for the given evaluation queue and store them in the destination synapse folder.

    :param evaluation: a Synapse evaluation queue or its ID
    :param archiveType: "submission" to bundle all submission files into a single tar archive;
                        any other value copies each submission into its own archive project
    :param destination: a Synapse folder or its ID (parent for the tar archive file)
    :param name: file name for the tar archive (defaults to submissions_[EVAL_ID].tgz)
    :param query: a query that will return the desired submissions. At least the ID must be returned.
                  Defaults to `select * from evaluation_[EVAL_ID] where status=="SCORED"`.
    """
    tempdir = tempfile.mkdtemp()
    archive_dirname = 'submissions_%s' % utils.id_of(evaluation)

    if not query:
        query = 'select * from evaluation_%s where status=="SCORED"' % utils.id_of(evaluation)

    ## for each submission, download its associated file and write a line of metadata
    results = Query(query=query)
    if 'objectId' not in results.headers:
        raise ValueError("Can't find the required field \"objectId\" in the results of the query: \"{0}\"".format(query))
    if archiveType == "submission":
        if not name:
            name = 'submissions_%s.tgz' % utils.id_of(evaluation)
        tar_path = os.path.join(tempdir, name)
        print "creating tar at:", tar_path
        print results.headers
        with tarfile.open(tar_path, mode='w:gz') as archive:
            with open(os.path.join(tempdir, 'submission_metadata.csv'), 'w') as f:
                f.write( (','.join(hdr for hdr in (results.headers + ['filename'])) + '\n').encode('utf-8') )
                for result in results:
                    ## retrieve file into cache and copy it to destination
                    submission = syn.getSubmission(result[results.headers.index('objectId')])
                    prefixed_filename = submission.id + "_" + os.path.basename(submission.filePath)
                    archive.add(submission.filePath, arcname=os.path.join(archive_dirname, prefixed_filename))
                    line = (','.join(unicode(item) for item in (result+[prefixed_filename]))).encode('utf-8')
                    print line
                    f.write(line + '\n')
            archive.add(
                name=os.path.join(tempdir, 'submission_metadata.csv'),
                arcname=os.path.join(archive_dirname, 'submission_metadata.csv'))

        entity = syn.store(File(tar_path, parent=destination), evaluation_id=utils.id_of(evaluation))
        print("created:", entity.id, entity.name)
        toReturn = entity.id
    else:
        toReturn = {}
        for result in results:
            ## retrieve file into cache and copy it to destination
            submission = syn.getSubmission(result[results.headers.index('objectId')])
            projectEntity = Project('Archived %s %s %s %s' % (
                time.strftime("%Y%m%d"), submission.id,
                submission.entity.id, submission.entity.name))
            entity = syn.store(projectEntity)
            copied = synu.copy(syn, submission.entity.id, entity.id)
            toReturn.update(copied)
    return toReturn
def test_store_with_flags():
    # -- CreateOrUpdate flag for Projects --
    # If we store a project with the same name, it should become an update
    projUpdate = Project(project.name)
    projUpdate.updatedThing = 'Yep, sho\'nuf it\'s updated!'
    projUpdate = syn.store(projUpdate, createOrUpdate=True)
    assert_equals(project.id, projUpdate.id)
    assert_equals(projUpdate.updatedThing, ['Yep, sho\'nuf it\'s updated!'])

    # Store a File
    filepath = utils.make_bogus_binary_file()
    schedule_for_cleanup(filepath)
    origBogus = File(filepath, name='Bogus Test File', parent=project)
    origBogus = syn.store(origBogus, createOrUpdate=True)
    assert_equals(origBogus.versionNumber, 1)

    # Modify existing annotations by createOrUpdate
    del projUpdate['parentId']
    del projUpdate['id']
    projUpdate.updatedThing = 'Updated again'
    projUpdate.addedThing = 'Something new'
    projUpdate = syn.store(projUpdate, createOrUpdate=True)
    assert_equals(project.id, projUpdate.id)
    assert_equals(projUpdate.updatedThing, ['Updated again'])
    
    # -- ForceVersion flag --
    # Re-store the same thing and don't up the version
    mutaBogus = syn.store(origBogus, forceVersion=False)
    assert_equals(mutaBogus.versionNumber, 1)
    
    # Re-store again, essentially the same condition
    mutaBogus = syn.store(mutaBogus, createOrUpdate=True, forceVersion=False)
    assert_equals(mutaBogus.versionNumber, 1, "expected version 1 but got version %s" % mutaBogus.versionNumber)
    
    # And again, but up the version this time
    mutaBogus = syn.store(mutaBogus, forceVersion=True)
    assert_equals(mutaBogus.versionNumber, 2)

    # -- CreateOrUpdate flag for files --
    # Store a different file with the same name and parent
    # Expected behavior is that a new version of the first File will be created
    new_filepath = utils.make_bogus_binary_file()
    schedule_for_cleanup(new_filepath)
    mutaBogus.path = new_filepath
    mutaBogus = syn.store(mutaBogus, createOrUpdate=True)
    assert_equals(mutaBogus.id, origBogus.id)
    assert_equals(mutaBogus.versionNumber, 3)
    assert_false(filecmp.cmp(mutaBogus.path, filepath))

    # Make doubly sure the File was uploaded
    checkBogus = syn.get(mutaBogus.id)
    assert_equals(checkBogus.id, origBogus.id)
    assert_equals(checkBogus.versionNumber, 3)
    assert_true(filecmp.cmp(mutaBogus.path, checkBogus.path))

    # Create yet another file with the same name and parent
    # Expected behavior is raising an exception with a 409 error
    newer_filepath = utils.make_bogus_binary_file()
    schedule_for_cleanup(newer_filepath)
    badBogus = File(newer_filepath, name='Bogus Test File', parent=project)
    assert_raises(SynapseHTTPError, syn.store, badBogus, createOrUpdate=False)
    
    # -- Storing after syn.get(..., downloadFile=False) --
    ephemeralBogus = syn.get(mutaBogus, downloadFile=False)
    ephemeralBogus.description = 'Snorklewacker'
    ephemeralBogus.shoe_size = 11.5
    ephemeralBogus = syn.store(ephemeralBogus)

    ephemeralBogus = syn.get(ephemeralBogus, downloadFile=False)
    assert_equals(ephemeralBogus.description, 'Snorklewacker')
    assert_equals(ephemeralBogus.shoe_size, [11.5])
def test_store_with_flags():
    # -- CreateOrUpdate flag for Projects --
    # If we store a project with the same name, it should become an update
    projUpdate = Project(project.name)
    projUpdate.updatedThing = 'Yep, sho\'nuf it\'s updated!'
    projUpdate = syn.store(projUpdate, createOrUpdate=True)
    assert project.id == projUpdate.id
    assert projUpdate.updatedThing == ['Yep, sho\'nuf it\'s updated!']

    # Store a File
    filepath = utils.make_bogus_binary_file()
    schedule_for_cleanup(filepath)
    origBogus = File(filepath, name='Bogus Test File', parent=project)
    origBogus = syn.store(origBogus, createOrUpdate=True)
    assert origBogus.versionNumber == 1
    
    # -- ForceVersion flag --
    # Re-store the same thing and don't up the version
    mutaBogus = syn.store(origBogus, forceVersion=False)
    assert mutaBogus.versionNumber == 1
    
    # Re-store again, essentially the same condition
    mutaBogus = syn.store(mutaBogus, createOrUpdate=True, forceVersion=False)
    assert mutaBogus.versionNumber == 1
    
    # And again, but up the version this time
    mutaBogus = syn.store(mutaBogus, forceVersion=True)
    assert mutaBogus.versionNumber == 2

    # -- CreateOrUpdate flag for files --
    # Store a different file with the same name and parent
    # Expected behavior is that a new version of the first File will be created
    new_filepath = utils.make_bogus_binary_file()
    schedule_for_cleanup(new_filepath)
    mutaBogus.path = new_filepath
    mutaBogus = syn.store(mutaBogus, createOrUpdate=True)
    assert mutaBogus.id == origBogus.id
    assert mutaBogus.versionNumber == 3
    assert not filecmp.cmp(mutaBogus.path, filepath)

    # Make doubly sure the File was uploaded
    checkBogus = syn.get(mutaBogus.id)
    assert checkBogus.id == origBogus.id
    assert checkBogus.versionNumber == 3
    assert filecmp.cmp(mutaBogus.path, checkBogus.path)

    # Create yet another file with the same name and parent
    # Expected behavior is raising an exception with a 409 error
    newer_filepath = utils.make_bogus_binary_file()
    schedule_for_cleanup(newer_filepath)
    badBogus = File(newer_filepath, name='Bogus Test File', parent=project)
    assert_raises(SynapseHTTPError, syn.store, badBogus, createOrUpdate=False)
    
    # -- Storing after syn.get(..., downloadFile=False) --
    ephemeralBogus = syn.get(mutaBogus, downloadFile=False)
    ephemeralBogus.description = 'Snorklewacker'
    ephemeralBogus.shoe_size = 11.5
    ephemeralBogus = syn.store(ephemeralBogus)

    ephemeralBogus = syn.get(ephemeralBogus, downloadFile=False)
    assert ephemeralBogus.description == 'Snorklewacker'
    assert ephemeralBogus.shoe_size == [11.5]