예제 #1
0
def test_downloadFile():
    """Download a file from a plain HTTP URL and verify it exists on disk."""
    # See if a "wget"-style download works
    filename = utils.download_file(
        "http://dev-versions.synapse.sagebase.org/sage_bionetworks_logo_274x128.png"
    )
    schedule_for_cleanup(filename)
    assert_true(os.path.exists(filename))
예제 #2
0
def test_teams():
    """Create a Team, then verify membership plus lookup by id and by name."""
    name = "My Uniquely Named Team " + str(uuid.uuid4())
    team = syn.store(Team(name=name, description="A fake team for testing..."))
    schedule_for_cleanup(team)

    # Lookup by id should return an equal Team
    found_team = syn.getTeam(team.id)
    assert_equals(team, found_team)

    # The current user should show up among the team's members
    p = syn.getUserProfile()
    found = None
    for m in syn.getTeamMembers(team):
        if m.member.ownerId == p.ownerId:
            found = m
            break

    assert_is_not_none(found,
                       "Couldn't find user {} in team".format(p.userName))

    # needs to be retried 'cause appending to the search index is asynchronous
    tries = 10
    found_team = None
    while tries > 0:
        try:
            found_team = syn.getTeam(name)
            break
        except ValueError:
            tries -= 1
            if tries > 0:
                time.sleep(1)
    assert_equals(team, found_team)
예제 #3
0
def test_uploadFileEntity():
    """Upload a FileEntity, verify its contents, then update it with a new file."""
    # Create a FileEntity
    # Dictionaries default to FileEntity as a type
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)
    entity = File(name='fooUploadFileEntity',
                  path=fname,
                  parentId=project['id'],
                  description='A test file entity')
    entity = syn.store(entity)

    # Download and verify
    entity = syn.get(entity)

    assert_equals(entity['files'][0], os.path.basename(fname))
    assert_true(filecmp.cmp(fname, entity['path']))

    # Check if we upload the wrong type of file handle
    fh = syn.restGET('/entity/%s/filehandles' % entity.id)['list'][0]
    assert_equals(fh['concreteType'],
                  'org.sagebionetworks.repo.model.file.S3FileHandle')

    # Create a different temporary file
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)

    # Update existing FileEntity
    entity.path = fname
    entity = syn.store(entity)

    # Download and verify that it is the same file
    entity = syn.get(entity)
    assert_equals(entity['files'][0], os.path.basename(fname))
    assert_true(filecmp.cmp(fname, entity['path']))
예제 #4
0
def test_synapseStore_flag():
    """Files stored with synapseStore=False should be tracked as external URLs."""
    # Store a path to a local file
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    bogus = File(path, name='Totally bogus data', parent=project, synapseStore=False)
    bogus = syn.store(bogus)

    # Verify the thing can be downloaded as a URL
    bogus = syn.get(bogus, downloadFile=False)
    assert_equals(bogus.name, 'Totally bogus data')
    assert_equals(bogus.path, path, "Path: %s\nExpected: %s" % (bogus.path, path))
    assert_false(bogus.synapseStore)

    # Make sure the test runs on Windows and other OS's
    if path[0].isalpha() and path[1] == ':':
        # A Windows file URL looks like this: file:///c:/foo/bar/bat.txt
        expected_url = 'file:///' + path.replace("\\", "/")
    else:
        expected_url = 'file://' + path

    assert_equals(bogus.externalURL, expected_url, 'URL: %s\nExpected %s' % (bogus.externalURL, expected_url))

    # A file path that doesn't exist should still work
    bogus = File('/path/to/local/file1.xyz', parentId=project.id, synapseStore=False)
    bogus = syn.store(bogus)
    # ...but downloading it should raise since there's nothing at that path
    assert_raises(IOError, syn.get, bogus)
    assert_false(bogus.synapseStore)

    # Try a URL
    bogus = File('http://dev-versions.synapse.sagebase.org/synapsePythonClient', parent=project, synapseStore=False)
    bogus = syn.store(bogus)
    bogus = syn.get(bogus)
    assert_false(bogus.synapseStore)
예제 #5
0
def test_getChildren():
    """getChildren on a project should return only its direct children."""
    # setup a hierarchy for folders
    # PROJECT
    # |     \
    # File   Folder
    #           |
    #         File
    project_name = str(uuid.uuid1())
    test_project = syn.store(Project(name=project_name))
    folder = syn.store(Folder(name="firstFolder", parent=test_project))
    # File nested inside the folder should NOT appear among the project's children
    syn.store(
        File(path="~/doesntMatter.txt",
             name="file inside folders",
             parent=folder,
             synapseStore=False))
    project_file = syn.store(
        File(path="~/doesntMatterAgain.txt",
             name="file inside project",
             parent=test_project,
             synapseStore=False))
    schedule_for_cleanup(test_project)

    # Only the direct children (file + folder) are expected
    expected_id_set = {project_file.id, folder.id}
    children_id_set = {x['id'] for x in syn.getChildren(test_project.id)}
    assert_equals(expected_id_set, children_id_set)
예제 #6
0
def test_get_local_file():
    """syn.get on a local path should resolve to the stored entity, honoring limitSearch."""
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    folder = Folder('TestFindFileFolder', parent=project, description='A place to put my junk')
    folder = syn.store(folder)

    # Get an nonexistent file in Synapse
    assert_raises(SynapseError, syn.get, new_path)

    # Get a file really stored in Synapse
    ent_folder = syn.store(File(new_path, parent=folder))
    ent2 = syn.get(new_path)
    assert_equals(ent_folder.id, ent2.id)
    assert_equals(ent_folder.versionNumber, ent2.versionNumber)

    # Get a file stored in Multiple locations #should display warning
    syn.store(File(new_path, parent=project))
    syn.get(new_path)

    # Get a file stored in multiple locations with limit set
    ent = syn.get(new_path, limitSearch=folder.id)
    assert_equals(ent.id, ent_folder.id)
    assert_equals(ent.versionNumber, ent_folder.versionNumber)

    # Get a file that exists but such that limitSearch removes them and raises error
    assert_raises(SynapseError, syn.get, new_path, limitSearch='syn1')
예제 #7
0
def test_store__changing_from_Synapse_to_externalURL_by_changing_path():
    """Toggling synapseStore should swap between S3 and external file handles."""
    # create a temp file
    temp_path = utils.make_bogus_data_file()
    schedule_for_cleanup(temp_path)

    # Initially stored in Synapse (S3) storage
    ext = syn.store(File(temp_path, parent=project, synapseStore=True))
    ext = syn.get(ext)
    assert_equal("org.sagebionetworks.repo.model.file.S3FileHandle", ext._file_handle.concreteType)

    # Flip to external storage
    ext.synapseStore = False
    ext = syn.store(ext)

    # do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)
    assert_equal("org.sagebionetworks.repo.model.file.ExternalFileHandle", ext._file_handle.concreteType)
    assert_equal(utils.as_url(temp_path), ext.externalURL)
    assert_equal(False, ext.synapseStore)

    # swap back to synapse storage
    ext.synapseStore = True
    ext = syn.store(ext)
    # do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)
    assert_equal("org.sagebionetworks.repo.model.file.S3FileHandle", ext._file_handle.concreteType)
    assert_equal(None, ext.externalURL)
    assert_equal(True, ext.synapseStore)
예제 #8
0
def _makeManifest(content):
    """Write *content* to a temporary .dat file, register it for cleanup,
    and return its normalized path."""
    with tempfile.NamedTemporaryFile(mode='w', suffix=".dat", delete=False) as manifest:
        manifest.write(content)
        path = utils.normalize_path(manifest.name)
    schedule_for_cleanup(path)
    return path
예제 #9
0
def test_store_activity():
    """Store entities with an Activity and verify the provenance round-trips."""
    # Create a File and an Activity
    path = utils.make_bogus_binary_file()
    schedule_for_cleanup(path)
    entity = File(path, name='Hinkle horn honking holes', parent=project)
    honking = Activity(name='Hinkle horn honking',
                       description='Nettlebed Cave is a limestone cave located on the South Island of New Zealand.')
    honking.used('http://www.flickr.com/photos/bevanbfree/3482259379/')
    honking.used('http://www.flickr.com/photos/bevanbfree/3482185673/')

    # This doesn't set the ID of the Activity
    entity = syn.store(entity, activity=honking)

    # But this does
    honking = syn.getProvenance(entity.id)

    # Verify the Activity
    assert_equals(honking['name'], 'Hinkle horn honking')
    assert_equals(len(honking['used']), 2)
    assert_equals(honking['used'][0]['concreteType'], 'org.sagebionetworks.repo.model.provenance.UsedURL')
    assert_false(honking['used'][0]['wasExecuted'])
    assert_true(honking['used'][0]['url'].startswith('http://www.flickr.com/photos/bevanbfree/3482'))
    assert_equals(honking['used'][1]['concreteType'], 'org.sagebionetworks.repo.model.provenance.UsedURL')
    assert_false(honking['used'][1]['wasExecuted'])

    # Store another Entity with the same Activity
    entity = File('http://en.wikipedia.org/wiki/File:Nettlebed_cave.jpg',
                  name='Nettlebed Cave', parent=project, synapseStore=False)
    entity = syn.store(entity, activity=honking)

    # The Activities should match
    honking2 = syn.getProvenance(entity)
    assert_equals(honking['id'], honking2['id'])
예제 #10
0
def test_uploadFile_given_dictionary():
    """Store a Folder defined as a plain dictionary, then update it via a dictionary."""
    # Build a Folder entity the old-fashioned way, as a raw dict
    folder_dict = {
        'concreteType': Folder._synapse_entity_type,
        'parentId': project['id'],
        'name': 'fooDictionary',
        'foo': 334455
    }
    entity = syn.store(folder_dict)

    # Fetch it back and verify the properties round-tripped
    entity = syn.get(entity)
    assert_equals(entity.parentId, project.id)
    assert_equals(entity.foo[0], 334455)

    # Update via a dictionary assembled from the entity's parts
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    rareCase = {}
    rareCase.update(entity.annotations)
    rareCase.update(entity.properties)
    rareCase.update(entity.local_state())
    rareCase['description'] = 'Updating with a plain dictionary should be rare.'

    # Verify the dictionary-based update works
    entity = syn.store(rareCase)
    assert_equals(entity.description, rareCase['description'])
    assert_equals(entity.name, 'fooDictionary')
    syn.get(entity['id'])
예제 #11
0
def test_round_trip():
    """Multipart-upload a large binary file, re-download it, and compare contents."""
    fhid = None
    # File slightly larger than one part forces a true multipart upload
    filepath = utils.make_bogus_binary_file(MIN_PART_SIZE + 777771)
    try:
        fhid = multipart_upload_file(syn, filepath)

        # Download the file and compare it with the original
        junk = File(parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        schedule_for_cleanup(tmp_path)

        junk['path'] = syn._downloadFileHandle(fhid, junk['id'], 'FileEntity', tmp_path)
        assert_true(filecmp.cmp(filepath, junk.path))

    finally:
        # Best-effort cleanup; failures here shouldn't mask the test result
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
예제 #12
0
def test_multipart_upload_big_string():
    """Multipart-upload a large unicode string and verify it round-trips intact."""
    # City names across many scripts exercise UTF-8 handling end to end
    cities = ["Seattle", "Portland", "Vancouver", "Victoria",
              "San Francisco", "Los Angeles", "New York",
              "Oaxaca", "Cancún", "Curaçao", "जोधपुर",
              "অসম", "ལྷ་ས།", "ཐིམ་ཕུ་", "دبي", "አዲስ አበባ",
              "São Paulo", "Buenos Aires", "Cartagena",
              "Amsterdam", "Venice", "Rome", "Dubrovnik",
              "Sarajevo", "Madrid", "Barcelona", "Paris",
              "Αθήνα", "Ρόδος", "København", "Zürich",
              "金沢市", "서울", "แม่ฮ่องสอน", "Москва"]

    # Grow the text (measured in encoded bytes) past one part to force multipart
    text = "Places I wanna go:\n"
    while len(text.encode('utf-8')) < MIN_PART_SIZE:
        text += ", ".join(random.choice(cities) for i in range(5000)) + "\n"

    fhid = multipart_upload_string(syn, text)

    # Download the file and compare it with the original
    junk = File(parent=project, dataFileHandleId=fhid)
    junk.properties.update(syn._createEntity(junk.properties))
    (tmp_f, tmp_path) = tempfile.mkstemp()
    schedule_for_cleanup(tmp_path)

    junk['path'] = syn._downloadFileHandle(fhid, junk['id'], "FileEntity", tmp_path)

    with open(junk.path, encoding='utf-8') as f:
        retrieved_text = f.read()

    assert_equals(retrieved_text, text)
예제 #13
0
def test_synapse_integer_columns_with_missing_values_from_dataframe():
    """A missing INTEGER value must survive the CSV -> DataFrame -> Table round trip."""
    # SYNPY-267
    cols = [
        Column(name='x', columnType='STRING'),
        Column(name='y', columnType='INTEGER'),
        Column(name='z', columnType='DOUBLE')
    ]
    schema = syn.store(Schema(name='Big Table', columns=cols, parent=project))

    line_terminator = str(os.linesep)
    # write rows to CSV file
    with tempfile.NamedTemporaryFile(mode="w", suffix=".csv",
                                     delete=False) as temp:
        schedule_for_cleanup(temp.name)
        # 2nd row is missing a value in its integer column
        temp.write('x,y,z' + line_terminator + 'a,1,0.9' + line_terminator +
                   'b,,0.8' + line_terminator + 'c,3,0.7' + line_terminator)
        temp.flush()
        filename = temp.name

    # create a table from csv
    table = Table(schema, filename)
    df = table.asDataFrame()

    # Re-creating the table from the DataFrame writes a new CSV; the frames must match
    table_from_dataframe = Table(schema, df)
    assert_not_equal(table.filepath, table_from_dataframe.filepath)
    df2 = table_from_dataframe.asDataFrame()
    assert_frame_equal(df, df2)
예제 #14
0
def dontruntest_big_csvs():
    """(Disabled by name) Upload a large generated CSV into a table schema."""
    cols = [
        Column(name='name', columnType='STRING', maximumSize=1000),
        Column(name='foo',
               columnType='STRING',
               enumValues=['foo', 'bar', 'bat']),
        Column(name='x', columnType='DOUBLE'),
        Column(name='n', columnType='INTEGER'),
        Column(name='is_bogus', columnType='BOOLEAN')
    ]

    schema1 = syn.store(Schema(name='Big Table', columns=cols, parent=project))

    # write rows to CSV file
    # NamedTemporaryFile is only used to reserve a filename; it's re-opened below
    with tempfile.NamedTemporaryFile(delete=False) as temp:
        schedule_for_cleanup(temp.name)
        filename = temp.name

    with io.open(filename, mode='w', encoding="utf-8", newline='') as temp:
        writer = csv.writer(temp,
                            quoting=csv.QUOTE_NONNUMERIC,
                            lineterminator=str(os.linesep))
        writer.writerow([col.name for col in cols])

        # 1000 rows of random data
        for i in range(10):
            for j in range(100):
                foo = cols[1].enumValues[random.randint(0, 2)]
                writer.writerow(
                    ('Robot ' + str(i * 100 + j), foo, random.random() * 200.0,
                     random.randint(0, 100), random.random() >= 0.5))
    # upload CSV
    syn._uploadCsv(filepath=temp.name, schema=schema1)

    from synapseclient.table import CsvFileTable
    CsvFileTable.from_table_query(syn, "select * from %s" % schema1.id)
예제 #15
0
def test_store_file_handle_update_metadata():
    """Replacing an entity's dataFileHandleId should refresh file-handle info and local state."""
    original_file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(original_file_path)

    # upload the project
    entity = syn.store(File(original_file_path, parent=project))
    old_file_handle = entity._file_handle

    # create file handle to replace the old one
    replacement_file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(replacement_file_path)
    new_file_handle = syn.uploadFileHandle(replacement_file_path, parent=project)

    entity.dataFileHandleId = new_file_handle['id']
    new_entity = syn.store(entity)

    # make sure _file_handle info was changed
    # (_file_handle values are all changed at once so just verifying id change is sufficient)
    assert_equal(new_file_handle['id'], new_entity._file_handle['id'])
    assert_not_equal(old_file_handle['id'], new_entity._file_handle['id'])

    # check that local_state was updated
    assert_equal(replacement_file_path, new_entity.path)
    assert_equal(os.path.dirname(replacement_file_path), new_entity.cacheDir)
    assert_equal([os.path.basename(replacement_file_path)], new_entity.files)
예제 #16
0
def testMoveProject():
    """Moving one Project into another is invalid and must raise SynapseHTTPError."""
    child_project = syn.store(
        Project(name=str(uuid.uuid4()) + "testMoveProject-child"))
    new_parent = syn.store(
        Project(name=str(uuid.uuid4()) + "testMoveProject-newParent"))
    # Projects cannot be nested, so the move is expected to fail server-side
    assert_raises(SynapseHTTPError, syn.move, child_project, new_parent)
    schedule_for_cleanup(child_project)
    schedule_for_cleanup(new_parent)
예제 #17
0
def test_download_local_file_URL_path():
    """A file handle pointing at a local path should round-trip through syn.get."""
    local_path = utils.make_bogus_data_file()
    schedule_for_cleanup(local_path)

    # Register the existing local file as an external file handle (no upload)
    file_handle = create_external_file_handle(syn, local_path, mimetype=None, file_size=None)

    stored = syn.store(File(dataFileHandleId=file_handle['id'], parent=project))
    fetched = syn.get(stored.id)
    assert_equal(local_path, fetched.path)
예제 #18
0
def _create_temp_file_with_cleanup(specific_file_text=None):
    """Create a temp file (containing *specific_file_text* when given, otherwise
    bogus data) and register it for cleanup. Returns the file path."""
    if not specific_file_text:
        filename = utils.make_bogus_data_file()
    else:
        with tempfile.NamedTemporaryFile(mode="w", suffix=".txt",
                                         delete=False) as handle:
            handle.write(specific_file_text)
            filename = handle.name
    schedule_for_cleanup(filename)
    return filename
예제 #19
0
def test_store_isRestricted_flag():
    """Storing with isRestricted=True should trigger an access-requirement request."""
    data_path = utils.make_bogus_binary_file()
    schedule_for_cleanup(data_path)
    secret_file = File(data_path, name='Secret human data', parent=project)

    # Patch out the ACT notification so the test doesn't spam real emails
    with patch('synapseclient.client.Synapse._createAccessRequirementIfNone') as intercepted:
        secret_file = syn.store(secret_file, isRestricted=True)
        assert_true(intercepted.called)
예제 #20
0
def test_syncToSynapse():
    """Round-trip a manifest through syncToSynapse / syncFromSynapse and verify
    locations, annotations, and provenance all match."""
    # Test upload of accurate manifest
    manifest = _makeManifest(header + row1 + row2 + row3)
    synapseutils.syncToSynapse(syn, manifest, sendMessages=False, retries=2)

    # syn.getChildren() used by syncFromSynapse() may intermittently have timing issues
    time.sleep(3)

    # Download using syncFromSynapse
    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)
    entities = synapseutils.syncFromSynapse(syn, project, path=tmpdir)

    # Index both manifests by basename so rows can be compared pairwise
    orig_df = pd.read_csv(manifest, sep='\t')
    orig_df.index = [os.path.basename(p) for p in orig_df.path]
    new_df = pd.read_csv(os.path.join(tmpdir,
                                      synapseutils.sync.MANIFEST_FILENAME),
                         sep='\t')
    new_df.index = [os.path.basename(p) for p in new_df.path]

    assert_equals(len(orig_df), len(new_df))
    new_df = new_df.loc[orig_df.index]

    # Validate what was uploaded is in right location
    assert_true(new_df.parent.equals(orig_df.parent),
                'Downloaded files not stored in same location')

    # Validate that annotations were set
    # Drop the structural manifest columns, leaving only annotation columns
    cols = synapseutils.sync.REQUIRED_FIELDS + synapseutils.sync.FILE_CONSTRUCTOR_FIELDS\
           + synapseutils.sync.STORE_FUNCTION_FIELDS
    orig_anots = orig_df.drop(cols, axis=1, errors='ignore')
    new_anots = new_df.drop(cols, axis=1, errors='ignore')
    assert_equals(
        orig_anots.shape[1],
        new_anots.shape[1])  # Verify that we have the same number of cols
    assert_true(new_anots.equals(orig_anots.loc[:, new_anots.columns]),
                'Annotations different')

    # Validate that provenance is correct
    for provenanceType in ['executed', 'used']:
        # Go through each row
        for orig, new in zip(orig_df[provenanceType], new_df[provenanceType]):
            if not pd.isnull(orig) and not pd.isnull(new):
                # Convert local file paths into synId.versionNumber strings
                orig_list = [
                    '%s.%s' %
                    (i.id, i.versionNumber) if isinstance(i, Entity) else i
                    for i in syn._convertProvenanceList(orig.split(';'))
                ]
                new_list = [
                    '%s.%s' %
                    (i.id, i.versionNumber) if isinstance(i, Entity) else i
                    for i in syn._convertProvenanceList(new.split(';'))
                ]
                assert_equals(set(orig_list), set(new_list))
예제 #21
0
def test_syncFromSynapse__given_file_id():
    """syncFromSynapse given a single file id should return exactly that file."""
    local_path = utils.make_bogus_data_file()
    schedule_for_cleanup(local_path)
    stored_file = syn.store(
        File(local_path,
             name=str(uuid.uuid4()),
             parent=project,
             synapseStore=False))
    synced = synapseutils.syncFromSynapse(syn, stored_file.id)
    assert_equals(1, len(synced))
    assert_equals(stored_file, synced[0])
예제 #22
0
def testCustomConfigFile():
    """Login should succeed from a copied config file passed via configPath."""
    # Without a default config file there are no credentials to copy
    if not os.path.isfile(client.CONFIG_FILE):
        raise ValueError(
            "Please supply a username and password in the configuration file.")

    config_copy = './CONFIGFILE'
    shutil.copyfile(client.CONFIG_FILE, config_copy)
    schedule_for_cleanup(config_copy)

    alt_syn = Synapse(configPath=config_copy)
    alt_syn.login()
예제 #23
0
def setup(module):
    """Module-level setup: share a syn client, a scratch project, and
    thread-coordination objects via attributes on the syn object."""
    module.syn = integration.syn
    module.project = integration.project

    # Use the module-level syn object to communicate between main and child threads
    # - Read-only objects (for the children)
    module.syn.test_parent = module.syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(module.syn.test_parent)
    module.syn.test_keepRunning = True

    # - Child writeable objects
    module.syn.test_errors = Queue()
    module.syn.test_runCountMutex = Lock()
    module.syn.test_threadsRunning = 0
예제 #24
0
def test_entity_view_add_annotation_columns():
    """EntityViewSchema with addAnnotationColumns=True should store successfully,
    including via the deprecated `type` parameter."""
    folder1 = syn.store(
        Folder(name=str(uuid.uuid4()) +
               'test_entity_view_add_annotation_columns_proj1',
               parent=project,
               annotations={
                   'strAnno': 'str1',
                   'intAnno': 1,
                   'floatAnno': 1.1
               }))
    folder2 = syn.store(
        Folder(name=str(uuid.uuid4()) +
               'test_entity_view_add_annotation_columns_proj2',
               parent=project,
               annotations={
                   'dateAnno': datetime.now(),
                   'strAnno': 'str2',
                   'intAnno': 2
               }))
    schedule_for_cleanup(folder1)
    schedule_for_cleanup(folder2)
    scopeIds = [utils.id_of(folder1), utils.id_of(folder2)]

    # This test is to ensure that user code which use the deprecated field `type` continue to work
    # TODO: remove this test case in Synapse Python client 2.0
    entity_view = EntityViewSchema(name=str(uuid.uuid4()),
                                   scopeIds=scopeIds,
                                   addDefaultViewColumns=False,
                                   addAnnotationColumns=True,
                                   type='project',
                                   parent=project)
    syn.store(entity_view)
    # This test is to ensure that user code which use the deprecated field `type` continue to work
    # TODO: remove this test case in Synapse Python client 2.0
    entity_view = EntityViewSchema(name=str(uuid.uuid4()),
                                   scopeIds=scopeIds,
                                   addDefaultViewColumns=False,
                                   addAnnotationColumns=True,
                                   type='file',
                                   includeEntityTypes=[EntityViewType.PROJECT],
                                   parent=project)
    syn.store(entity_view)

    # The non-deprecated path: includeEntityTypes only
    entity_view = EntityViewSchema(name=str(uuid.uuid4()),
                                   scopeIds=scopeIds,
                                   addDefaultViewColumns=False,
                                   addAnnotationColumns=True,
                                   includeEntityTypes=[EntityViewType.PROJECT],
                                   parent=project)
    syn.store(entity_view)
예제 #25
0
def testSetStorageLocation():
    """A storage location applied to a project should be retrievable via getProjectSetting."""
    proj = syn.store(Project(
        name=str(uuid.uuid4()) +
        "testSetStorageLocation__existing_storage_location"))
    schedule_for_cleanup(proj)

    # Create an external-object storage location backed by a fake bucket
    endpoint = "https://url.doesnt.matter.com"
    bucket = "fake-bucket-name"
    location = syn.createStorageLocationSetting(
        "ExternalObjectStorage", endpointUrl=endpoint, bucket=bucket)

    applied_setting = syn.setStorageLocation(proj, location['storageLocationId'])
    fetched_setting = syn.getProjectSetting(proj, 'upload')
    assert_equals(applied_setting, fetched_setting)
예제 #26
0
    def setup(self):
        """Per-test setup: build two projects and a small wiki hierarchy
        (wiki -> sub-wiki -> sub-sub-wiki) with attachments and internal links."""
        # Create a Project
        self.project_entity = syn.store(Project(name=str(uuid.uuid4())))
        filename = utils.make_bogus_data_file()
        attachname = utils.make_bogus_data_file()
        file_entity = syn.store(File(filename, parent=self.project_entity))

        schedule_for_cleanup(self.project_entity.id)
        schedule_for_cleanup(filename)
        schedule_for_cleanup(file_entity.id)

        # Create mock wiki
        md = """
        This is a test wiki
        =======================
    
        Blabber jabber blah blah boo.
        syn123
        syn456
        """

        wiki = Wiki(owner=self.project_entity,
                    title='A Test Wiki',
                    markdown=md,
                    attachments=[attachname])
        wiki = syn.store(wiki)

        # Create a Wiki sub-page
        subwiki = Wiki(owner=self.project_entity,
                       title='A sub-wiki',
                       markdown='%s' % file_entity.id,
                       parentWikiId=wiki.id)
        self.subwiki = syn.store(subwiki)

        # Markdown containing an internal wiki link and an entity reference
        second_md = """
        Testing internal links
        ======================
    
        [test](#!Synapse:%s/wiki/%s)
    
        %s)
        """ % (self.project_entity.id, self.subwiki.id, file_entity.id)

        sub_subwiki = Wiki(owner=self.project_entity,
                           title='A sub-sub-wiki',
                           markdown=second_md,
                           parentWikiId=self.subwiki.id,
                           attachments=[attachname])
        self.sub_subwiki = syn.store(sub_subwiki)

        # Set up the second project
        self.second_project = syn.store(Project(name=str(uuid.uuid4())))
        schedule_for_cleanup(self.second_project.id)

        # Mapping used by tests to rewrite entity references between projects
        self.fileMapping = {'syn123': 'syn12345', 'syn456': 'syn45678'}

        self.first_headers = syn.getWikiHeaders(self.project_entity)
예제 #27
0
def test_provenance():
    """Set, retrieve, update, and delete an Activity (provenance) on an entity."""
    # Create a File Entity
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)
    data_entity = syn.store(File(fname, parent=project['id']))

    # Create a File Entity of Code
    fd, path = tempfile.mkstemp(suffix=".py")
    with os.fdopen(fd, 'w') as f:
        f.write(
            utils.normalize_lines("""
            ## Chris's fabulous random data generator
            ############################################################
            import random
            random.seed(12345)
            data = [random.gauss(mu=0.0, sigma=1.0) for i in range(100)]
            """))
    schedule_for_cleanup(path)
    code_entity = syn.store(File(path, parent=project['id']))

    # Create a new Activity asserting that the Code Entity was 'used'
    activity = Activity(name='random.gauss',
                        description='Generate some random numbers')
    activity.used(code_entity, wasExecuted=True)
    activity.used(
        {
            'name': 'Superhack',
            'url': 'https://github.com/joe_coder/Superhack'
        },
        wasExecuted=True)
    activity = syn.setProvenance(data_entity, activity)

    # Retrieve and verify the saved Provenance record
    retrieved_activity = syn.getProvenance(data_entity)
    assert_equals(retrieved_activity, activity)

    # Test Activity update
    new_description = 'Generate random numbers like a gangsta'
    retrieved_activity['description'] = new_description
    updated_activity = syn.updateActivity(retrieved_activity)
    assert_equals(updated_activity['name'], retrieved_activity['name'])
    assert_equals(updated_activity['description'], new_description)

    # Test delete
    syn.deleteProvenance(data_entity)
    assert_raises(SynapseHTTPError, syn.getProvenance, data_entity['id'])
예제 #28
0
def test_entity_version():
    """Exercise entity versioning: increment, fetch by version, and delete a version."""
    # Make an Entity and make sure the version is one
    entity = File(parent=project['id'])
    entity['path'] = utils.make_bogus_data_file()
    schedule_for_cleanup(entity['path'])
    entity = syn.store(entity)

    syn.setAnnotations(entity, {'fizzbuzz': 111222})
    entity = syn.get(entity)
    assert_equals(entity.versionNumber, 1)

    # Update the Entity and make sure the version is incremented
    entity.foo = 998877
    entity['name'] = 'foobarbat'
    entity['description'] = 'This is a test entity...'
    entity = syn.store(entity,
                       incrementVersion=True,
                       versionLabel="Prada remix")
    assert_equals(entity.versionNumber, 2)

    # Get the older data and verify the random stuff is still there
    annotations = syn.getAnnotations(entity, version=1)
    assert_equals(annotations['fizzbuzz'][0], 111222)
    returnEntity = syn.get(entity, version=1)
    assert_equals(returnEntity.versionNumber, 1)
    assert_equals(returnEntity['fizzbuzz'][0], 111222)
    assert_not_in('foo', returnEntity)

    # Try the newer Entity
    returnEntity = syn.get(entity)
    assert_equals(returnEntity.versionNumber, 2)
    assert_equals(returnEntity['foo'][0], 998877)
    assert_equals(returnEntity['name'], 'foobarbat')
    assert_equals(returnEntity['description'], 'This is a test entity...')
    assert_equals(returnEntity['versionLabel'], 'Prada remix')

    # Try the older Entity again
    returnEntity = syn.get(entity, version=1)
    assert_equals(returnEntity.versionNumber, 1)
    assert_equals(returnEntity['fizzbuzz'][0], 111222)
    assert_not_in('foo', returnEntity)

    # Delete version 2; the remaining version 1 becomes current
    syn.delete(entity, version=2)
    returnEntity = syn.get(entity)
    assert_equals(returnEntity.versionNumber, 1)
예제 #29
0
def test_randomly_failing_parts():
    """Multipart upload should recover from randomly failing chunk uploads via retries."""
    FAILURE_RATE = 1.0/3.0
    fhid = None
    MIN_PART_SIZE = 5 * MB
    MAX_RETRIES = 20

    # At least two parts, so multiple chunk uploads get a chance to fail
    filepath = utils.make_bogus_binary_file(MIN_PART_SIZE * 2 + 777771)

    normal_put_chunk = None

    def _put_chunk_or_fail_randomly(url, chunk, verbose=False):
        # Fail roughly one in three chunk uploads to exercise retry logic
        if random.random() < FAILURE_RATE:
            raise IOError("Ooops! Artificial upload failure for testing.")
        else:
            return normal_put_chunk(url, chunk, verbose)

    # Mock _put_chunk to fail randomly
    normal_put_chunk = multipart_upload._put_chunk
    multipart_upload._put_chunk = _put_chunk_or_fail_randomly

    try:
        fhid = multipart_upload_file(syn, filepath)

        # Download the file and compare it with the original
        junk = File(parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        schedule_for_cleanup(tmp_path)

        junk['path'] = syn._downloadFileHandle(fhid, junk['id'], 'FileEntity', tmp_path)
        assert_true(filecmp.cmp(filepath, junk.path))

    finally:
        # Un-mock _put_chunk
        if normal_put_chunk:
            multipart_upload._put_chunk = normal_put_chunk

        # Best-effort cleanup; failures here shouldn't mask the test result
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
예제 #30
0
def test_download_multithreaded():
    """Verify a stored file downloads correctly with multi-threaded download enabled."""
    # Create a FileEntity
    # Dictionaries default to FileEntity as a type
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)
    entity = File(name='testMultiThreadDownload' + str(uuid.uuid4()),
                  path=fname,
                  parentId=project['id'])
    entity = syn.store(entity)

    # Enable multi-threaded download; restore the flag in a finally block so a
    # failed assertion cannot leak this global setting into subsequent tests.
    syn.multi_threaded = True
    try:
        # Download and verify
        entity = syn.get(entity)

        assert_equals(entity['files'][0], os.path.basename(fname))
        assert_true(filecmp.cmp(fname, entity['path']))
    finally:
        syn.multi_threaded = False