def test_configPath():
    """Test using a user-specified configPath for Synapse configuration file."""

    tmp_config_file = tempfile.NamedTemporaryFile(suffix='.synapseConfig', delete=False)
    shutil.copyfile(synapseclient.client.CONFIG_FILE, tmp_config_file.name)

    # Create a File
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    output = run('synapse',
                 '--skip-checks',
                 '--configPath',
                 tmp_config_file.name,
                 'add',
                 '-name',
                 'BogusFileEntityTwo',
                 '-description',
                 'Bogus data to test file upload',
                 '-parentid',
                 project.id,
                 filename)
    file_entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)

    # Verify that we stored the file in Synapse
    f1 = syn.get(file_entity_id)
    fh = syn._getFileHandle(f1.dataFileHandleId)
    assert_equals(fh['concreteType'], 'org.sagebionetworks.repo.model.file.S3FileHandle')
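# Hedged sketch: `parse` is a helper these snippets rely on but never define.
# A minimal implementation consistent with its usage above (return the first
# regex capture group from the CLI output) might look like this; the name
# _parse_sketch is ours, not the test suite's.
import re

def _parse_sketch(regex, output):
    match = re.search(regex, output)
    if match is None:
        raise ValueError("could not parse command output")
    return match.group(1)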
def test_store_activity():
    """Test storing entities with Activities"""
    project = create_project()

    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    f = File(path, name='Hinkle horn honking holes', parent=project)

    honking = Activity(name='Hinkle horn honking', description='Nettlebed Cave is a limestone cave located on the South Island of New Zealand.')
    honking.used('http://www.flickr.com/photos/bevanbfree/3482259379/')
    honking.used('http://www.flickr.com/photos/bevanbfree/3482185673/')

    ## storing the entity doesn't set the ID of the activity on our local object
    f = syn.store(f, activity=honking)

    ## ...but the activity returned by getProvenance does have an ID
    honking = syn.getProvenance(f.id)

    assert honking['name'] == 'Hinkle horn honking'
    assert len(honking['used']) == 2
    assert honking['used'][0]['concreteType'] == 'org.sagebionetworks.repo.model.provenance.UsedURL'
    assert honking['used'][0]['wasExecuted'] == False
    assert honking['used'][0]['url'].startswith('http://www.flickr.com/photos/bevanbfree/3482')
    assert honking['used'][1]['concreteType'] == 'org.sagebionetworks.repo.model.provenance.UsedURL'
    assert honking['used'][1]['wasExecuted'] == False

    ## store another entity with the same activity
    f2 = File('http://en.wikipedia.org/wiki/File:Nettlebed_cave.jpg', name='Nettlebed Cave', parent=project)
    f2 = syn.store(f2, activity=honking)

    honking2 = syn.getProvenance(f2)

    assert honking['id'] == honking2['id']
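# Related sketch: the same Activity API can record what was *executed*, not
# just what was used. The target id below is hypothetical.
def _activity_executed_sketch():
    act = Activity(name='Hinkle horn honking, take 2')
    act.used('http://www.flickr.com/photos/bevanbfree/3482259379/')
    act.executed(target='syn1234567', targetVersion=1)  # hypothetical entity id
    return act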
def test_create_or_update_project():
    name = str(uuid.uuid4())

    project = Project(name, a=1, b=2)
    proj_for_cleanup = syn.store(project)
    schedule_for_cleanup(proj_for_cleanup)

    project = Project(name, b=3, c=4)
    project = syn.store(project)

    assert project.a == [1]
    assert project.b == [3]
    assert project.c == [4]

    project = syn.get(project.id)

    assert project.a == [1]
    assert project.b == [3]
    assert project.c == [4]

    project = Project(name, c=5, d=6)
    try:
        project = syn.store(project, createOrUpdate=False)
        assert False, "Expected an exception when storing an existing project with createOrUpdate=False"
    except Exception:
        pass
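# The same expectation can be written with nose's assert_raises, e.g.
# (hedged: Exception is used because the concrete server-side error type
# isn't pinned down by the try/except above):
#
#     assert_raises(Exception, syn.store, Project(name, c=5, d=6), createOrUpdate=False)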
def test_caching_of_locationables_containing_zip_files():
    """Test for SYNR-728, cache.retrieve_local_file_info sets cacheDir and files incorrectly for zip files"""
    data = Data(name='qwertyqwer', parent=project['id'])
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    zip_path = os.path.join(os.path.dirname(path), 'Archive.zip')
    schedule_for_cleanup(zip_path)

    import zipfile
    with zipfile.ZipFile(zip_path, 'w') as zf:
        zf.write(path, os.path.basename(path))

    data['path'] = zip_path
    data = syn.store(data)

    assert data.path == zip_path
    ## should cacheDir and files be filled in here?

    ## remove the files
    os.remove(path)
    os.remove(zip_path)

    ## get the file and store it in the cache. This also has the side
    ## effect of unzipping archive files.
    data2 = syn.get(data.id)

    ## this time it's retrieved from the cache. We should still get
    ## the same cacheDir and files as before
    data3 = syn.get(data.id)
    assert data2.cacheDir == data3.cacheDir
    assert data2.files == data3.files
def test_copyFileHandles__copying_cached_file_handles():
    num_files = 3
    file_entities = []

    # upload temp files to synapse
    for i in range(num_files):
        file_path = utils.make_bogus_data_file()
        schedule_for_cleanup(file_path)
        file_entities.append(syn.store(File(file_path, name=str(uuid.uuid1()), parent=project)))

    # a bunch of setup for arguments to the function under test
    file_handles = [file_entity['_file_handle'] for file_entity in file_entities]
    file_entity_ids = [file_entity['id'] for file_entity in file_entities]
    content_types = [file_handle['contentType'] for file_handle in file_handles]
    filenames = [file_handle['fileName'] for file_handle in file_handles]

    # remove every other FileHandle from the cache (at even indices)
    for i in range(num_files):
        if i % 2 == 0:
            syn.cache.remove(file_handles[i]["id"])

    # get the new list of file_handles
    copiedFileHandles = synapseutils.copyFileHandles(syn, file_handles, ["FileEntity"] * num_files, file_entity_ids,
                                                     content_types, filenames)
    new_file_handle_ids = [copy_result['newFileHandle']['id'] for copy_result in copiedFileHandles['copyResults']]

    # verify that the cached paths are the same
    for i in range(num_files):
        original_path = syn.cache.get(file_handles[i]['id'])
        new_path = syn.cache.get(new_file_handle_ids[i])
        if i % 2 == 0:  # since even indices are not cached, both should be None
            assert_is_none(original_path)
            assert_is_none(new_path)
        else:  # at odd indices, the file path should have been copied
            assert_equals(original_path, new_path)
def test_get_and_store_by_name_and_parent_id():
    project = create_project()

    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    f = File(path, name='Foobarbat', parent=project)
    f2 = syn.store(f)
    f = syn.get(f)

    assert f.id == f2.id
    assert f.name == f2.name
    assert f.parentId == f2.parentId

    ## new file
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    ## should create a new version of the previous File entity
    f3 = File(path, name='Foobarbat', parent=project, description='banana', junk=1234)
    f3 = syn.store(f3)

    ## should be an update of the existing entity with the same name and parent
    assert f3.id == f.id
    assert f3.description == 'banana'
    assert f3.junk == [1234]
    assert filecmp.cmp(path, f3.path)
def test_syncFromSynapse__given_file_id():
    file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(file_path)
    file = syn.store(File(file_path, name=str(uuid.uuid4()), parent=project, synapseStore=False))
    all_files = synapseutils.syncFromSynapse(syn, file.id)
    assert_equals(1, len(all_files))
    assert_equals(file, all_files[0])
def test_slow_unlocker():
    """Manually grabs a lock and makes sure the get/store methods are blocked."""
    
    # Make a file to manually lock
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    contention = File(path, parent=syn.test_parent)
    contention = syn.store(contention)
    
    # Lock the Cache Map
    cacheDir = cache.determine_cache_directory(contention)
    cache.obtain_lock_and_read_cache(cacheDir)
    
    # Start a few calls to get/store that should not complete yet
    store_thread = wrap_function_as_child_thread(lambda: store_catch_412_HTTPError(contention))
    get_thread = wrap_function_as_child_thread(lambda: syn.get(contention))
    thread.start_new_thread(store_thread, ())
    thread.start_new_thread(get_thread, ())
    time.sleep(min(5, cache.CACHE_LOCK_TIME / 2))
    
    # Make sure the threads did not finish
    assert syn.test_threadsRunning > 0
    cache.write_cache_then_release_lock(cacheDir)
    
    # Let the threads go
    while syn.test_threadsRunning > 0:
        time.sleep(1)
    collect_errors_and_fail()
def _makeManifest(content):
    with tempfile.NamedTemporaryFile(mode='w', suffix=".dat",
                                     delete=False) as f:
        f.write(content)
        filepath = utils.normalize_path(f.name)
    schedule_for_cleanup(filepath)
    return filepath
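# Usage sketch for _makeManifest (hedged): build a tab-separated manifest for
# synapseutils.syncToSynapse. 'path' and 'parent' are the documented required
# manifest columns; the values below are hypothetical.
#
#     manifest_path = _makeManifest("path\tparent\n/tmp/foo.txt\tsyn1234567\n")
#     synapseutils.syncToSynapse(syn, manifest_path, sendMessages=False)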
def test_uploadFile_given_dictionary():
    # Make a Folder Entity the old-fashioned way
    folder = {'concreteType': Folder._synapse_entity_type, 
            'parentId'  : project['id'], 
            'name'      : 'fooDictionary',
            'foo'       : 334455}
    entity = syn.store(folder)
    
    # Download and verify that it is the same file
    entity = syn.get(entity)
    assert entity.parentId == project.id
    assert entity.foo[0] == 334455

    # Update via a dictionary
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    rareCase = {}
    rareCase.update(entity.annotations)
    rareCase.update(entity.properties)
    rareCase.update(entity.local_state())
    rareCase['description'] = 'Updating with a plain dictionary should be rare.'

    # Verify it works
    entity = syn.store(rareCase)
    assert entity.description == rareCase['description']
    assert entity.name == 'fooDictionary'
    entity = syn.get(entity['id'])
def test_store_activity():
    # Create a File and an Activity
    path = utils.make_bogus_binary_file()
    schedule_for_cleanup(path)
    entity = File(path, name='Hinkle horn honking holes', parent=project)
    honking = Activity(name='Hinkle horn honking', 
                       description='Nettlebed Cave is a limestone cave located on the South Island of New Zealand.')
    honking.used('http://www.flickr.com/photos/bevanbfree/3482259379/')
    honking.used('http://www.flickr.com/photos/bevanbfree/3482185673/')

    # This doesn't set the ID of the Activity
    entity = syn.store(entity, activity=honking)

    # But this does
    honking = syn.getProvenance(entity.id)

    # Verify the Activity
    assert honking['name'] == 'Hinkle horn honking'
    assert len(honking['used']) == 2
    assert honking['used'][0]['concreteType'] == 'org.sagebionetworks.repo.model.provenance.UsedURL'
    assert honking['used'][0]['wasExecuted'] == False
    assert honking['used'][0]['url'].startswith('http://www.flickr.com/photos/bevanbfree/3482')
    assert honking['used'][1]['concreteType'] == 'org.sagebionetworks.repo.model.provenance.UsedURL'
    assert honking['used'][1]['wasExecuted'] == False

    # Store another Entity with the same Activity
    entity = File('http://en.wikipedia.org/wiki/File:Nettlebed_cave.jpg', 
                  name='Nettlebed Cave', parent=project, synapseStore=False)
    entity = syn.store(entity, activity=honking)

    # The Activities should match
    honking2 = syn.getProvenance(entity)
    assert honking['id'] == honking2['id']
def test_configPath():
    """Test using a user-specified configPath for Synapse configuration file.

    """

    tmp_config_file = tempfile.NamedTemporaryFile(suffix='.synapseConfig', delete=False)
    shutil.copyfile(synapseclient.client.CONFIG_FILE, tmp_config_file.name)

    # Create a File
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    output = run('synapse',
                 '--skip-checks',
                 '--configPath',
                 tmp_config_file.name,
                 'add',
                 '-name',
                 'BogusFileEntityTwo',
                 '-description',
                 'Bogus data to test file upload',
                 '-parentid',
                 project.id,
                 filename)
    file_entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)

    # Verify that we stored the file in Synapse
    f1 = syn.get(file_entity_id)
    fh = syn._getFileHandle(f1.dataFileHandleId)
    assert fh['concreteType'] == 'org.sagebionetworks.repo.model.file.S3FileHandle'
def test_entity_view_add_annotation_columns():
    folder1 = syn.store(
        Folder(name=str(uuid.uuid4()) +
               'test_entity_view_add_annotation_columns_proj1',
               parent=project,
               annotations={
                   'strAnno': 'str1',
                   'intAnno': 1,
                   'floatAnno': 1.1
               }))
    folder2 = syn.store(
        Folder(name=str(uuid.uuid4()) +
               'test_entity_view_add_annotation_columns_proj2',
               parent=project,
               annotations={
                   'dateAnno': datetime.now(),
                   'strAnno': 'str2',
                   'intAnno': 2
               }))
    schedule_for_cleanup(folder1)
    schedule_for_cleanup(folder2)
    scopeIds = [utils.id_of(folder1), utils.id_of(folder2)]

    entity_view = EntityViewSchema(name=str(uuid.uuid4()),
                                   scopeIds=scopeIds,
                                   addDefaultViewColumns=False,
                                   addAnnotationColumns=True,
                                   type='project',
                                   parent=project)
    syn.store(entity_view)
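# Follow-up sketch (hedged): with addAnnotationColumns=True, the stored view
# should expose the annotation keys above (strAnno, intAnno, floatAnno,
# dateAnno) as columns, which can be checked with a table query:
#
#     view = syn.store(entity_view)
#     results = syn.tableQuery("select strAnno, intAnno from %s" % view.id)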
def test_downloadFile():
    # See if a "wget"-style download works
    filename = utils.download_file(
        "http://dev-versions.synapse.sagebase.org/sage_bionetworks_logo_274x128.png"
    )
    schedule_for_cleanup(filename)
    assert os.path.exists(filename)
def test_synStore_sftpIntegration():
    """Creates a File Entity on an sftp server and add the external url. """
    filepath = utils.make_bogus_binary_file(1 * MB - 777771)
    try:
        file = syn.store(File(filepath, parent=project))
        file2 = syn.get(file)
        assert file.externalURL == file2.externalURL and urlparse(file2.externalURL).scheme == "sftp"

        tmpdir = tempfile.mkdtemp()
        schedule_for_cleanup(tmpdir)

        ## test filename override
        file2.fileNameOverride = "whats_new_in_baltimore.data"
        file2 = syn.store(file2)
        ## TODO We haven't defined how filename override interacts with
        ## TODO previously cached files so, side-step that for now by
        ## TODO making sure the file is not in the cache!
        syn.cache.remove(file2.dataFileHandleId, delete=True)
        file3 = syn.get(file, downloadLocation=tmpdir)
        assert os.path.basename(file3.path) == file2.fileNameOverride

        ## test that we got an MD5 à la SYNPY-185
        assert_is_not_none(file3.md5)
        fh = syn._getFileHandle(file3.dataFileHandleId)
        assert_is_not_none(fh["contentMd5"])
        assert_equals(file3.md5, fh["contentMd5"])
    finally:
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_store__changing_from_Synapse_to_externalURL_by_changing_path():
    # create a temp file
    temp_path = utils.make_bogus_data_file()
    schedule_for_cleanup(temp_path)

    ext = syn.store(synapseclient.File(temp_path, parent=project, synapseStore=True))
    ext = syn.get(ext)
    assert_equal("org.sagebionetworks.repo.model.file.S3FileHandle", ext._file_handle.concreteType)

    ext.synapseStore = False
    ext = syn.store(ext)

    # do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)
    assert_equal("org.sagebionetworks.repo.model.file.ExternalFileHandle", ext._file_handle.concreteType)
    assert_equal(utils.as_url(temp_path), ext.externalURL)
    assert_equal(False, ext.synapseStore)

    # swap back to synapse storage
    ext.synapseStore = True
    ext = syn.store(ext)
    # do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)
    assert_equal("org.sagebionetworks.repo.model.file.S3FileHandle", ext._file_handle.concreteType)
    assert_equal(None, ext.externalURL)
    assert_equal(True, ext.synapseStore)
def test_store_file_handle_update_metadata():
    original_file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(original_file_path)

    # upload the file to the project
    entity = syn.store(File(original_file_path, parent=project))
    old_file_handle = entity._file_handle

    # create file handle to replace the old one
    replacement_file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(replacement_file_path)
    new_file_handle = syn.uploadFileHandle(replacement_file_path, parent=project)

    entity.dataFileHandleId = new_file_handle['id']
    new_entity = syn.store(entity)

    # make sure _file_handle info was changed
    # (_file_handle values are all changed at once so just verifying id change is sufficient)
    assert_equal(new_file_handle['id'], new_entity._file_handle['id'])
    assert_not_equal(old_file_handle['id'], new_entity._file_handle['id'])

    # check that local_state was updated
    assert_equal(replacement_file_path, new_entity.path)
    assert_equal(os.path.dirname(replacement_file_path), new_entity.cacheDir)
    assert_equal([os.path.basename(replacement_file_path)], new_entity.files)
def test_uploadFileEntity():
    # Create a FileEntity
    # Dictionaries default to FileEntity as a type
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)
    entity = {'name'        : 'fooUploadFileEntity',
              'description' : 'A test file entity',
              'parentId'    : project['id']}
    entity = syn.uploadFile(entity, fname)

    # Download and verify
    entity = syn.downloadEntity(entity)
    assert entity['files'][0] == os.path.basename(fname)
    assert filecmp.cmp(fname, entity['path'])

    # Check that we uploaded the expected type of file handle
    fh = syn.restGET('/entity/%s/filehandles' % entity.id)['list'][0]
    assert fh['concreteType'] == 'org.sagebionetworks.repo.model.file.S3FileHandle'

    # Create a different temporary file
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)

    # Update existing FileEntity
    entity = syn.uploadFile(entity, fname)

    # Download and verify that it is the same file
    entity = syn.downloadEntity(entity)
    assert entity['files'][0] == os.path.basename(fname)
    assert filecmp.cmp(fname, entity['path'])
def test_synStore_sftpIntegration():
    """Creates a File Entity on an sftp server and add the external url. """
    filepath = utils.make_bogus_binary_file(1 * MB - 777771)
    try:
        file = syn.store(File(filepath, parent=project))
        file2 = syn.get(file)
        assert file.externalURL == file2.externalURL and urlparse(
            file2.externalURL).scheme == 'sftp'

        tmpdir = tempfile.mkdtemp()
        schedule_for_cleanup(tmpdir)

        ## test filename override
        file2.fileNameOverride = "whats_new_in_baltimore.data"
        file2 = syn.store(file2)
        ## TODO We haven't defined how filename override interacts with
        ## TODO previously cached files so, side-step that for now by
        ## TODO making sure the file is not in the cache!
        syn.cache.remove(file2.dataFileHandleId, delete=True)
        file3 = syn.get(file, downloadLocation=tmpdir)
        assert os.path.basename(file3.path) == file2.fileNameOverride

        ## test that we got an MD5 à la SYNPY-185
        assert_is_not_none(file3.md5)
        fh = syn._getFileHandle(file3.dataFileHandleId)
        assert_is_not_none(fh['contentMd5'])
        assert_equals(file3.md5, fh['contentMd5'])
    finally:
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_uploadFileEntity():
    # Create a FileEntity
    # Dictionaries default to FileEntity as a type
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)
    entity = {'name'        : 'fooUploadFileEntity',
              'description' : 'A test file entity',
              'parentId'    : project['id']}
    entity = syn.uploadFile(entity, fname)

    # Download and verify
    entity = syn.downloadEntity(entity)

    print(entity['files'])
    assert entity['files'][0] == os.path.basename(fname)
    assert filecmp.cmp(fname, entity['path'])

    # Check that we uploaded the expected type of file handle
    fh = syn.restGET('/entity/%s/filehandles' % entity.id)['list'][0]
    assert fh['concreteType'] == 'org.sagebionetworks.repo.model.file.S3FileHandle'

    # Create a different temporary file
    fname = utils.make_bogus_data_file()
    schedule_for_cleanup(fname)

    # Update existing FileEntity
    entity = syn.uploadFile(entity, fname)

    # Download and verify that it is the same file
    entity = syn.downloadEntity(entity)
    print(entity['files'])
    assert_equals(entity['files'][0], os.path.basename(fname))
    assert filecmp.cmp(fname, entity['path'])
def test_synapse_integer_columns_with_missing_values_from_dataframe():
    #SYNPY-267
    cols = [
        Column(name='x', columnType='STRING'),
        Column(name='y', columnType='INTEGER'),
        Column(name='z', columnType='DOUBLE')
    ]
    schema = syn.store(Schema(name='Big Table', columns=cols, parent=project))

    ## write rows to CSV file
    with tempfile.NamedTemporaryFile(mode="w", suffix=".csv",
                                     delete=False) as temp:
        schedule_for_cleanup(temp.name)
        #2nd row is missing a value in its integer column
        temp.write('x,y,z\na,1,0.9\nb,,0.8\nc,3,0.7\n')
        temp.flush()
        filename = temp.name

    #create a table from csv
    table = Table(schema, filename)
    df = table.asDataFrame()

    table_from_dataframe = Table(schema, df)
    assert_not_equal(table.filepath, table_from_dataframe.filepath)
    #compare to make sure no .0's were appended to the integers
    assert filecmp.cmp(table.filepath, table_from_dataframe.filepath)
def test_uploadFile_given_dictionary():
    # Make a Folder Entity the old-fashioned way
    folder = {
        'concreteType': Folder._synapse_entity_type,
        'parentId': project['id'],
        'name': 'fooDictionary',
        'foo': 334455
    }
    entity = syn.store(folder)

    # Download and verify that it is the same file
    entity = syn.get(entity)
    assert entity.parentId == project.id
    assert entity.foo[0] == 334455

    # Update via a dictionary
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    rareCase = {}
    rareCase.update(entity.annotations)
    rareCase.update(entity.properties)
    rareCase.update(entity.local_state())
    rareCase['description'] = 'Updating with a plain dictionary should be rare.'

    # Verify it works
    entity = syn.store(rareCase)
    assert entity.description == rareCase['description']
    assert entity.name == 'fooDictionary'
    entity = syn.get(entity['id'])
def test_resume_partial_download():
    original_file = utils.make_bogus_data_file(40000)
    original_md5 = utils.md5_for_file(original_file).hexdigest()

    entity = File(original_file, parent=project['id'])
    entity = syn.store(entity)

    ## stash the original file for comparison later
    shutil.move(original_file, original_file+'.original')
    original_file += '.original'
    schedule_for_cleanup(original_file)

    temp_dir = tempfile.gettempdir()

    url = '%s/entity/%s/file' % (syn.repoEndpoint, entity.id)
    path = syn._download(url, destination=temp_dir, file_handle_id=entity.dataFileHandleId, expected_md5=entity.md5)

    ## simulate an incomplete download by putting the
    ## complete file back into its temporary location
    tmp_path = utils.temp_download_filename(temp_dir, entity.dataFileHandleId)
    shutil.move(path, tmp_path)

    ## ...and truncating it to some fraction of its original size
    with open(tmp_path, 'r+') as f:
        f.truncate(3*os.path.getsize(original_file)//7)

    ## this should complete the partial download
    path = syn._download(url, destination=temp_dir, file_handle_id=entity.dataFileHandleId, expected_md5=entity.md5)

    assert filecmp.cmp(original_file, path), "File comparison failed"
def test_multipart_upload_big_string():
    cities = ["Seattle", "Portland", "Vancouver", "Victoria",
              "San Francisco", "Los Angeles", "New York",
              "Oaxaca", "Cancún", "Curaçao", "जोधपुर",
              "অসম", "ལྷ་ས།", "ཐིམ་ཕུ་", "دبي", "አዲስ አበባ",
              "São Paulo", "Buenos Aires", "Cartagena",
              "Amsterdam", "Venice", "Rome", "Dubrovnik",
              "Sarajevo", "Madrid", "Barcelona", "Paris",
              "Αθήνα", "Ρόδος", "København", "Zürich",
              "金沢市", "서울", "แม่ฮ่องสอน", "Москва"]

    text = "Places I wanna go:\n"
    while len(text.encode('utf-8')) < multipart_upload_module.MIN_PART_SIZE:
        text += ", ".join( random.choice(cities) for i in range(5000) ) + "\n"

    fhid = multipart_upload_string(syn, text)

    # Download the file and compare it with the original
    junk = File(parent=project, dataFileHandleId=fhid)
    junk.properties.update(syn._createEntity(junk.properties))
    (tmp_f, tmp_path) = tempfile.mkstemp()
    schedule_for_cleanup(tmp_path)

    junk['path'] = syn._downloadFileHandle(fhid, junk['id'], "FileEntity" ,tmp_path)

    with open(junk.path, encoding='utf-8') as f:
        retrieved_text = f.read()

    assert retrieved_text == text
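# Note (hedged): MIN_PART_SIZE presumably mirrors the 5 MB S3 multipart
# minimum part size; the while-loop above grows the string past it so the
# upload genuinely spans more than one part.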
def test_copyFileHandles__copying_cached_file_handles():
    num_files = 3
    file_entities = []

    # upload temp files to synapse
    for i in range(num_files):
        file_path = utils.make_bogus_data_file()
        schedule_for_cleanup(file_path)
        file_entities.append(syn.store(File(file_path, name=str(uuid.uuid1()), parent=project)))

    # a bunch of setup for arguments to the function under test
    file_handles = [file_entity['_file_handle'] for file_entity in file_entities]
    file_entity_ids = [file_entity['id'] for file_entity in file_entities]
    content_types = [file_handle['contentType'] for file_handle in file_handles]
    filenames = [file_handle['fileName'] for file_handle in file_handles]

    # remove every other FileHandle from the cache (at even indices)
    for i in range(num_files):
        if i % 2 == 0:
            syn.cache.remove(file_handles[i]["id"])

    # get the new list of file_handles
    copiedFileHandles = synapseutils.copyFileHandles(syn, file_handles, ["FileEntity"] * num_files,
                                                     file_entity_ids, content_types, filenames)
    new_file_handle_ids = [copy_result['newFileHandle']['id'] for copy_result in copiedFileHandles['copyResults']]

    # verify that the cached paths are the same
    for i in range(num_files):
        original_path = syn.cache.get(file_handles[i]['id'])
        new_path = syn.cache.get(new_file_handle_ids[i])
        if i % 2 == 0:  # since even indices are not cached, both should be None
            assert_is_none(original_path)
            assert_is_none(new_path)
        else:  # at odd indices, the file path should have been copied
            assert_equals(original_path, new_path)
def test_wikiAttachment():
    # Upload a file to be attached to a Wiki
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    schedule_for_cleanup(attachname)
    fileHandle = upload_synapse_s3(syn, filename)

    # Create and store a Wiki 
    # The constructor should accept both file handles and file paths
    md = """
    This is a test wiki
    =======================

    Blabber jabber blah blah boo.
    """
    wiki = Wiki(owner=project, title='A Test Wiki', markdown=md, 
                fileHandles=[fileHandle['id']], 
                attachments=[attachname])
    wiki = syn.store(wiki)
    
    # Create a Wiki sub-page
    subwiki = Wiki(owner=project, title='A sub-wiki', 
                   markdown='nothing', parentWikiId=wiki.id)
    subwiki = syn.store(subwiki)
    
    # Retrieve the root Wiki from Synapse
    wiki2 = syn.getWiki(project)
    # due to the new wiki api, we'll get back some new properties,
    # namely markdownFileHandleId and markdown_path, so only compare
    # properties that are in the first object
    for property_name in wiki:
        assert_equal(wiki[property_name], wiki2[property_name])

    # Retrieve the sub Wiki from Synapse
    wiki2 = syn.getWiki(project, subpageId=subwiki.id)
    for property_name in wiki:
        assert_equal(subwiki[property_name], wiki2[property_name])

    # Try making an update
    wiki['title'] = 'A New Title'
    wiki['markdown'] = wiki['markdown'] + "\nNew stuff here!!!\n"
    syn.store(wiki)
    wiki = syn.getWiki(project)
    assert_equals(wiki['title'], 'A New Title')
    assert_true(wiki['markdown'].endswith("\nNew stuff here!!!\n"))

    # Check the Wiki's metadata
    headers = syn.getWikiHeaders(project)
    assert_equals(len(headers), 2)
    assert_in(headers[0]['title'], (wiki['title'], subwiki['title']))

    file_handles = syn.getWikiAttachments(wiki)
    file_names = [fh['fileName'] for fh in file_handles]
    for fn in [filename, attachname]:
        assert_in(os.path.basename(fn), file_names)

    syn.delete(subwiki)
    syn.delete(wiki)
    assert_raises(SynapseHTTPError, syn.getWiki, project)
def _set_up_external_s3_project():
    """
    creates a project and links it to an external s3 storage
    :return: synapse id of the created  project, and storageLocationId of the project
    """
    EXTERNAL_S3_BUCKET = 'python-client-integration-test.sagebase.org'
    project_ext_s3 = syn.store(Project(name=str(uuid.uuid4())))

    destination = {
        'uploadType': 'S3',
        'concreteType':
        'org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting',
        'bucket': EXTERNAL_S3_BUCKET
    }
    destination = syn.restPOST('/storageLocation',
                               body=json.dumps(destination))

    project_destination = {
        'concreteType':
        'org.sagebionetworks.repo.model.project.UploadDestinationListSetting',
        'settingsType': 'upload',
        'locations': [destination['storageLocationId']],
        'projectId': project_ext_s3.id
    }

    syn.restPOST('/projectSettings', body=json.dumps(project_destination))
    schedule_for_cleanup(project_ext_s3)
    return project_ext_s3.id, destination['storageLocationId']
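# Usage sketch (hedged): files stored under the returned project id should be
# routed to the external bucket configured above, which can be verified via
# the file handle's bucketName:
#
#     project_id, storage_location_id = _set_up_external_s3_project()
#     entity = syn.store(File(utils.make_bogus_data_file(), parent=project_id))
#     fh = syn._getFileHandle(entity.dataFileHandleId)
#     assert_equals('python-client-integration-test.sagebase.org', fh['bucketName'])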
def test_teams():
    name = "My Uniquely Named Team " + str(uuid.uuid4())
    team = syn.store(Team(name=name, description="A fake team for testing..."))
    schedule_for_cleanup(team)

    found_team = syn.getTeam(team.id)
    assert team == found_team

    p = syn.getUserProfile()
    found = None
    for m in syn.getTeamMembers(team):
        if m.member.ownerId == p.ownerId:
            found = m
            break

    assert found is not None, "Couldn't find user {} in team".format(p.userName)

    ## needs to be retried 'cause appending to the search index is asynchronous
    tries = 10
    found_team = None
    while tries > 0:
        try:
            found_team = syn.getTeam(name)
            break
        except ValueError:
            tries -= 1
            if tries > 0: time.sleep(1)
    assert team == found_team
def test_getChildren():
    # setup a hierarchy for folders
    # PROJECT
    # |     \
    # File   Folder
    #           |
    #         File
    project_name = str(uuid.uuid1())
    test_project = syn.store(Project(name=project_name))
    folder = syn.store(Folder(name="firstFolder", parent=test_project))
    syn.store(
        File(path="~/doesntMatter.txt",
             name="file inside folders",
             parent=folder,
             synapseStore=False))
    project_file = syn.store(
        File(path="~/doesntMatterAgain.txt",
             name="file inside project",
             parent=test_project,
             synapseStore=False))
    schedule_for_cleanup(test_project)

    expected_id_set = {project_file.id, folder.id}
    children_id_set = {x['id'] for x in syn.getChildren(test_project.id)}
    assert_equals(expected_id_set, children_id_set)
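# Related sketch: getChildren also accepts an includeTypes filter, so the
# folder alone could be fetched from the hierarchy above:
#
#     folder_ids = {x['id'] for x in syn.getChildren(test_project.id, includeTypes=['folder'])}
#     assert_equals({folder.id}, folder_ids)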
def test_store_file_handle_update_metadata():
    original_file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(original_file_path)

    # upload the file to the project
    entity = syn.store(File(original_file_path, parent=project))
    old_file_handle = entity._file_handle

    #create file handle to replace the old one
    replacement_file_path = utils.make_bogus_data_file()
    schedule_for_cleanup(replacement_file_path)
    new_file_handle = syn.uploadFileHandle(replacement_file_path,
                                           parent=project)

    entity.dataFileHandleId = new_file_handle['id']
    new_entity = syn.store(entity)

    # make sure _file_handle info was changed
    # (_file_handle values are all changed at once so just verifying id change is sufficient)
    assert_equal(new_file_handle['id'], new_entity._file_handle['id'])
    assert_not_equal(old_file_handle['id'], new_entity._file_handle['id'])

    #check that local_state was updated
    assert_equal(replacement_file_path, new_entity.path)
    assert_equal(os.path.dirname(replacement_file_path), new_entity.cacheDir)
    assert_equal([os.path.basename(replacement_file_path)], new_entity.files)
def test_teams():
    name = "My Uniquely Named Team " + str(uuid.uuid4())
    team = syn.store(Team(name=name, description="A fake team for testing..."))
    schedule_for_cleanup(team)

    found_team = syn.getTeam(team.id)
    assert_equals(team, found_team)

    p = syn.getUserProfile()
    found = None
    for m in syn.getTeamMembers(team):
        if m.member.ownerId == p.ownerId:
            found = m
            break

    assert_is_not_none(found,
                       "Couldn't find user {} in team".format(p.userName))

    # needs to be retried 'cause appending to the search index is asynchronous
    tries = 10
    found_team = None
    while tries > 0:
        try:
            found_team = syn.getTeam(name)
            break
        except ValueError:
            tries -= 1
            if tries > 0:
                time.sleep(1)
    assert_equals(team, found_team)
def test_store__changing_from_Synapse_to_externalURL_by_changing_path():
    #create a temp file
    temp_path = utils.make_bogus_data_file()
    schedule_for_cleanup(temp_path)

    ext = syn.store(
        synapseclient.File(temp_path, parent=project, synapseStore=True))
    ext = syn.get(ext)
    assert_equal("org.sagebionetworks.repo.model.file.S3FileHandle",
                 ext._file_handle.concreteType)

    ext.synapseStore = False
    ext = syn.store(ext)

    #do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)
    assert_equal("org.sagebionetworks.repo.model.file.ExternalFileHandle",
                 ext._file_handle.concreteType)
    assert_equal(utils.as_url(temp_path), ext.externalURL)
    assert_equal(False, ext.synapseStore)

    #swap back to synapse storage
    ext.synapseStore = True
    ext = syn.store(ext)
    #do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)
    assert_equal("org.sagebionetworks.repo.model.file.S3FileHandle",
                 ext._file_handle.concreteType)
    assert_equal(None, ext.externalURL)
    assert_equal(True, ext.synapseStore)
def test_slow_unlocker():
    """Manually grabs a lock and makes sure the get/store methods are blocked."""
    
    # Make a file to manually lock
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    contention = File(path, parent=syn.test_parent)
    contention = syn.store(contention)
    
    # Lock the Cache Map
    cacheDir = cache.determine_cache_directory(contention['dataFileHandleId'])
    cache.obtain_lock_and_read_cache(cacheDir)
    
    # Start a few calls to get/store that should not complete yet
    store_thread = wrap_function_as_child_thread(lambda: store_catch_412_HTTPError(contention))
    get_thread = wrap_function_as_child_thread(lambda: syn.get(contention))
    thread.start_new_thread(store_thread, ())
    thread.start_new_thread(get_thread, ())
    time.sleep(min(5, cache.CACHE_LOCK_TIME / 2))
    
    # Make sure the threads did not finish
    assert syn.test_threadsRunning > 0
    cache.write_cache_then_release_lock(cacheDir)
    
    # Let the threads go
    while syn.test_threadsRunning > 0:
        time.sleep(1)
    collect_errors_and_fail()
def test_get_and_store_by_name_and_parent_id():
    project = create_project()

    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    f = File(path, name='Foobarbat', parent=project)
    f2 = syn.store(f)
    f = syn.get(f)

    assert f.id == f2.id
    assert f.name == f2.name
    assert f.parentId == f2.parentId

    ## new file
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    ## should create a new version of the previous File entity
    f3 = File(path,
              name='Foobarbat',
              parent=project,
              description='banana',
              junk=1234)
    f3 = syn.store(f3)

    ## should be an update of the existing entity with the same name and parent
    assert f3.id == f.id
    assert f3.description == 'banana'
    assert f3.junk == [1234]
    assert filecmp.cmp(path, f3.path)
def test_ftp_download():
    """Test downloading an Entity that points to a file on an FTP server. """

    # Use an external reference here because we only need to test FTP download (not upload),
    # and this way we don't have to maintain an FTP server just for this purpose.
    # Make an entity that points to an FTP server file.
    entity = File(parent=project['id'], name='1KB.zip')
    fileHandle = {}
    fileHandle['externalURL'] = 'ftp://speedtest.tele2.net/1KB.zip'
    fileHandle["fileName"] = entity.name
    fileHandle["contentType"] = "application/zip"
    fileHandle["contentMd5"] = '0f343b0931126a20f133d67c2b018a3b'
    fileHandle["contentSize"] = 1024
    fileHandle["concreteType"] = "org.sagebionetworks.repo.model.file.ExternalFileHandle"
    fileHandle = syn.restPOST('/externalFileHandle', json.dumps(fileHandle),
                              syn.fileHandleEndpoint)
    entity.dataFileHandleId = fileHandle['id']
    entity = syn.store(entity)

    # Download the entity and check that MD5 matches expected
    FTPfile = syn.get(entity.id,
                      downloadLocation=os.getcwd(),
                      downloadFile=True)
    assert FTPfile.md5 == utils.md5_for_file(FTPfile.path).hexdigest()
    schedule_for_cleanup(entity)
    os.remove(FTPfile.path)
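# Hedged sketch (not the library's code): utils.md5_for_file evidently returns
# a hashlib md5 object, hence the .hexdigest() call above. A minimal
# equivalent:
import hashlib

def _md5_for_file_sketch(path, block_size=2 ** 20):
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(block_size), b''):
            md5.update(chunk)
    return md5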
def dontruntest_big_csvs():
    cols = []
    cols.append(Column(name='name', columnType='STRING', maximumSize=1000))
    cols.append(Column(name='foo', columnType='STRING', enumValues=['foo', 'bar', 'bat']))
    cols.append(Column(name='x', columnType='DOUBLE'))
    cols.append(Column(name='n', columnType='INTEGER'))
    cols.append(Column(name='is_bogus', columnType='BOOLEAN'))

    schema1 = syn.store(Schema(name='Big Table', columns=cols, parent=project))

    print "Created table:", schema1.id
    print "with columns:", schema1.columnIds

    ## write rows to CSV file
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp:
        schedule_for_cleanup(temp.name)
        writer = csv.writer(temp, quoting=csv.QUOTE_NONNUMERIC, lineterminator=os.linesep)
        writer.writerow([col.name for col in cols])

        for i in range(10):
            for j in range(100):
                foo = cols[1].enumValues[random.randint(0,2)]
                writer.writerow(('Robot ' + str(i*100 + j), foo, random.random()*200.0, random.randint(0,100), random.random()>=0.5))
            print "wrote 100 rows to disk"

    ## upload CSV
    syn._uploadCsv(filepath=temp.name, schema=schema1)

    from synapseclient.table import CsvFileTable
    results = CsvFileTable.from_table_query(syn, "select * from %s" % schema1.id)
    print "etag:", results.etag
    print "tableId:", results.tableId

    for row in results:
        print row
def test_synapseStore_flag():
    # Store a path to a local file
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    bogus = File(path, name='Totally bogus data', parent=project, synapseStore=False)
    bogus = syn.store(bogus)
    
    # Verify the thing can be downloaded as a URL
    bogus = syn.get(bogus, downloadFile=False)
    assert bogus.name == 'Totally bogus data'
    assert bogus.path == path, "Path: %s\nExpected: %s" % (bogus.path, path)
    assert bogus.synapseStore == False

    # Make sure the test runs on Windows and other OS's
    if path[0].isalpha() and path[1] == ':':
        # A Windows file URL looks like this: file:///c:/foo/bar/bat.txt
        expected_url = 'file:///' + path.replace("\\", "/")
    else:
        expected_url = 'file://' + path

    assert bogus.externalURL == expected_url, 'URL: %s\nExpected %s' % (bogus.externalURL, expected_url)

    # A file path that doesn't exist should still work
    bogus = File('/path/to/local/file1.xyz', parentId=project.id, synapseStore=False)
    bogus = syn.store(bogus)
    assert_raises(IOError, syn.get, bogus)
    assert bogus.synapseStore == False

    # Try a URL
    bogus = File('http://dev-versions.synapse.sagebase.org/synapsePythonClient', parent=project, synapseStore=False)
    bogus = syn.store(bogus)
    bogus = syn.get(bogus)
    assert bogus.synapseStore == False
def dontruntest_big_csvs():
    cols = [
        Column(name='name', columnType='STRING', maximumSize=1000),
        Column(name='foo',
               columnType='STRING',
               enumValues=['foo', 'bar', 'bat']),
        Column(name='x', columnType='DOUBLE'),
        Column(name='n', columnType='INTEGER'),
        Column(name='is_bogus', columnType='BOOLEAN')
    ]

    schema1 = syn.store(Schema(name='Big Table', columns=cols, parent=project))

    # write rows to CSV file
    with tempfile.NamedTemporaryFile(delete=False) as temp:
        schedule_for_cleanup(temp.name)
        filename = temp.name

    with io.open(filename, mode='w', encoding="utf-8", newline='') as temp:
        writer = csv.writer(temp,
                            quoting=csv.QUOTE_NONNUMERIC,
                            lineterminator=str(os.linesep))
        writer.writerow([col.name for col in cols])

        for i in range(10):
            for j in range(100):
                foo = cols[1].enumValues[random.randint(0, 2)]
                writer.writerow(
                    ('Robot ' + str(i * 100 + j), foo, random.random() * 200.0,
                     random.randint(0, 100), random.random() >= 0.5))
    # upload CSV
    syn._uploadCsv(filepath=temp.name, schema=schema1)

    from synapseclient.table import CsvFileTable
    CsvFileTable.from_table_query(syn, "select * from %s" % schema1.id)
def test_get_local_file():
    """Tests synapse.get() with local a local file """
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    folder = Folder('TestFindFileFolder', parent=project, description='A place to put my junk')
    folder = syn.createEntity(folder)

    #Get an nonexistent file in Synapse
    assert_raises(SynapseError, syn.get, new_path)

    #Get a file really stored in Synapse
    ent_folder = syn.store(File(new_path, parent=folder))
    ent2 = syn.get(new_path)
    assert ent_folder.id==ent2.id and ent_folder.versionNumber==ent2.versionNumber

    #Get a file stored in Multiple locations #should display warning
    ent = syn.store(File(new_path, parent=project))
    ent = syn.get(new_path)

    #Get a file stored in multiple locations with limit set
    ent = syn.get(new_path, limitSearch=folder.id)
    assert ent.id == ent_folder.id and ent.versionNumber==ent_folder.versionNumber

    #Get a file that exists but such that limitSearch removes them and raises error
    assert_raises(SynapseError, syn.get, new_path, limitSearch='syn1')
def test_synapse_integer_columns_with_missing_values_from_dataframe():
    # SYNPY-267
    cols = [
        Column(name='x', columnType='STRING'),
        Column(name='y', columnType='INTEGER'),
        Column(name='z', columnType='DOUBLE')
    ]
    schema = syn.store(Schema(name='Big Table', columns=cols, parent=project))

    line_terminator = str(os.linesep)
    # write rows to CSV file
    with tempfile.NamedTemporaryFile(mode="w", suffix=".csv",
                                     delete=False) as temp:
        schedule_for_cleanup(temp.name)
        # 2nd row is missing a value in its integer column
        temp.write('x,y,z' + line_terminator + 'a,1,0.9' + line_terminator +
                   'b,,0.8' + line_terminator + 'c,3,0.7' + line_terminator)
        temp.flush()
        filename = temp.name

    # create a table from csv
    table = Table(schema, filename)
    df = table.asDataFrame()

    table_from_dataframe = Table(schema, df)
    assert_not_equal(table.filepath, table_from_dataframe.filepath)
    df2 = table_from_dataframe.asDataFrame()
    assert_frame_equal(df, df2)
def test_multipart_upload_big_string():
    cities = ["Seattle", "Portland", "Vancouver", "Victoria",
              "San Francisco", "Los Angeles", "New York",
              "Oaxaca", "Cancún", "Curaçao", "जोधपुर",
              "অসম", "ལྷ་ས།", "ཐིམ་ཕུ་", "دبي", "አዲስ አበባ",
              "São Paulo", "Buenos Aires", "Cartagena",
              "Amsterdam", "Venice", "Rome", "Dubrovnik",
              "Sarajevo", "Madrid", "Barcelona", "Paris",
              "Αθήνα", "Ρόδος", "København", "Zürich",
              "金沢市", "서울", "แม่ฮ่องสอน", "Москва"]

    text = "Places I wanna go:\n"
    while len(text.encode('utf-8')) < multipart_upload_module.MIN_PART_SIZE:
        text += ", ".join( random.choice(cities) for i in range(5000) ) + "\n"

    fhid = multipart_upload_string(syn, text)
    print('FileHandle: {fhid}'.format(fhid=fhid))

    # Download the file and compare it with the original
    junk = File("message.txt", parent=project, dataFileHandleId=fhid)
    junk.properties.update(syn._createEntity(junk.properties))
    (tmp_f, tmp_path) = tempfile.mkstemp()
    schedule_for_cleanup(tmp_path)
    junk.update(syn._downloadFileEntity(junk, tmp_path))

    with open(junk.path, encoding='utf-8') as f:
        retrieved_text = f.read()

    assert retrieved_text == text
def test_synapseStore_flag():
    # Store a path to a local file
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    bogus = File(path, name='Totally bogus data', parent=project, synapseStore=False)
    bogus = syn.store(bogus)
    
    # Verify the thing can be downloaded as a URL
    bogus = syn.get(bogus, downloadFile=False)
    assert bogus.name == 'Totally bogus data'
    assert bogus.path == path, "Path: %s\nExpected: %s" % (bogus.path, path)
    assert bogus.synapseStore == False

    # Make sure the test runs on Windows and other OS's
    if path[0].isalpha() and path[1]==':':
        # A Windows file URL looks like this: file:///c:/foo/bar/bat.txt
        expected_url = 'file:///' + path.replace("\\","/")
    else:
        expected_url = 'file://' + path

    assert bogus.externalURL == expected_url, 'URL: %s\nExpected %s' % (bogus.externalURL, expected_url)

    # A file path that doesn't exist should still work
    bogus = File('/path/to/local/file1.xyz', parentId=project.id, synapseStore=False)
    bogus = syn.store(bogus)
    assert_raises(IOError, syn.get, bogus)
    assert bogus.synapseStore == False

    # Try a URL
    bogus = File('http://dev-versions.synapse.sagebase.org/synapsePythonClient', parent=project, synapseStore=False)
    bogus = syn.store(bogus)
    bogus = syn.get(bogus)
    assert bogus.synapseStore == False
def test_round_trip():
    fhid = None
    filepath = utils.make_bogus_binary_file(multipart_upload_module.MIN_PART_SIZE + 777771)
    print('Made bogus file: ', filepath)
    try:
        fhid = multipart_upload(syn, filepath)
        print('FileHandle: {fhid}'.format(fhid=fhid))

        # Download the file and compare it with the original
        junk = File(filepath, parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        schedule_for_cleanup(tmp_path)
        junk.update(syn._downloadFileEntity(junk, tmp_path))
        assert filecmp.cmp(filepath, junk.path)

    finally:
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_round_trip():
    fhid = None
    filepath = utils.make_bogus_binary_file(multipart_upload_module.MIN_PART_SIZE + 777771)
    try:
        fhid = multipart_upload(syn, filepath)

        # Download the file and compare it with the original
        junk = File(parent=project, dataFileHandleId=fhid)
        junk.properties.update(syn._createEntity(junk.properties))
        (tmp_f, tmp_path) = tempfile.mkstemp()
        schedule_for_cleanup(tmp_path)

        junk['path'] = syn._downloadFileHandle(fhid, junk['id'], 'FileEntity', tmp_path)
        assert filecmp.cmp(filepath, junk.path)

    finally:
        try:
            if 'junk' in locals():
                syn.delete(junk)
        except Exception:
            print(traceback.format_exc())
        try:
            os.remove(filepath)
        except Exception:
            print(traceback.format_exc())
def test_store__changing_externalURL_by_changing_path():
    url = 'https://www.synapse.org/Portal/clear.cache.gif'
    ext = syn.store(
        synapseclient.File(url,
                           name="test",
                           parent=project,
                           synapseStore=False))

    #perform a syn.get so the filename changes
    ext = syn.get(ext)

    #create a temp file
    temp_path = utils.make_bogus_data_file()
    schedule_for_cleanup(temp_path)

    ext.synapseStore = False
    ext.path = temp_path
    ext = syn.store(ext)

    #do a get to make sure filehandle has been updated correctly
    ext = syn.get(ext.id, downloadFile=True)

    assert_not_equal(ext.externalURL, url)
    assert_equal(utils.normalize_path(temp_path),
                 utils.file_url_to_path(ext.externalURL))
    assert_equal(temp_path, ext.path)
    assert_equal(False, ext.synapseStore)
def test_wikiAttachment():
    # Upload a file to be attached to a Wiki
    filename = utils.make_bogus_data_file()
    attachname = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    schedule_for_cleanup(attachname)
    fileHandle = syn._uploadFileToFileHandleService(filename)

    # Create and store a Wiki 
    # The constructor should accept both file handles and file paths
    md = """
    This is a test wiki
    =======================

    Blabber jabber blah blah boo.
    """
    wiki = Wiki(owner=project, title='A Test Wiki', markdown=md, 
                fileHandles=[fileHandle['id']], 
                attachments=[attachname])
    wiki = syn.store(wiki)
    
    # Create a Wiki sub-page
    subwiki = Wiki(owner=project, title='A sub-wiki', 
                   markdown='nothing', parentWikiId=wiki.id)
    subwiki = syn.store(subwiki)
    
    # Retrieve the root Wiki from Synapse
    wiki2 = syn.getWiki(project)
    assert wiki == wiki2

    # Retrieve the sub Wiki from Synapse
    wiki2 = syn.getWiki(project, subpageId=subwiki.id)
    assert subwiki == wiki2

    # Try making an update
    wiki['title'] = 'A New Title'
    wiki['markdown'] = wiki['markdown'] + "\nNew stuff here!!!\n"
    wiki = syn.store(wiki)
    assert wiki['title'] == 'A New Title'
    assert wiki['markdown'].endswith("\nNew stuff here!!!\n")

    # Check the Wiki's metadata
    headers = syn.getWikiHeaders(project)
    assert headers['totalNumberOfResults'] == 2
    assert headers['results'][0]['title'] in (wiki['title'], subwiki['title'])

    # # Retrieve the file attachment
    # tmpdir = tempfile.mkdtemp()
    # file_props = syn._downloadWikiAttachment(project, wiki, 
    #                         os.path.basename(filename), dest_dir=tmpdir)
    # path = file_props['path']
    # assert os.path.exists(path)
    # assert filecmp.cmp(original_path, path)

    # Clean up
    # syn._deleteFileHandle(fileHandle)
    syn.delete(wiki)
    syn.delete(subwiki)
    assert_raises(SynapseHTTPError, syn.getWiki, project)
def testMoveProject():
    proj1 = syn.store(Project(name=str(uuid.uuid4()) +
                              "testMoveProject-child"))
    proj2 = syn.store(
        Project(name=str(uuid.uuid4()) + "testMoveProject-newParent"))
    assert_raises(SynapseHTTPError, syn.move, proj1, proj2)
    schedule_for_cleanup(proj1)
    schedule_for_cleanup(proj2)
def test_setPermissions__default_permissions():
    temp_proj = syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(temp_proj)

    syn.setPermissions(temp_proj, other_user['username'])
    permissions = syn.getPermissions(temp_proj, other_user['username'])

    assert_equals(set(['READ', 'DOWNLOAD']), set(permissions))
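# For contrast (hedged sketch): permissions can also be granted explicitly
# instead of relying on the defaults asserted above:
#
#     syn.setPermissions(temp_proj, other_user['username'],
#                        accessType=['READ', 'DOWNLOAD', 'UPDATE'])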
def _create_temp_file_with_cleanup(specific_file_text=None):
    if specific_file_text:
        with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as file:
            file.write(specific_file_text)
            filename = file.name
    else:
        filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    return filename
def test_download_local_file_URL_path():
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)

    filehandle = create_external_file_handle(syn, path, mimetype=None, file_size=None)

    localFileEntity = syn.store(File(dataFileHandleId=filehandle['id'], parent=project))
    e = syn.get(localFileEntity.id)
    assert_equal(path, e.path)
def test_downloadFile():
    # See if a "wget"-style download works
    result = utils.download_file("http://dev-versions.synapse.sagebase.org/sage_bionetworks_logo_274x128.png")
    filename = result[0]
    
    # print "status: %s" % str(result[1].status))
    # print "filename: %s" % filename
    schedule_for_cleanup(filename)
    assert result, "Failed to download file: %s" % filename
    assert os.path.exists(filename)
def test_store_isRestricted_flag():
    # Store a file with access requirements
    path = utils.make_bogus_binary_file()
    schedule_for_cleanup(path)
    entity = File(path, name='Secret human data', parent=project)
    
    # We don't want to spam ACT with test emails
    with patch('synapseclient.client.Synapse._createAccessRequirementIfNone') as intercepted:
        entity = syn.store(entity, isRestricted=True)
        assert intercepted.called
def testCustomConfigFile():
    if os.path.isfile(client.CONFIG_FILE):
        configPath='./CONFIGFILE'
        shutil.copyfile(client.CONFIG_FILE, configPath)
        schedule_for_cleanup(configPath)

        syn2 = synapseclient.Synapse(configPath=configPath)
        syn2.login()
    else:
        print "To fully test the login method a configuration file is required"
def setup(module):
    module.syn = integration.syn
    module.project = integration.project
    
    # Use the module-level syn object to communicate between main and child threads
    # - Read-only objects (for the children)
    module.syn.test_parent = module.syn.store(Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(module.syn.test_parent)
    module.syn.test_keepRunning = True
    
    # - Child writeable objects
    module.syn.test_errors = Queue()
    module.syn.test_runCountMutex = Lock()
    module.syn.test_threadsRunning = 0
def test_download_check_md5():
    tempfile_path = utils.make_bogus_data_file()
    schedule_for_cleanup(tempfile_path)
    entity = File(parent=project['id'])
    entity['path'] = tempfile_path
    entity = syn.store(entity)

    syn._downloadFileHandle(entity['dataFileHandleId'], entity['id'], 'FileEntity', tempfile.gettempdir())

    tempfile_path2 = utils.make_bogus_data_file()
    schedule_for_cleanup(tempfile_path2)
    entity_bad_md5 = syn.store(File(path=tempfile_path2, parent=project['id'], synapseStore=False))

    assert_raises(SynapseMd5MismatchError, syn._download_from_URL, entity_bad_md5['externalURL'], tempfile.gettempdir(),
                  entity_bad_md5['dataFileHandleId'], expected_md5="2345a")
def thread_keep_storing_one_File():
    """Makes one file and stores it over and over again."""
    
    # Make a local file to continuously store
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    myPrecious = File(path, parent=syn.test_parent, description='This bogus file is MINE', mwa="hahahah")
    
    while syn.test_keepRunning:
        stored = store_catch_412_HTTPError(myPrecious)
        if stored is not None:
            myPrecious = stored
        else:
            myPrecious = syn.get(myPrecious)

        sleep_for_a_bit()
def test_path_in_annotations_of_data_entities_bug():
    # test for SYNR-610, path, files and cacheDir appearing in annotations
    data1 = syn.createEntity(Data(name='Yet more totally bogus data', parent=project))
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    entity = syn.uploadFile(data1, path)

    ## entity should have a path, but not files and location at this point
    assert 'path' in entity and entity['path'] is not None

    data2 = syn.get(data1.id)

    ## These shouldn't be in annotations: "files" and "path" and "cacheDir"
    assert 'files' not in data2.annotations
    assert 'path' not in data2.annotations
    assert 'cacheDir' not in data2.annotations