def test_cache_remove():
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

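    # create a file inside the cache directory for file handle 101201 and register it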
    path1 = utils.touch(os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

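    # register a second copy of the same file handle from a directory outside the cache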
    alt_dir = tempfile.mkdtemp()
    path2 = utils.touch(os.path.join(alt_dir, "file2.ext"))
    my_cache.add(file_handle_id=101201, path=path2)

    # remove the cached copy at path1
    rp = my_cache.remove({'dataFileHandleId': 101201, 'path': path1})

    assert len(rp) == 1
    assert utils.equal_paths(rp[0], path1)
    assert utils.equal_paths(my_cache.get(101201), path2)

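    # removing by file handle id alone clears every remaining copy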
    my_cache.remove(101201)
    assert_is_none(my_cache.get(101201))
def test_cache_remove():
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    path1 = utils.touch(
        os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    alt_dir = tempfile.mkdtemp()
    path2 = utils.touch(os.path.join(alt_dir, "file2.ext"))
    my_cache.add(file_handle_id=101201, path=path2)

    # remove the cached copy at path1
    rp = my_cache.remove({'dataFileHandleId': 101201, 'path': path1})

    assert len(rp) == 1
    assert utils.equal_paths(rp[0], path1)
    assert utils.equal_paths(my_cache.get(101201), path2)

    my_cache.remove(101201)
    assert_is_none(my_cache.get(101201))
def test_cache_store_get():
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    path1 = utils.touch(
        os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

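    # cache a second file under a different file handle id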
    path2 = utils.touch(
        os.path.join(my_cache.get_cache_dir(101202), "file2.ext"))
    my_cache.add(file_handle_id=101202, path=path2)

    # set path3's mtime to be later than path2's
    new_time_stamp = cache._get_modified_time(path2) + 2

    path3 = utils.touch(os.path.join(tmp_dir, "foo", "file2.ext"),
                        (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101202, path=path3)

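    # lookups by file handle id, by exact path, and by containing directory
    # should all resolve to the copy in the cache directory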
    a_file = my_cache.get(file_handle_id=101201)
    assert utils.equal_paths(a_file, path1)

    a_file = my_cache.get(file_handle_id=101201, path=path1)
    assert utils.equal_paths(a_file, path1)

    a_file = my_cache.get(file_handle_id=101201,
                          path=my_cache.get_cache_dir(101201))
    assert utils.equal_paths(a_file, path1)

    b_file = my_cache.get(file_handle_id=101202, path=os.path.dirname(path2))
    assert utils.equal_paths(b_file, path2)

    b_file = my_cache.get(file_handle_id=101202, path=os.path.dirname(path3))
    assert utils.equal_paths(b_file, path3)

    not_in_cache_file = my_cache.get(file_handle_id=101203, path=tmp_dir)
    assert_is_none(not_in_cache_file)

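    # remove with delete=True reports the normalized path of the removed copy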
    removed = my_cache.remove(file_handle_id=101201, path=path1, delete=True)
    assert utils.normalize_path(path1) in removed
    assert len(removed) == 1
    assert_is_none(my_cache.get(file_handle_id=101201))

    removed = my_cache.remove(file_handle_id=101202, path=path3, delete=True)
    b_file = my_cache.get(file_handle_id=101202)
    assert utils.normalize_path(path3) in removed
    assert len(removed) == 1
    assert utils.equal_paths(b_file, path2)

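    # removing by file handle id alone clears the last remaining copy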
    removed = my_cache.remove(file_handle_id=101202, delete=True)
    assert utils.normalize_path(path2) in removed
    assert len(removed) == 1
    assert_is_none(my_cache.get(file_handle_id=101202))
def test_cache_store_get():
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    path1 = utils.touch(os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    path2 = utils.touch(os.path.join(my_cache.get_cache_dir(101202), "file2.ext"))
    my_cache.add(file_handle_id=101202, path=path2)

    # set path3's mtime to be later than path2's
    new_time_stamp = cache._get_modified_time(path2)+2

    path3 = utils.touch(os.path.join(tmp_dir, "foo", "file2.ext"), (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101202, path=path3)

    a_file = my_cache.get(file_handle_id=101201)
    assert utils.equal_paths(a_file, path1)

    a_file = my_cache.get(file_handle_id=101201, path=path1)
    assert utils.equal_paths(a_file, path1)

    a_file = my_cache.get(file_handle_id=101201, path=my_cache.get_cache_dir(101201))
    assert utils.equal_paths(a_file, path1)

    b_file = my_cache.get(file_handle_id=101202, path=os.path.dirname(path2))
    assert utils.equal_paths(b_file, path2)

    b_file = my_cache.get(file_handle_id=101202, path=os.path.dirname(path3))
    assert utils.equal_paths(b_file, path3)

    not_in_cache_file = my_cache.get(file_handle_id=101203, path=tmp_dir)
    assert_is_none(not_in_cache_file)

    removed = my_cache.remove(file_handle_id=101201, path=path1, delete=True)
    assert utils.normalize_path(path1) in removed
    assert len(removed) == 1
    assert_is_none(my_cache.get(file_handle_id=101201))

    removed = my_cache.remove(file_handle_id=101202, path=path3, delete=True)
    b_file = my_cache.get(file_handle_id=101202)
    assert utils.normalize_path(path3) in removed
    assert len(removed) == 1
    assert utils.equal_paths(b_file, path2)

    removed = my_cache.remove(file_handle_id=101202, delete=True)
    assert utils.normalize_path(path2) in removed
    assert len(removed) == 1
    assert_is_none(my_cache.get(file_handle_id=101202))
def test_cache_rules():
    # Cache should (in order of preference):
    #
    # 1. DownloadLocation specified:
    #   a. return exact match (unmodified file at the same path)
    #   b. return an unmodified file at another location,
    #      copy to downloadLocation subject to ifcollision
    #   c. download file to downloadLocation subject to ifcollision
    #
    # 2. DownloadLocation *not* specified:
    #   a. return an unmodified file at another location
    #   b. download file to cache_dir overwriting any existing file
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    # put file in cache dir
    path1 = utils.touch(os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    new_time_stamp = cache._get_modified_time(path1)+1
    path2 = utils.touch(os.path.join(tmp_dir, "not_in_cache", "file1.ext"), (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101201, path=path2)

    new_time_stamp = cache._get_modified_time(path2)+1
    path3 = utils.touch(os.path.join(tmp_dir, "also_not_in_cache", "file1.ext"), (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101201, path=path3)

    # DownloadLocation specified, found exact match
    assert utils.equal_paths(my_cache.get(file_handle_id=101201, path=path2), path2)

    # DownloadLocation specified, no match, get most recent
    path = my_cache.get(file_handle_id=101201, path=os.path.join(tmp_dir, "file_is_not_here", "file1.ext"))
    assert utils.equal_paths(path, path3)

    # DownloadLocation specified as a directory, not in cache, get most recent
    empty_dir = os.path.join(tmp_dir, "empty_directory")
    os.makedirs(empty_dir)
    path = my_cache.get(file_handle_id=101201, path=empty_dir)
    assert utils.equal_paths(path, path3)

    # path2 is now modified
    new_time_stamp = cache._get_modified_time(path2)+2
    utils.touch(path2, (new_time_stamp, new_time_stamp))

    # test cache.contains
    assert_false(my_cache.contains(file_handle_id=101201, path=empty_dir))
    assert_false(my_cache.contains(file_handle_id=101201, path=path2))
    assert_false(my_cache.contains(file_handle_id=101999, path=path2))
    assert_true(my_cache.contains(file_handle_id=101201, path=path1))
    assert_true(my_cache.contains(file_handle_id=101201, path=path3))

    # Get file from alternate location. Do we care which file we get?
    assert_is_none(my_cache.get(file_handle_id=101201, path=path2))
    assert_in(my_cache.get(file_handle_id=101201), [utils.normalize_path(path1), utils.normalize_path(path3)])

    # Download uncached file to a specified download location
    assert_is_none(my_cache.get(file_handle_id=101202, path=os.path.join(tmp_dir, "not_in_cache")))

    # No downloadLocation specified, get file from alternate location. Do we care which file we get?
    assert_is_not_none(my_cache.get(file_handle_id=101201))
    assert_in(my_cache.get(file_handle_id=101201), [utils.normalize_path(path1), utils.normalize_path(path3)])

    # test case 2b.
    assert_is_none(my_cache.get(file_handle_id=101202))
def test_cache_rules():
    # Cache should (in order of preference):
    #
    # 1. DownloadLocation specified:
    #   a. return exact match (unmodified file at the same path)
    #   b. return an unmodified file at another location,
    #      copy to downloadLocation subject to ifcollision
    #   c. download file to downloadLocation subject to ifcollision
    #
    # 2. DownloadLocation *not* specified:
    #   a. return an unmodified file at another location
    #   b. download file to cache_dir overwriting any existing file
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    # put file in cache dir
    path1 = utils.touch(
        os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    new_time_stamp = cache._get_modified_time(path1) + 1
    path2 = utils.touch(os.path.join(tmp_dir, "not_in_cache", "file1.ext"),
                        (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101201, path=path2)

    new_time_stamp = cache._get_modified_time(path2) + 1
    path3 = utils.touch(
        os.path.join(tmp_dir, "also_not_in_cache", "file1.ext"),
        (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101201, path=path3)

    # DownloadLocation specified, found exact match
    assert utils.equal_paths(my_cache.get(file_handle_id=101201, path=path2),
                             path2)

    # DownloadLocation specified, no match, get most recent
    path = my_cache.get(file_handle_id=101201,
                        path=os.path.join(tmp_dir, "file_is_not_here",
                                          "file1.ext"))
    assert utils.equal_paths(path, path3)

    # DownloadLocation specified as a directory, not in cache, get most recent
    empty_dir = os.path.join(tmp_dir, "empty_directory")
    os.makedirs(empty_dir)
    path = my_cache.get(file_handle_id=101201, path=empty_dir)
    assert utils.equal_paths(path, path3)

    # path2 is now modified
    new_time_stamp = cache._get_modified_time(path2) + 2
    utils.touch(path2, (new_time_stamp, new_time_stamp))

    # test cache.contains
    assert_false(my_cache.contains(file_handle_id=101201, path=empty_dir))
    assert_false(my_cache.contains(file_handle_id=101201, path=path2))
    assert_false(my_cache.contains(file_handle_id=101999, path=path2))
    assert_true(my_cache.contains(file_handle_id=101201, path=path1))
    assert_true(my_cache.contains(file_handle_id=101201, path=path3))

    # Get file from alternate location. Do we care which file we get?
    assert_is_none(my_cache.get(file_handle_id=101201, path=path2))
    assert_in(my_cache.get(file_handle_id=101201),
              [utils.normalize_path(path1),
               utils.normalize_path(path3)])

    # Download uncached file to a specified download location
    assert_is_none(
        my_cache.get(file_handle_id=101202,
                     path=os.path.join(tmp_dir, "not_in_cache")))

    # No downloadLocation specified, get file from alternate location. Do we care which file we get?
    assert_is_not_none(my_cache.get(file_handle_id=101201))
    assert_in(my_cache.get(file_handle_id=101201),
              [utils.normalize_path(path1),
               utils.normalize_path(path3)])

    # test case 2b.
    assert_is_none(my_cache.get(file_handle_id=101202))
def test_command_line_using_paths():
    # Create a Project
    project_entity = syn.store(synapseclient.Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)

    # Create a Folder in Project
    folder_entity = syn.store(
        synapseclient.Folder(name=str(uuid.uuid4()), parent=project_entity))

    # Create and upload a file in Folder
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    file_entity = syn.store(synapseclient.File(filename, parent=folder_entity))

    # Verify that we can use show with a filename
    output = run('synapse', '--skip-checks', 'show', filename)
    id = parse(r'File: %s\s+\((syn\d+)\)\s+' % os.path.split(filename)[1],
               output)
    assert_equals(file_entity.id, id)

    # Verify that limitSearch works by making sure we get the file entity
    # that's inside the folder
    file_entity2 = syn.store(
        synapseclient.File(filename, parent=project_entity))
    output = run('synapse', '--skip-checks', 'get', '--limitSearch',
                 folder_entity.id, filename)
    id = parse(r'Associated file: .* with synapse ID (syn\d+)', output)
    name = parse(r'Associated file: (.*) with synapse ID syn\d+', output)
    assert_equals(file_entity.id, id)
    assert_true(utils.equal_paths(name, filename))

    # Verify that set-provenance works with filepath
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    output = run('synapse', '--skip-checks', 'set-provenance', '-id',
                 file_entity2.id, '-name', 'TestActivity', '-description',
                 'A very excellent provenance', '-used', filename, '-executed',
                 repo_url, '-limitSearch', folder_entity.id)
    activity_id = parse(r'Set provenance record (\d+) on entity syn\d+',
                        output)

    output = run('synapse', '--skip-checks', 'get-provenance', '-id',
                 file_entity2.id)
    activity = json.loads(output)
    assert_equals(activity['name'], 'TestActivity')
    assert_equals(activity['description'], 'A very excellent provenance')

    # Verify that store works with provenance specified with filepath
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    filename2 = utils.make_bogus_data_file()
    schedule_for_cleanup(filename2)
    output = run('synapse', '--skip-checks', 'add', filename2, '-parentid',
                 project_entity.id, '-used', filename, '-executed',
                 '%s %s' % (repo_url, filename))
    entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)
    output = run('synapse', '--skip-checks', 'get-provenance', '-id',
                 entity_id)
    activity = json.loads(output)
    a = [a for a in activity['used'] if not a['wasExecuted']]
    assert_in(a[0]['reference']['targetId'], [file_entity.id, file_entity2.id])

    # Test associate command
    # I have two files in Synapse filename and filename2
    path = tempfile.mkdtemp()
    schedule_for_cleanup(path)
    shutil.copy(filename, path)
    shutil.copy(filename2, path)
    run('synapse', '--skip-checks', 'associate', path, '-r')
    run('synapse', '--skip-checks', 'show', filename)
def test_Entity():
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert project.name == project_name

    # Create and get a Folder
    folder = Folder('Test Folder',
                    parent=project,
                    description='A place to put my junk',
                    foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'A place to put my junk'
    assert folder.foo[0] == 1000

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'The rejects from the other folder'
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path,
                  parent=folder,
                  description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value',
                  bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15),
                  band=u"Motörhead",
                  lunch=u"すし")
    a_file = syn.store(a_file)
    assert a_file.path == path

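    # annotations and unicode values should round-trip through getEntity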
    a_file = syn.getEntity(a_file)
    assert a_file.description == u'Description with funny characters: Déjà vu, ประเทศไทย, 中国', u'description= %s' % a_file.description
    assert a_file['foo'][0] == 'An arbitrary value', u'foo= %s' % a_file['foo'][0]
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.contentType == 'text/flapdoodle', u'contentType= %s' % a_file.contentType
    assert a_file['band'][0] == u"Motörhead", u'band= %s' % a_file['band'][0]
    assert a_file['lunch'][0] == u"すし", u'lunch= %s' % a_file['lunch'][0]

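    # download the file and verify its contents match the original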
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(path, a_file.path)

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.updateEntity(a_file)
    assert a_file['foo'][0] == 'Another arbitrary chunk of text data'
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.new_key[0] == 'A newly created value'
    assert a_file.path == path
    assert a_file.versionNumber == 1, "unexpected version number: " + str(
        a_file.versionNumber)

    #Test create, store, get Links
    link = Link(a_file['id'],
                targetVersion=a_file.versionNumber,
                parent=project)
    link = syn.store(link)
    assert link['linksTo']['targetId'] == a_file['id']
    assert link['linksTo']['targetVersionNumber'] == a_file.versionNumber
    assert link['linksToClassName'] == a_file['concreteType']

    testLink = syn.get(link, followLink=False)
    assert testLink == link

    link = syn.getEntity(link)
    assert link['foo'][0] == 'Another arbitrary chunk of text data'
    assert link['bar'] == [33, 44, 55]
    assert link['bday'][0] == Datetime(2013, 3, 15)
    assert link.new_key[0] == 'A newly created value'
    assert utils.equal_paths(link.path, path)
    assert link.versionNumber == 1, "unexpected version number: " + str(
        link.versionNumber)

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)

    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)

    ## test file name override
    a_file.fileNameOverride = "peaches_en_regalia.zoinks"
    syn.store(a_file)
    ## TODO We haven't defined how filename override interacts with
    ## TODO previously cached files so, side-step that for now by
    ## TODO making sure the file is not in the cache!
    syn.cache.remove(a_file.dataFileHandleId, delete=True)
    a_file_retrieved = syn.get(a_file, downloadLocation=tmpdir)
    assert os.path.basename(
        a_file_retrieved.path) == a_file.fileNameOverride, os.path.basename(
            a_file_retrieved.path)

    ## test getting the file from the cache with downloadLocation parameter (SYNPY-330)
    a_file_cached = syn.get(a_file.id, downloadLocation=tmpdir)
    assert a_file_cached.path is not None
    assert os.path.basename(
        a_file_cached.path) == a_file.fileNameOverride, a_file_cached.path

    print("\n\nList of files in project:\n")
    syn._list(project, recursive=True)
def test_command_line_using_paths():
    # Create a Project
    project_entity = syn.store(synapseclient.Project(name=str(uuid.uuid4())))
    schedule_for_cleanup(project_entity.id)

    # Create a Folder in Project
    folder_entity = syn.store(synapseclient.Folder(name=str(uuid.uuid4()), parent=project_entity))

    # Create and upload a file in Folder
    filename = utils.make_bogus_data_file()
    schedule_for_cleanup(filename)
    file_entity = syn.store(synapseclient.File(filename, parent=folder_entity))

    # Verify that we can use show with a filename
    output = run('synapse', '--skip-checks', 'show', filename)
    id = parse(r'File: %s\s+\((syn\d+)\)\s+' % os.path.split(filename)[1], output)
    assert file_entity.id == id

    # Verify that limitSearch works by making sure we get the file entity
    # that's inside the folder
    file_entity2 = syn.store(synapseclient.File(filename, parent=project_entity))
    output = run('synapse', '--skip-checks', 'get', 
                 '--limitSearch', folder_entity.id, 
                 filename)
    print("output = \"", output, "\"")
    id = parse(r'Associated file: .* with synapse ID (syn\d+)', output)
    name = parse(r'Associated file: (.*) with synapse ID syn\d+', output)
    assert_equals(file_entity.id, id)
    assert utils.equal_paths(name, filename)

    #Verify that set-provenance works with filepath
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    output = run('synapse', '--skip-checks', 'set-provenance', 
                 '-id', file_entity2.id,
                 '-name', 'TestActivity', 
                 '-description', 'A very excellent provenance', 
                 '-used', filename,
                 '-executed', repo_url,
                 '-limitSearch', folder_entity.id)
    activity_id = parse(r'Set provenance record (\d+) on entity syn\d+', output)

    output = run('synapse', '--skip-checks', 'get-provenance', 
                 '-id', file_entity2.id)
    activity = json.loads(output)
    assert activity['name'] == 'TestActivity'
    assert activity['description'] == 'A very excellent provenance'
    
    #Verify that store works with provenance specified with filepath
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    filename2 = utils.make_bogus_data_file()
    schedule_for_cleanup(filename2)
    output = run('synapse', '--skip-checks', 'add', filename2, 
                 '-parentid', project_entity.id, 
                 '-used', filename,
                 '-executed', '%s %s' % (repo_url, filename))
    entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)
    output = run('synapse', '--skip-checks', 'get-provenance', 
                 '-id', entity_id)
    activity = json.loads(output)
    a = [a for a in activity['used'] if not a['wasExecuted']]
    assert a[0]['reference']['targetId'] in [file_entity.id, file_entity2.id]
    
    #Test associate command
    #I have two files in Synapse filename and filename2
    path = tempfile.mkdtemp()
    schedule_for_cleanup(path)
    shutil.copy(filename, path)
    shutil.copy(filename2, path)
    output = run('synapse', '--skip-checks', 'associate', path, '-r')
    output = run('synapse', '--skip-checks', 'show', filename)
def test_Entity():
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.getEntity(project)
    assert project.name == project_name
    
    # Create and get a Folder
    folder = Folder('Test Folder', parent=project, description='A place to put my junk', foo=1000)
    folder = syn.createEntity(folder)
    folder = syn.getEntity(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'A place to put my junk'
    assert folder.foo[0] == 1000
    
    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'The rejects from the other folder'
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder, description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value',
                  bar=[33,44,55],
                  bday=Datetime(2013,3,15),
                  band=u"Motörhead",
                  lunch=u"すし")
    a_file = syn.store(a_file)
    assert a_file.path == path

    a_file = syn.getEntity(a_file)
    assert a_file.description == u'Description with funny characters: Déjà vu, ประเทศไทย, 中国', u'description= %s' % a_file.description
    assert a_file['foo'][0] == 'An arbitrary value', u'foo= %s' % a_file['foo'][0]
    assert a_file['bar'] == [33,44,55]
    assert a_file['bday'][0] == Datetime(2013,3,15)
    assert a_file.contentType == 'text/flapdoodle', u'contentType= %s' % a_file.contentType
    assert a_file['band'][0] == u"Motörhead", u'band= %s' % a_file['band'][0]
    assert a_file['lunch'][0] == u"すし", u'lunch= %s' % a_file['lunch'][0]
    
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(path, a_file.path)

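    # File() with both a path and a dataFileHandleId should raise ValueError;
    # a name plus an existing dataFileHandleId reuses that file handle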
    assert_raises(ValueError, File, a_file.path, parent=folder, dataFileHandleId=56456)
    b_file = File(name="blah", parent=folder, dataFileHandleId=a_file.dataFileHandleId)
    b_file = syn.store(b_file)

    assert b_file.dataFileHandleId == a_file.dataFileHandleId

    # Update the File
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.updateEntity(a_file)
    assert a_file['foo'][0] == 'Another arbitrary chunk of text data'
    assert a_file['bar'] == [33,44,55]
    assert a_file['bday'][0] == Datetime(2013,3,15)
    assert a_file.new_key[0] == 'A newly created value'
    assert a_file.path == path
    assert a_file.versionNumber == 1, "unexpected version number: " + str(a_file.versionNumber)

    #Test create, store, get Links
    link = Link(a_file['id'], 
                targetVersion=a_file.versionNumber,
                parent=project)
    link = syn.store(link)
    assert link['linksTo']['targetId'] == a_file['id']
    assert link['linksTo']['targetVersionNumber'] == a_file.versionNumber
    assert link['linksToClassName'] == a_file['concreteType']
    
    testLink = syn.get(link)
    assert testLink == link

    link = syn.get(link, followLink=True)
    assert link['foo'][0] == 'Another arbitrary chunk of text data'
    assert link['bar'] == [33,44,55]
    assert link['bday'][0] == Datetime(2013,3,15)
    assert link.new_key[0] == 'A newly created value'
    assert utils.equal_paths(link.path, path)
    assert link.versionNumber == 1, "unexpected version number: " + str(link.versionNumber)

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file = syn.uploadFile(a_file, new_path)
    a_file = syn.downloadEntity(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)

    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)

    ## test file name override
    a_file.fileNameOverride = "peaches_en_regalia.zoinks"
    syn.store(a_file)
    ## TODO We haven't defined how filename override interacts with
    ## TODO previously cached files so, side-step that for now by
    ## TODO making sure the file is not in the cache!
    syn.cache.remove(a_file.dataFileHandleId, delete=True)
    a_file_retrieved = syn.get(a_file, downloadLocation=tmpdir)
    assert os.path.basename(a_file_retrieved.path) == a_file.fileNameOverride, os.path.basename(a_file_retrieved.path)

    ## test getting the file from the cache with downloadLocation parameter (SYNPY-330)
    a_file_cached = syn.get(a_file.id, downloadLocation=tmpdir)
    assert a_file_cached.path is not None
    assert os.path.basename(a_file_cached.path) == a_file.fileNameOverride, a_file_cached.path

    print("\n\nList of files in project:\n")
    syn._list(project, recursive=True)