def test_cache_remove():
    """Entries for a file handle can be removed one path at a time or all at once.

    Consistency fix: the original ended with nose-style ``assert_is_none``
    while the rest of this module's cache tests use plain ``assert``
    statements; this version uses plain asserts throughout.
    """
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    # Register two copies of the same file handle: one inside the handle's
    # cache directory and one in an unrelated temp directory.
    path1 = utils.touch(os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    alt_dir = tempfile.mkdtemp()
    path2 = utils.touch(os.path.join(alt_dir, "file2.ext"))
    my_cache.add(file_handle_id=101201, path=path2)

    # remove the cached copy at path1; only that entry is reported removed
    # and the other copy remains retrievable
    rp = my_cache.remove({'dataFileHandleId': 101201, 'path': path1})
    assert len(rp) == 1
    assert utils.equal_paths(rp[0], path1)
    assert utils.equal_paths(my_cache.get(101201), path2)

    # removing by file handle id alone clears every remaining entry
    my_cache.remove(101201)
    assert my_cache.get(101201) is None
def test_cache_store_get():
    """Round-trip files through the cache: add, look up by id/path/dir, remove."""
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    # one file in each of two handles' cache directories
    path1 = utils.touch(os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    path2 = utils.touch(os.path.join(my_cache.get_cache_dir(101202), "file2.ext"))
    my_cache.add(file_handle_id=101202, path=path2)

    # set path3's mtime to be later than path2's
    new_time_stamp = cache._get_modified_time(path2) + 2
    path3 = utils.touch(os.path.join(tmp_dir, "foo", "file2.ext"),
                        (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101202, path=path3)

    # lookups by id alone, by exact path, and by containing directory all hit
    found = my_cache.get(file_handle_id=101201)
    assert utils.equal_paths(found, path1)
    found = my_cache.get(file_handle_id=101201, path=path1)
    assert utils.equal_paths(found, path1)
    found = my_cache.get(file_handle_id=101201, path=my_cache.get_cache_dir(101201))
    assert utils.equal_paths(found, path1)

    found = my_cache.get(file_handle_id=101202, path=os.path.dirname(path2))
    assert utils.equal_paths(found, path2)
    found = my_cache.get(file_handle_id=101202, path=os.path.dirname(path3))
    assert utils.equal_paths(found, path3)

    # a handle that was never added is not found
    not_in_cache_file = my_cache.get(file_handle_id=101203, path=tmp_dir)
    assert not_in_cache_file is None

    # deleting the only copy of 101201 leaves nothing retrievable
    removed = my_cache.remove(file_handle_id=101201, path=path1, delete=True)
    assert utils.normalize_path(path1) in removed
    assert len(removed) == 1
    assert my_cache.get(file_handle_id=101201) is None

    # deleting one copy of 101202 leaves the other discoverable
    removed = my_cache.remove(file_handle_id=101202, path=path3, delete=True)
    found = my_cache.get(file_handle_id=101202)
    assert utils.normalize_path(path3) in removed
    assert len(removed) == 1
    assert utils.equal_paths(found, path2)

    # deleting by handle id removes the last remaining copy
    removed = my_cache.remove(file_handle_id=101202, delete=True)
    assert utils.normalize_path(path2) in removed
    assert len(removed) == 1
    assert my_cache.get(file_handle_id=101202) is None
def migrate_old_session_file_credentials_if_necessary(syn):
    """Copy credentials from the legacy '.session' file into the new storage,
    then delete the legacy file.
    """
    old_session_file_path = os.path.join(syn.cache.cache_root_dir, '.session')

    # only migrate if the download cache is in the default location (i.e. user did not set its location)
    # we don't want to migrate credentials if they were a part of a cache shared by multiple people
    if equal_paths(syn.cache.cache_root_dir, os.path.expanduser(CACHE_ROOT_DIR)):
        # iterate through the old file and place in new credential storage
        for cache_key, cache_value in _read_session_cache(old_session_file_path).items():
            if cache_key == "<mostRecent>":
                set_most_recent_user(cache_value)
            else:
                set_api_key(cache_key, cache_value)

    # always attempt to remove the old session file
    try:
        os.remove(old_session_file_path)
    except OSError:
        # file already removed.
        pass
def test_command_line_using_paths(test_state):
    """Command-line operations (show/get/provenance/add/associate) accept local file paths.

    Bug fix: every invocation was written as ``run(test_state, 'synapse' '--skip-checks', ...)``
    — adjacent string literals concatenate, so the CLI received the single
    bogus argument ``'synapse--skip-checks'`` instead of the program name and
    the ``--skip-checks`` flag as two arguments. The missing commas are restored.
    """
    # Create a Project
    project_entity = test_state.syn.store(Project(name=str(uuid.uuid4())))
    test_state.schedule_for_cleanup(project_entity.id)

    # Create a Folder in Project
    folder_entity = test_state.syn.store(Folder(name=str(uuid.uuid4()), parent=project_entity))

    # Create and upload a file in Folder
    filename = utils.make_bogus_data_file()
    test_state.schedule_for_cleanup(filename)
    file_entity = test_state.syn.store(File(filename, parent=folder_entity))

    # Verify that we can use show with a filename
    output = run(test_state, 'synapse', '--skip-checks', 'show', filename)
    id = parse(r'File: %s\s+\((syn\d+)\)\s+' % os.path.split(filename)[1], output)
    assert file_entity.id == id

    # Verify that limitSearch works by making sure we get the file entity
    # that's inside the folder
    file_entity2 = test_state.syn.store(File(filename, parent=project_entity))
    output = run(test_state, 'synapse', '--skip-checks', 'get',
                 '--limitSearch', folder_entity.id, filename)
    id = parse(r'Associated file: .* with synapse ID (syn\d+)', output)
    name = parse(r'Associated file: (.*) with synapse ID syn\d+', output)
    assert file_entity.id == id
    assert utils.equal_paths(name, filename)

    # Verify that set-provenance works with filepath
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    output = run(test_state, 'synapse', '--skip-checks', 'set-provenance',
                 '-id', file_entity2.id,
                 '-name', 'TestActivity',
                 '-description', 'A very excellent provenance',
                 '-used', filename,
                 '-executed', repo_url,
                 '-limitSearch', folder_entity.id)
    parse(r'Set provenance record (\d+) on entity syn\d+', output)

    output = run(test_state, 'synapse', '--skip-checks', 'get-provenance', '-id', file_entity2.id)
    activity = json.loads(output)
    assert activity['name'] == 'TestActivity'
    assert activity['description'] == 'A very excellent provenance'

    # Verify that store works with provenance specified with filepath
    repo_url = 'https://github.com/Sage-Bionetworks/synapsePythonClient'
    filename2 = utils.make_bogus_data_file()
    test_state.schedule_for_cleanup(filename2)
    output = run(test_state, 'synapse', '--skip-checks', 'add', filename2,
                 '-parentid', project_entity.id,
                 '-used', filename,
                 '-executed', '%s %s' % (repo_url, filename))
    entity_id = parse(r'Created/Updated entity:\s+(syn\d+)\s+', output)
    output = run(test_state, 'synapse', '--skip-checks', 'get-provenance', '-id', entity_id)
    activity = json.loads(output)
    a = [a for a in activity['used'] if not a['wasExecuted']]
    assert a[0]['reference']['targetId'] in [file_entity.id, file_entity2.id]

    # Test associate command
    # I have two files in Synapse filename and filename2
    path = tempfile.mkdtemp()
    test_state.schedule_for_cleanup(path)
    shutil.copy(filename, path)
    shutil.copy(filename2, path)
    run(test_state, 'synapse', '--skip-checks', 'associate', path, '-r')
    run(test_state, 'synapse', '--skip-checks', 'show', filename)
def test_Entity(syn, project, schedule_for_cleanup):
    """End-to-end CRUD test for Projects, Folders, Files, and Links.

    Exercises storing/retrieving entities, annotation round-tripping
    (including non-ASCII values), file versioning, and Link targets.
    NOTE(review): this is an integration test — it talks to a live
    Synapse backend through the ``syn`` fixture.
    """
    # Update the project
    project_name = str(uuid.uuid4())
    project = Project(name=project_name)
    project = syn.store(project)
    schedule_for_cleanup(project)
    project = syn.get(project)
    assert project.name == project_name

    # Create and get a Folder; 'foo' is an arbitrary annotation
    folder = Folder('Test Folder', parent=project, description='A place to put my junk', foo=1000)
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'A place to put my junk'
    # annotations come back as lists, hence the [0]
    assert folder.foo[0] == 1000

    # Update and get the Folder
    folder.pi = 3.14159265359
    folder.description = 'The rejects from the other folder'
    folder = syn.store(folder)
    folder = syn.get(folder)
    assert folder.name == 'Test Folder'
    assert folder.parentId == project.id
    assert folder.description == 'The rejects from the other folder'
    assert folder.pi[0] == 3.14159265359

    # Test CRUD on Files, check unicode
    path = utils.make_bogus_data_file()
    schedule_for_cleanup(path)
    a_file = File(path, parent=folder,
                  description=u'Description with funny characters: Déjà vu, ประเทศไทย, 中国',
                  contentType='text/flapdoodle',
                  foo='An arbitrary value',
                  bar=[33, 44, 55],
                  bday=Datetime(2013, 3, 15),
                  band=u"Motörhead",
                  lunch=u"すし")
    a_file = syn.store(a_file)
    assert a_file.path == path

    # unicode descriptions and annotations must survive the round trip
    a_file = syn.get(a_file)
    assert a_file.description ==\
        u'Description with funny characters: Déjà vu, ประเทศไทย, 中国', \
        u'description= %s' % a_file.description
    assert a_file['foo'][0] == 'An arbitrary value', u'foo= %s' % a_file['foo'][0]
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.contentType == 'text/flapdoodle', u'contentType= %s' % a_file.contentType
    assert a_file['band'][0] == u"Motörhead", u'band= %s' % a_file['band'][0]
    assert a_file['lunch'][0] == u"すし", u'lunch= %s' % a_file['lunch'][0]

    # downloaded content matches the uploaded file
    a_file = syn.get(a_file)
    assert filecmp.cmp(path, a_file.path)

    # a second File entity can share the same underlying file handle
    b_file = File(name="blah", parent=folder, dataFileHandleId=a_file.dataFileHandleId)
    b_file = syn.store(b_file)
    assert b_file.dataFileHandleId == a_file.dataFileHandleId

    # Update the File; forceVersion=False keeps the version number at 1
    a_file.path = path
    a_file['foo'] = 'Another arbitrary chunk of text data'
    a_file['new_key'] = 'A newly created value'
    a_file = syn.store(a_file, forceVersion=False)
    assert a_file['foo'][0] == 'Another arbitrary chunk of text data'
    assert a_file['bar'] == [33, 44, 55]
    assert a_file['bday'][0] == Datetime(2013, 3, 15)
    assert a_file.new_key[0] == 'A newly created value'
    assert a_file.path == path
    assert a_file.versionNumber == 1, "unexpected version number: " + str(a_file.versionNumber)

    # Test create, store, get Links
    # If version isn't specified, targetVersionNumber should not be set
    link = Link(a_file['id'], parent=project)
    link = syn.store(link)
    assert link['linksTo']['targetId'] == a_file['id']
    assert link['linksTo'].get('targetVersionNumber') is None
    assert link['linksToClassName'] == a_file['concreteType']

    # with an explicit targetVersion the version number IS recorded
    link = Link(a_file['id'], targetVersion=a_file.versionNumber, parent=project)
    link = syn.store(link)
    assert link['linksTo']['targetId'] == a_file['id']
    assert link['linksTo']['targetVersionNumber'] == a_file.versionNumber
    assert link['linksToClassName'] == a_file['concreteType']

    testLink = syn.get(link)
    assert testLink == link

    # followLink=True resolves to the link target's annotations and path
    link = syn.get(link, followLink=True)
    assert link['foo'][0] == 'Another arbitrary chunk of text data'
    assert link['bar'] == [33, 44, 55]
    assert link['bday'][0] == Datetime(2013, 3, 15)
    assert link.new_key[0] == 'A newly created value'
    assert utils.equal_paths(link.path, path)
    assert link.versionNumber == 1, "unexpected version number: " + str(a_file.versionNumber)

    # Links can also target Folders (no version number in that case)
    newfolder = Folder('Testing Folder', parent=project)
    newfolder = syn.store(newfolder)
    link = Link(newfolder, parent=folder.id)
    link = syn.store(link)
    assert link['linksTo']['targetId'] == newfolder.id
    assert link['linksToClassName'] == newfolder['concreteType']
    assert link['linksTo'].get('targetVersionNumber') is None

    # Upload a new File and verify
    new_path = utils.make_bogus_data_file()
    schedule_for_cleanup(new_path)
    a_file.path = new_path
    a_file = syn.store(a_file)
    a_file = syn.get(a_file)
    assert filecmp.cmp(new_path, a_file.path)
    assert a_file.versionNumber == 2

    # Make sure we can still get the older version of file
    old_random_data = syn.get(a_file.id, version=1)
    assert filecmp.cmp(old_random_data.path, path)

    tmpdir = tempfile.mkdtemp()
    schedule_for_cleanup(tmpdir)

    # test getting the file from the cache with downloadLocation parameter (SYNPY-330)
    a_file_cached = syn.get(a_file.id, downloadLocation=tmpdir)
    assert a_file_cached.path is not None
    assert os.path.basename(a_file_cached.path) == os.path.basename(a_file.path)
def test_cache_rules():
    """Verify the cache's preference order when resolving a file handle.

    Consistency fix: the original mixed nose-style helpers
    (``assert_true``/``assert_false``/``assert_is_none``/``assert_in``/
    ``assert_is_not_none``) into a module whose other cache tests use plain
    ``assert`` statements; this version uses plain asserts throughout.
    """
    # Cache should (in order of preference):
    #
    # 1. DownloadLocation specified:
    #   a. return exact match (unmodified file at the same path)
    #   b. return an unmodified file at another location,
    #      copy to downloadLocation subject to ifcollision
    #   c. download file to downloadLocation subject to ifcollision
    #
    # 2. DownloadLocation *not* specified:
    #   a. return an unmodified file at another location
    #   b. download file to cache_dir overwritting any existing file
    tmp_dir = tempfile.mkdtemp()
    my_cache = cache.Cache(cache_root_dir=tmp_dir)

    # put file in cache dir
    path1 = utils.touch(os.path.join(my_cache.get_cache_dir(101201), "file1.ext"))
    my_cache.add(file_handle_id=101201, path=path1)

    # two more copies outside the cache dir, each with a strictly later mtime
    new_time_stamp = cache._get_modified_time(path1) + 1
    path2 = utils.touch(os.path.join(tmp_dir, "not_in_cache", "file1.ext"),
                        (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101201, path=path2)

    new_time_stamp = cache._get_modified_time(path2) + 1
    path3 = utils.touch(os.path.join(tmp_dir, "also_not_in_cache", "file1.ext"),
                        (new_time_stamp, new_time_stamp))
    my_cache.add(file_handle_id=101201, path=path3)

    # DownloadLocation specified, found exact match
    assert utils.equal_paths(my_cache.get(file_handle_id=101201, path=path2), path2)

    # DownloadLocation specified, no match, get most recent
    path = my_cache.get(file_handle_id=101201,
                        path=os.path.join(tmp_dir, "file_is_not_here", "file1.ext"))
    assert utils.equal_paths(path, path3)

    # DownloadLocation specified as a directory, not in cache, get most recent
    empty_dir = os.path.join(tmp_dir, "empty_directory")
    os.makedirs(empty_dir)
    path = my_cache.get(file_handle_id=101201, path=empty_dir)
    assert utils.equal_paths(path, path3)

    # path2 is now modified
    new_time_stamp = cache._get_modified_time(path2) + 2
    utils.touch(path2, (new_time_stamp, new_time_stamp))

    # test cache.contains
    assert not my_cache.contains(file_handle_id=101201, path=empty_dir)
    assert not my_cache.contains(file_handle_id=101201, path=path2)
    assert not my_cache.contains(file_handle_id=101999, path=path2)
    assert my_cache.contains(file_handle_id=101201, path=path1)
    assert my_cache.contains(file_handle_id=101201, path=path3)

    # Get file from alternate location. Do we care which file we get?
    assert my_cache.get(file_handle_id=101201, path=path2) is None
    assert my_cache.get(file_handle_id=101201) in [utils.normalize_path(path1),
                                                   utils.normalize_path(path3)]

    # Download uncached file to a specified download location
    assert my_cache.get(file_handle_id=101202, path=os.path.join(tmp_dir, "not_in_cache")) is None

    # No downloadLocation specified, get file from alternate location. Do we care which file we get?
    assert my_cache.get(file_handle_id=101201) is not None
    assert my_cache.get(file_handle_id=101201) in [utils.normalize_path(path1),
                                                   utils.normalize_path(path3)]

    # test case 2b.
    assert my_cache.get(file_handle_id=101202) is None