def upload_folder(self, local_folder):
    """Create a folder on the OSF mirroring *local_folder* and return its remote object.

    Issues a WaterButler PUT (kind=folder) against the parent's URL, then patches
    the response JSON with a 'relationships' children link so that
    dict_to_remote_object can build a navigable remote folder.
    """
    assert isinstance(local_folder, File)
    assert local_folder.is_folder

    # PUT /v1/resources/6/providers/osfstorage/21/?kind=folder&name=FUN_FOLDER HTTP/1.1" 200 -
    query = {'kind': 'folder', 'name': local_folder.name}
    parent_id = local_folder.parent.osf_id if local_folder.has_parent else None
    target_url = api_url_for(
        RESOURCES,
        node_id=local_folder.node.osf_id,
        provider=local_folder.provider,
        file_id=parent_id,
    )
    resp_json = yield from self.make_request(target_url, method="PUT",
                                             params=query, get_json=True)
    AlertHandler.info(local_folder.name, AlertHandler.UPLOAD)

    # todo: determine whether uploaded folder will contain api url for its children
    # WaterButler ids look like '<provider>/<id>'; keep only the raw id part.
    created_id = resp_json['data']['id'].split('/')[1]
    children_url = api_url_for(
        NODES,
        related_type=FILES,
        node_id=local_folder.node.osf_id,
        provider=local_folder.provider,
        file_id=created_id,
    )
    # Inject the children link the API response lacks, e.g.
    # https://staging-api.osf.io/v2/nodes/4e6k8/files/osfstorage/562134f1029bdb6c230f2874/
    # reachable via ['relationships']['files']['links']['related']['href']
    resp_json['data']['relationships'] = {
        'files': {'links': {'related': {'href': children_url}}},
    }
    return dict_to_remote_object(resp_json['data'])
def _move_remote_file_folder(self, local_file_folder):
    """Ask the OSF to move (and/or rename) the remote counterpart of *local_file_folder*.

    POSTs a WaterButler 'move' action at the item's current location, pointing it
    at its (possibly new) parent's path, then clears the locally_moved flag.
    Always returns None: the updated remote object is deliberately not re-fetched.
    """
    url = api_url_for(RESOURCES, node_id=local_file_folder.node.osf_id, provider=local_file_folder.provider, file_id=local_file_folder.osf_id)
    data = {
        'action': 'move',
        # '/' targets the provider root when the item has no parent.
        'path': local_file_folder.parent.osf_path if local_file_folder.parent else '/',
        'rename': local_file_folder.name
    }
    resp = yield from self.make_request(url, method="POST", data=json.dumps(data))
    resp.close()
    local_file_folder.locally_moved = False

    # get the updated remote folder
    # inner_response = requests.get(remote_file_folder['links']['self'], headers=self.headers).json()
    # we know exactly what changed, so its faster to just change the remote dictionary rather
    # than making a new api call.
    # todo: can get the file folder from the osf by making request to parent file folder (local.parent.osf_id,)
    # todo: and then searching for the correct child based on osf_id.
    # todo: move can change NODE. THUS, need to REMOVE local_node=local_node in check_file_folder code...
    # for now, just going to stop synching this things children... NOT PROPER!!!!!
    # new_remote_file_folder = ...
    return None
def upload_file(self, local_file):
    """Upload *local_file*'s contents to the OSF and return the new RemoteFile.

    :param local_file: File model row; must be a file, not a folder.
    :return: RemoteFile built from the API response.
    :raises FileNotFoundError: if the file no longer exists on disk.
    """
    assert isinstance(local_file, File)
    assert local_file.is_file
    # /v1/resources/6/providers/osfstorage/21/?kind=file&name=FUN_FILE HTTP/1.1" 200 -
    params = {'provider': local_file.provider, 'name': local_file.name}
    # file_id=None targets the provider root when there is no parent folder.
    parent_osf_id = local_file.parent.osf_id if local_file.has_parent else None
    files_url = api_url_for(RESOURCES,
                            node_id=local_file.node.osf_id,
                            provider=local_file.provider,
                            file_id=parent_osf_id)
    # Context manager guarantees the handle is closed even when the request
    # raises -- the original leaked the file descriptor.
    with open(local_file.path, 'rb') as file:
        resp_json = yield from self.make_request(files_url, method="PUT",
                                                 params=params, data=file,
                                                 get_json=True)
    AlertHandler.info(local_file.name, AlertHandler.UPLOAD)
    return RemoteFile(resp_json['data'])
def check_osf(self, remote_user):
    """Poll loop: repeatedly reconcile local top-level nodes with the user's OSF nodes.

    :param remote_user: JSON-API resource dict for the logged-in user (type 'users').
    Runs forever, sleeping POLL_DELAY seconds between passes.
    """
    assert isinstance(remote_user, dict)
    assert remote_user['type'] == 'users'
    remote_user_id = remote_user['id']
    nodes_url = api_url_for(USERS, related_type=NODES, user_id=remote_user_id)
    while True:
        logger.info('Begining OSF poll')
        # get local top level nodes
        local_projects = self.user.top_level_nodes
        remote_projects = yield from self.osf_query.get_top_level_nodes(nodes_url)
        paired_projects = self.make_local_remote_tuple_list(local_projects, remote_projects)
        # Re-read the user row in case the sync selection changed while polling.
        session.refresh(self.user)
        sync_list = self.user.guid_for_top_level_nodes_to_sync
        logger.debug('sync list is: {}'.format(sync_list))
        for local, remote in paired_projects:
            # Only sync projects that still exist remotely and are selected for sync.
            if not remote or remote.id not in sync_list:
                continue
            yield from self.check_node(local, remote, local_parent_node=None)
        # Wait for every queued file operation before declaring up-to-date.
        yield from self.queue.join()
        AlertHandler.up_to_date()
        logger.debug('---------SHOULD HAVE ALL OSF FILES---------')
        yield from asyncio.sleep(POLL_DELAY)
def get_node_file_folders(node_id):
    """Return remote objects for the children of a node's storage-provider root."""
    listing_url = api_url_for(NODES, related_type=FILES, node_id=node_id)
    providers_resp = session.get(listing_url)
    assert providers_resp.ok
    storage_root = RemoteFolder(providers_resp.json()["data"][0])
    # Provider root folders are named after their provider.
    assert storage_root.provider == storage_root.name
    kids_resp = session.get(storage_root.child_files_url)
    assert kids_resp.ok
    remote_children = []
    for child in kids_resp.json()["data"]:
        remote_children.append(dict_to_remote_object(child))
    return remote_children
def get_remote_user(self):
    """Fetch this user's OSF record, retrying with a growing backoff until it succeeds."""
    url = api_url_for(USERS, user_id=self.user.osf_id)
    logger.debug(url)
    backoff = 0
    while True:
        try:
            resp = yield from self.osf_query.make_request(url, get_json=True)
        except aiohttp.ClientError:
            # Back off 5 more seconds on each failed attempt (5, 10, 15, ...).
            backoff += 5
            logger.error('Unable to fetch user from OSF, sleeping for {} seconds'.format(backoff))
            yield from asyncio.sleep(backoff)
        else:
            return resp['data']
def upload_folder(self, local_folder):
    """Create *local_folder* on the OSF and return the resulting remote object.

    PUTs kind=folder to the parent's WaterButler URL, then injects a
    'relationships' children link into the response so dict_to_remote_object
    can produce a navigable folder.
    """
    assert isinstance(local_folder, File)
    assert local_folder.is_folder
    # PUT /v1/resources/6/providers/osfstorage/21/?kind=folder&name=FUN_FOLDER HTTP/1.1" 200 -
    params = {
        'kind': 'folder',
        'name': local_folder.name,
    }
    # file_id=None targets the provider root when the folder has no parent.
    files_url = api_url_for(RESOURCES, node_id=local_folder.node.osf_id, provider=local_folder.provider, file_id=local_folder.parent.osf_id if local_folder.has_parent else None)
    resp_json = yield from self.make_request(files_url, method="PUT", params=params, get_json=True)
    AlertHandler.info(local_folder.name, AlertHandler.UPLOAD)
    # todo: determine whether uploaded folder will contain api url for its children
    # WaterButler ids look like '<provider>/<id>'; keep only the raw id part.
    new_file_id = resp_json['data']['id'].split('/')[1]
    children_url = api_url_for(NODES, related_type=FILES, node_id=local_folder.node.osf_id, provider=local_folder.provider, file_id=new_file_id)
    resp_json['data']['relationships'] = {
        'files': {
            'links': {
                'related': {
                    'href': children_url
                }
            }
        }
    }
    # https://staging-api.osf.io/v2/nodes/4e6k8/files/osfstorage/562134f1029bdb6c230f2874/
    # ['relationships']['files']['links']['related']['href']
    return dict_to_remote_object(resp_json['data'])
def get_node_file_folders(node_id):
    """Return remote objects for the children of a node's storage-provider root.

    :param node_id: OSF node id.
    """
    node_files_url = api_url_for(NODES, related_type=FILES, node_id=node_id)
    resp = session.get(node_files_url)
    assert resp.ok
    # First provider entry is assumed to be the storage root -- TODO confirm ordering.
    osf_storage_folder = RemoteFolder(resp.json()['data'][0])
    # Provider root folders are named after their provider.
    assert osf_storage_folder.provider == osf_storage_folder.name
    children_resp = session.get(osf_storage_folder.child_files_url)
    assert children_resp.ok
    return [
        dict_to_remote_object(file_folder)
        for file_folder in children_resp.json()['data']
    ]
def create_new_node(title, parent=None):
    """Create a new OSF project node titled *title* and return its id.

    :param title: title for the new node.
    :param parent: currently ignored.  # todo: relate to parent node
    :return: id of the created RemoteNode.
    """
    body = {
        "data": {
            "type": "nodes",  # required
            "attributes": {"title": title, "category": "Project"},  # required
        }
    }
    # Work on a copy so the module-level `headers` dict is not mutated as a
    # side effect of calling this helper (the original clobbered shared state).
    request_headers = dict(headers)
    request_headers["Content-Type"] = "application/json"
    request_headers["Accept"] = "application/json"
    # NOTE(review): hard-coded staging session cookie -- test scaffolding;
    # replace with a real credential before wider use.
    request_headers["Cookie"] = "osf_staging=55fc5f29029bdb53541b5cda.wTLtvhA3IyD-UGpB3pr7YXIWHvc;"
    ret = requests.post(api_url_for(NODES), data=json.dumps(body), headers=request_headers)
    return RemoteNode(ret.json()["data"]).id
def setUp(self):
    """Build a full remote fixture: user -> node -> provider -> two folders -> one file.

    NOTE(review): relies on a test API that mints users via bare POST and accepts
    the user id as a bearer token -- confirm against the test server setup.
    """
    # Create a user; the API echoes it back as a JSON-API resource.
    self.created_user = RemoteUser(requests.post(api_url_for(USERS), data={'fullname': 'hi'}).json()['data'])
    assert self.created_user.name == 'hi'
    headers = {'Authorization': 'Bearer {}'.format(self.created_user.id)}
    self.session = requests.Session()
    self.session.headers.update(headers)
    self.user_resp = self.session.get(api_url_for(USERS, user_id=self.created_user.id)).json()['data']
    # Create a project node owned by that user.
    self.created_node = RemoteNode(self.session.post(api_url_for(NODES), data={'title': 'new_node'}).json()['data'])
    assert self.created_node.name == 'new_node'
    self.node_resp = self.session.get(self.user_resp['relationships']['nodes']['links']['related']).json()['data'][0]
    # First storage provider listed for the node.
    self.folder_provider_resp = self.session.get(self.node_resp['relationships']['files']['links']['related'], headers=headers).json()['data'][0]
    params = {
        'name': 'FUN_FOLDER'
    }
    self.created_folder = RemoteFolder(
        self.session.put(self.folder_provider_resp['links']['new_folder'], params=params, headers=headers).json()['data']
    )
    assert self.created_folder.name == 'FUN_FOLDER'
    # create another folder
    self.session.put(self.folder_provider_resp['links']['new_folder'], params={'name': 'another folder'}, headers=headers).json()['data']
    # Children come back in listing order: [0] and [1] are the two folders above
    # -- TODO confirm the ordering guarantee.
    self.folder_resp = self.session.get(self.folder_provider_resp['relationships']['files']['links']['related']['href'], headers=headers).json()['data'][0]
    self.folder2_resp = self.session.get(self.folder_provider_resp['relationships']['files']['links']['related']['href'], headers=headers).json()['data'][1]
    # create file with contents
    self.file_contents = ''.join(chr(x) for x in range(128))
    self.session.put(self.folder2_resp['links']['upload'], headers=headers, params={'name': 'myfile.txt'}, data=self.file_contents).json()
    self.file_resp = self.session.get(self.folder2_resp['relationships']['files']['links']['related']['href'], headers=headers).json()['data'][0]
def create_test_user():
    """POST a throwaway user to the API and return its id (which doubles as the password)."""
    resp = requests.post(
        api_url_for(USERS),
        data={
            'fullname': "new_test_user"
        })
    assert resp.status_code == 200
    user_id = resp.json()['data']['id']
    print('test user created. Open OSF-Offline to start testing. Use the following credentials:'
          '\nEmail: {email}'
          '\nPassword: {password}'.format(email=user_id, password=user_id))
    return user_id
def get_remote_user(self):
    """Fetch the logged-in user's record from the OSF API, retrying forever.

    On aiohttp.ClientError, backs off 5 more seconds per attempt (5, 10, 15, ...).
    :return: the 'data' dict of the user resource.
    """
    url = api_url_for(USERS, user_id=self.user.osf_id)
    logger.debug(url)
    sleep_time = 0
    while True:
        try:
            return (yield from self.osf_query.make_request(url, get_json=True))['data']
        except aiohttp.ClientError:
            # Swallow transport errors; retry below after sleeping.
            pass
        sleep_time += 5
        logger.error(
            'Unable to fetch user from OSF, sleeping for {} seconds'.
            format(sleep_time))
        yield from asyncio.sleep(sleep_time)
def create_new_project(user_id):
    """Create a project named 'new_test_project' owned by *user_id*; return the node id."""
    payload = json.dumps({
        "data": {
            "type": "nodes",  # required
            "attributes": {
                "title": 'new_test_project',  # required
                "category": 'Project',  # required
            }
        }
    })
    auth_headers = {
        'Authorization': 'Bearer {}'.format(user_id),
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }
    ret = requests.post(api_url_for(NODES), data=payload, headers=auth_headers)
    print('new_test_project created for user {}'.format(user_id))
    return RemoteNode(ret.json()['data']).id
def create_new_node(title, parent=None):
    """Create a new OSF project node titled *title*; return its id.

    :param title: node title.
    :param parent: unused.  # todo: relate to parent node
    NOTE(review): mutates the module-level `headers` dict and embeds a
    hard-coded staging session cookie -- both look like test scaffolding.
    """
    body = {
        "data": {
            "type": "nodes",  # required
            "attributes": {
                "title": title,  # required
                "category": 'Project',  # required
            }
        }
    }
    headers['Content-Type'] = 'application/json'
    headers['Accept'] = 'application/json'
    headers[
        'Cookie'] = 'osf_staging=55fc5f29029bdb53541b5cda.wTLtvhA3IyD-UGpB3pr7YXIWHvc;'
    ret = requests.post(api_url_for(NODES), data=json.dumps(body), headers=headers)
    return RemoteNode(ret.json()['data']).id
def _move_remote_file_folder(self, local_file_folder):
    """Tell the OSF to move/rename the remote counterpart of *local_file_folder*.

    Always returns None; the updated remote object is intentionally not re-fetched.
    """
    move_url = api_url_for(RESOURCES,
                           node_id=local_file_folder.node.osf_id,
                           provider=local_file_folder.provider,
                           file_id=local_file_folder.osf_id)
    if local_file_folder.parent:
        destination = local_file_folder.parent.osf_path
    else:
        destination = '/'
    payload = json.dumps({
        'action': 'move',
        'path': destination,
        'rename': local_file_folder.name,
    })
    response = yield from self.make_request(move_url, method="POST", data=payload)
    response.close()
    local_file_folder.locally_moved = False
    # We know exactly what changed, so it is faster to patch the remote dict than
    # to make another API call.
    # todo: could fetch via the parent (local.parent.osf_id) and match on osf_id.
    # todo: move can change NODE; would need to drop local_node=local_node in
    # check_file_folder. For now this item's children stop syncing -- NOT PROPER.
    # new_remote_file_folder = ...
    return None
def upload_file(self, local_file):
    """ THROWS FileNotFoundError !!!!!!

    Uploads *local_file*'s bytes to the OSF via a WaterButler PUT.
    :param local_file: File model row; must be a file, not a folder.
    :return: RemoteFile built from the API response.
    """
    assert isinstance(local_file, File)
    assert local_file.is_file
    # /v1/resources/6/providers/osfstorage/21/?kind=file&name=FUN_FILE HTTP/1.1" 200 -
    params = {
        'provider': local_file.provider,
        'name': local_file.name
    }
    # file_id=None targets the provider root when there is no parent folder.
    parent_osf_id = local_file.parent.osf_id if local_file.has_parent else None
    files_url = api_url_for(RESOURCES, node_id=local_file.node.osf_id, provider=local_file.provider, file_id=parent_osf_id)
    # NOTE(review): handle is never closed -- leaks the descriptor if the request fails.
    file = open(local_file.path, 'rb')
    resp_json = yield from self.make_request(files_url, method="PUT", params=params, data=file, get_json=True)
    AlertHandler.info(local_file.name, AlertHandler.UPLOAD)
    return RemoteFile(resp_json['data'])
def check_osf(self, remote_user):
    """Poll the OSF forever, reconciling local top-level nodes against remote ones.

    :param remote_user: JSON-API resource dict for the logged-in user (type 'users').
    Sleeps POLL_DELAY seconds between passes; never returns.
    """
    assert isinstance(remote_user, dict)
    assert remote_user['type'] == 'users'
    remote_user_id = remote_user['id']
    nodes_url = api_url_for(USERS, related_type=NODES, user_id=remote_user_id)
    while True:
        logger.info('Begining OSF poll')
        # Pair up local and remote top-level projects.
        local_projects = self.user.top_level_nodes
        remote_projects = yield from self.osf_query.get_top_level_nodes(nodes_url)
        pairs = self.make_local_remote_tuple_list(local_projects, remote_projects)
        # Re-read the user row in case the sync selection changed while polling.
        session.refresh(self.user)
        sync_list = self.user.guid_for_top_level_nodes_to_sync
        logger.debug('sync list is: {}'.format(sync_list))
        for local_node, remote_node in pairs:
            # Only sync projects that still exist remotely and are selected.
            if remote_node and remote_node.id in sync_list:
                yield from self.check_node(local_node, remote_node, local_parent_node=None)
        # Drain every queued file operation before declaring up-to-date.
        yield from self.queue.join()
        AlertHandler.up_to_date()
        logger.debug('---------SHOULD HAVE ALL OSF FILES---------')
        yield from asyncio.sleep(POLL_DELAY)
def fetch(self):
    """Collect the logged-in user's top-level remote nodes.

    Emits the `finished` signal with the list and also returns it; any failure
    is logged and yields an empty result (best-effort background fetch).
    """
    top_level = []
    try:
        user = session.query(User).filter(User.logged_in).one()
        if user:
            url = api_url_for(USERS, related_type=NODES, user_id=user.osf_id)
            headers = {'Authorization': 'Bearer {}'.format(user.oauth_token)}
            raw_nodes = []
            page = requests.get(url, headers=headers).json()
            raw_nodes.extend(page['data'])
            # Walk pagination until the API reports no next page.
            while page['links']['next']:
                page = requests.get(page['links']['next'], headers=headers).json()
                raw_nodes.extend(page['data'])
            for raw in raw_nodes:
                candidate = RemoteNode(raw)
                if candidate.is_top_level:
                    top_level.append(candidate)
    except Exception as e:
        # Best-effort: swallow and log so the UI thread keeps running.
        logging.warning(e)
    self.finished.emit(top_level)
    return top_level
def test_get_all_paginated_users(self):
    """Paginated member fetch for node_id=1 should come back empty."""
    url = api_url_for(NODES, related_type=CHILDREN, node_id=1)
    children = yield from self.osf_query._get_all_paginated_members(url)
    # assertEquals is a long-deprecated alias (removed in Python 3.12);
    # use assertEqual.  Leftover debug print of `url` also removed.
    self.assertEqual(children, [])
def setUp(self):
    """Build a full remote fixture: user -> node -> provider -> two folders -> one file.

    NOTE(review): relies on a test API that mints users via bare POST and accepts
    the user id as a bearer token -- confirm against the test server setup.
    """
    # Create a user; the API echoes it back as a JSON-API resource.
    self.created_user = RemoteUser(
        requests.post(api_url_for(USERS), data={
            'fullname': 'hi'
        }).json()['data'])
    assert self.created_user.name == 'hi'
    headers = {'Authorization': 'Bearer {}'.format(self.created_user.id)}
    self.session = requests.Session()
    self.session.headers.update(headers)
    self.user_resp = self.session.get(
        api_url_for(USERS, user_id=self.created_user.id)).json()['data']
    # Create a project node owned by that user.
    self.created_node = RemoteNode(
        self.session.post(api_url_for(NODES), data={
            'title': 'new_node'
        }).json()['data'])
    assert self.created_node.name == 'new_node'
    self.node_resp = self.session.get(
        self.user_resp['relationships']['nodes']['links']
        ['related']).json()['data'][0]
    # First storage provider listed for the node.
    self.folder_provider_resp = self.session.get(
        self.node_resp['relationships']['files']['links']['related'],
        headers=headers).json()['data'][0]
    params = {'name': 'FUN_FOLDER'}
    self.created_folder = RemoteFolder(
        self.session.put(self.folder_provider_resp['links']['new_folder'],
                         params=params,
                         headers=headers).json()['data'])
    assert self.created_folder.name == 'FUN_FOLDER'
    # create another folder
    self.session.put(self.folder_provider_resp['links']['new_folder'],
                     params={
                         'name': 'another folder'
                     },
                     headers=headers).json()['data']
    # Children come back in listing order: [0] and [1] are the two folders above
    # -- TODO confirm the ordering guarantee.
    self.folder_resp = self.session.get(
        self.folder_provider_resp['relationships']['files']['links']
        ['related']['href'], headers=headers).json()['data'][0]
    self.folder2_resp = self.session.get(
        self.folder_provider_resp['relationships']['files']['links']
        ['related']['href'], headers=headers).json()['data'][1]
    # create file with contents
    self.file_contents = ''.join(chr(x) for x in range(128))
    self.session.put(self.folder2_resp['links']['upload'],
                     headers=headers,
                     params={
                         'name': 'myfile.txt'
                     },
                     data=self.file_contents).json()
    self.file_resp = self.session.get(
        self.folder2_resp['relationships']['files']['links']['related']
        ['href'], headers=headers).json()['data'][0]