def test_has_auth():
    """has_auth is False without credentials and True with either auth form."""
    # no credentials at all -> not authenticated
    anonymous = OSF()
    assert not anonymous.has_auth

    # username/password pair counts as auth
    basic = OSF(username='******', password='******')
    assert basic.has_auth

    # a personal access token counts as auth too
    tokened = OSF(token='0123456789abcd')
    assert tokened.has_auth
def test_create_project(OSFCore_post):
    """create_project() POSTs the node payload and returns a Project."""
    attributes = project_node["data"]["attributes"]
    new_project = OSF().create_project(
        attributes["title"],
        attributes["category"],
        description=attributes["description"],
    )

    expected_payload = {
        "data": {
            "type": "nodes",
            "attributes": {
                "title": "Preprint Citations Test",
                "category": "project",
                "description": "this is a test for preprint citations",
                "tags": [],
            },
        }
    }
    OSFCore_post.assert_has_calls([
        call(
            "https://api.osf.io/v2/nodes/",
            json=expected_payload,
            headers={"Content-Type": "application/json"},
        )
    ])
    assert isinstance(new_project, Project)
def test_create_osf_export(path):
    """Create an export-only OSF sibling and run an annex export against it."""
    ds = Dataset(path).create(force=True)
    ds.save()

    results = ds.create_sibling_osf(
        title="CI dl-create",
        # do not create a git-remote
        mode="exportonly")
    assert_result_count(
        results, 1,
        status='ok', type='dataset', name='osf-storage', path=ds.path)

    # if we got here, we created something at OSF;
    # make sure, we clean up afterwards
    try:
        # for now just run an export and make sure it doesn't fail
        ds.repo.call_git(
            ['annex', 'export', 'HEAD', '--to', 'osf-storage'])
    finally:
        # clean remote end:
        credentials = get_credentials(allow_interactive=False)
        delete_node(OSF(**credentials).session, results[0]['id'])
def check_api_key(*args, **kwargs):
    """Resolve the caller's OSF API key and attach an OSF client to ``g.osf``.

    Reads ``userId`` from the JSON request body (falling back to form data),
    tries to parse an API key out of it, and otherwise loads a stored token
    for the "port-openscienceframework" service. Aborts with 401 when no key
    can be found; on success delegates to the wrapped ``api_method``.

    Fix: the original used bare ``except:`` clauses, which also swallow
    ``SystemExit``/``KeyboardInterrupt``; narrowed to ``except Exception:``.
    """
    g.zenodo = None
    try:
        req = request.get_json(force=True, cache=True)
    except Exception:
        # body is not valid JSON -> fall back to form-encoded data
        req = request.form.to_dict()
    try:
        service, userId, apiKey = Util.parseUserId(req.get("userId"))
    except Exception:
        # userId was not in parseable form -> look up a stored token instead
        apiKey = Util.loadToken(
            req.get("userId"), "port-openscienceframework"
        ).access_token
    logger.debug("req data: {}".format(req))
    if apiKey is None:
        logger.error("apiKey or userId not found.")
        abort(401)
    logger.debug("found apiKey")
    g.osf = OSF(
        token=apiKey,
        address=os.getenv(
            "OPENSCIENCEFRAMEWORK_API_ADDRESS", "https://api.test.osf.io/v2"
        ),
    )
    return api_method(*args, **kwargs)
def test_get_registration(OSFCore_get):
    """A registration GUID resolves via guids then registrations endpoints."""
    result = OSF().project('f3szh')
    expected = [
        call('https://api.osf.io/v2//guids/f3szh/'),
        call('https://api.osf.io/v2//registrations/f3szh/'),
    ]
    OSFCore_get.assert_has_calls(expected)
    assert isinstance(result, Project)
def test_get_project_with_endpoint(OSFCore_get):
    """A custom base_url is used for both the guids and nodes lookups."""
    client = OSF(base_url='https://api.test.osf.io/v2/')
    result = client.project('f3szh')
    expected = [
        call('https://api.test.osf.io/v2//guids/f3szh/'),
        call('https://api.test.osf.io/v2//nodes/f3szh/'),
    ]
    OSFCore_get.assert_has_calls(expected)
    assert isinstance(result, Project)
def test_project(project_list, length):
    """projects() follows the user's nodes relation and wraps each node.

    Fix: removed a leftover debug ``print(url)`` from the fake response
    handler, which polluted test output on every mocked request.
    """
    def fake_get(url, *args, **kwargs):
        # the /users/me/ response only needs the nodes relationship link
        me_payload = {
            "data": {
                "relationships": {
                    "nodes": {
                        "links": {
                            "related": {
                                "href": user_nodes_url
                            }
                        }
                    }
                }
            }
        }
        if url == user_me_url:
            return FakeResponse(200, me_payload)
        else:
            return FakeResponse(200, {"data": project_list})

    OSFCore_get.side_effect = fake_get

    osf = OSF()
    osf.login("*****@*****.**", "secret_password")
    projects = osf.projects()

    OSFCore_get.assert_has_calls([call(user_me_url), call(user_nodes_url)])
    assert isinstance(projects, list)
    assert len(projects) == length
    for index, proj in enumerate(projects):
        assert isinstance(proj, Project)
        assert proj.id == project_list[index]["data"]["id"]
def test_failed_get_project(OSFCore_get):
    """A failing node lookup surfaces as RuntimeError."""
    client = OSF()
    with pytest.raises(RuntimeError):
        client.project('f3szh')
    OSFCore_get.assert_called_once_with('https://api.osf.io/v2//nodes/f3szh/')
def test_get_project(OSFCore_get):
    """Fetching a project GUID issues exactly one nodes request."""
    result = OSF().project('f3szh')
    OSFCore_get.assert_called_once_with('https://api.osf.io/v2//nodes/f3szh/')
    assert isinstance(result, Project)
def test_get_fake(OSFCore_get):
    """An unsupported GUID type raises OSFException with a helpful message."""
    client = OSF()
    with pytest.raises(OSFException) as exc:
        client.project('f3szh')

    expected_msg = ('f3szh is unrecognized type fakes. '
                    'Clone supports projects and registrations')
    assert exc.value.args[0] == expected_msg
    OSFCore_get.assert_called_once_with('https://api.osf.io/v2//guids/f3szh/')
def test_create_osf_simple(path):
    """End-to-end: create an OSF sibling pair, push, drop and re-fetch content."""
    ds = Dataset(path).create(force=True)
    ds.save()

    file1 = Path('ds') / "file1.txt"

    # default mode: both a storage sibling and a git sibling are created
    create_results = ds.create_sibling_osf(name="osf")

    assert_result_count(create_results, 2, status='ok')
    assert_result_count(
        create_results, 1,
        status='ok', type='dataset', name="osf-storage", path=ds.path)
    assert_result_count(
        create_results, 1,
        status='ok', type='sibling', name="osf", path=ds.path)

    # if we got here, we created something at OSF;
    # make sure, we clean up afterwards
    try:
        # special remote is configured:
        remote_log = ds.repo.call_git(
            ['cat-file', 'blob', 'git-annex:remote.log'])
        assert_in("node={}".format(create_results[0]['id']), remote_log)

        # copy files over
        ds.repo.copy_to('.', "osf-storage")
        whereis = ds.repo.whereis(str(file1))
        here = ds.config.get("annex.uuid")
        # files should be 'here' and on remote end:
        assert_equal(len(whereis), 2)
        assert_in(here, whereis)

        # drop content here
        ds.drop('.')
        whereis = ds.repo.whereis(str(file1))
        # now on remote end only
        assert_equal(len(whereis), 1)
        assert_not_in(here, whereis)

        # and get content again from remote:
        ds.get('.')
        whereis = ds.repo.whereis(str(file1))
        assert_equal(len(whereis), 2)
        assert_in(here, whereis)
    finally:
        # clean remote end:
        cred = get_credentials(allow_interactive=False)
        osf = OSF(**cred)
        delete_node(osf.session, create_results[0]['id'])
def test_get_project(OSFCore_get):
    """Fetching a project GUID hits the guids endpoint, then nodes."""
    result = OSF().project("f3szh")
    OSFCore_get.assert_has_calls([
        call("https://api.osf.io/v2/guids/f3szh/"),
        call("https://api.osf.io/v2/nodes/f3szh/"),
    ])
    assert isinstance(result, Project)
def test_delete_project(OSFCore_delete, OSFCore_get):
    """project.delete() issues a DELETE against the node URL."""
    node_url = "https://api.osf.io/v2/nodes/f3szh/"
    project = OSF().project("f3szh")
    OSFCore_get.assert_has_calls([call(node_url)])

    project.delete()
    OSFCore_delete.assert_has_calls([call(node_url)])
def test_project_metadata_only_mutable(OSFCore_get):
    """metadata(only_mutable=True) exposes the writable attributes."""
    project = OSF().project("f3szh")
    md = project.metadata(only_mutable=True)
    expected = project_node["data"]["attributes"]
    for key in ("title", "description", "category", "tags", "public"):
        assert expected[key] == md[key]
def test_project_metadata(OSFCore_get):
    """metadata() returns both mutable and immutable node attributes."""
    project = OSF().project("f3szh")
    md = project.metadata()
    expected = project_node["data"]["attributes"]
    for key in ("title", "date_created", "date_modified",
                "description", "category", "tags", "public"):
        assert expected[key] == md[key]
def prepare(self):
    """Resolve the OSF node and bind its storage for later transfers.

    Determines the node ID from the special remote configuration,
    obtains credentials (DataLad's credential manager when available,
    otherwise OSF_* environment variables), authenticates against OSF,
    and stores the node's default storage on ``self.storage``.

    Raises RemoteError when no node ID can be determined or the node
    handle cannot be obtained.
    """
    node_id = self.annex.getconfig('node')
    if not node_id:
        # fall back on outdated 'project' parameter, which could be
        # just the node ID or a full URL to a project
        node_id = posixpath.basename(
            urlparse(self.annex.getconfig('project')).path.strip(
                posixpath.sep))
    if not node_id:
        raise RemoteError('Could not determine OSF node ID')

    try:
        # make use of DataLad's credential manager for a more convenient
        # out-of-the-box behavior
        from datalad_osf.utils import get_credentials
        # we must stay non-interactive, because this is running inside
        # git-annex's special remote protocol
        creds = get_credentials(allow_interactive=False)
    except ImportError as e:
        # whenever anything goes wrong here, stay calm and fall back
        # on envvars.
        # we want this special remote to be fully functional without
        # datalad
        creds = dict(
            username=os.environ.get('OSF_USERNAME', None),
            password=os.environ.get('OSF_PASSWORD', None),
            token=os.environ.get('OSF_TOKEN', None),
        )
    # next one just sets up the stage, no requests performed yet, hence
    # no error checking needed
    # supply both auth credentials, so osfclient can fall back on user/pass
    # if needed
    osf = OSF(**creds)
    # next one performs initial auth
    try:
        self.node = osf.project(node_id)
    except Exception as e:
        # we need to raise RemoteError() such that PREPARE-FAILURE
        # is reported, sadly that doesn't give users any clue
        # TODO support datalad logging here
        raise RemoteError('Failed to obtain OSF node handle: {}'.format(e))
    # which storage to use, defaults to 'osfstorage'
    # TODO a node could have more than one? Make parameter to select?
    self.storage = self.node.storage()
def test_project_metadata_jsonld(OSFCore_get):
    """metadata(jsonld=True) keys attributes by JSON-LD IRIs only.

    Fix: the original placed both ``md["title"]`` and ``md["description"]``
    inside a single ``pytest.raises(KeyError)`` block, so the second lookup
    was never executed once the first raised. Each lookup now gets its own
    ``pytest.raises`` block, so both absences are actually checked.
    """
    osf = OSF()
    project = osf.project("f3szh")
    md = project.metadata(jsonld=True)
    data = project_node["data"]["attributes"]
    # every attribute is present under its JSON-LD IRI
    for key in ("title", "description", "category", "tags", "public",
                "date_created", "date_modified"):
        assert data[key] == md[osf_to_jsonld[key]]
    # ...and the plain attribute names must NOT be present
    with pytest.raises(KeyError):
        md["title"]
    with pytest.raises(KeyError):
        md["description"]
def with_node(f, osf_session=None, title=None, category="data"):
    """Decorator: run *f* with a throwaway OSF node, deleting it afterwards.

    The node ID is appended to *f*'s positional arguments.
    """
    # we don't want the test hanging, no interaction
    credentials = get_credentials(allow_interactive=False)
    # supply all credentials, so osfclient can fall back on user/pass
    # if needed
    client = OSF(**credentials)

    @wraps(f)
    def wrapped(*args, **kwargs):
        node_id, proj_url = create_node(
            client.session,
            'Temporary DataLad CI project: {}'.format(title),
            category=category)
        try:
            return f(*(args + (node_id, )), **kwargs)
        finally:
            # always remove the temporary node, even on test failure
            delete_node(client.session, node_id)

    return wrapped
def test_update_project(OSFCore_put, OSFCore_get):
    """Changing the title and calling update() PUTs the full node payload."""
    osf = OSF()
    attributes = project_node["data"]["attributes"]
    attributes["title"] = "Long long title"

    project = osf.project("f3szh")
    OSFCore_get.assert_has_calls([call("https://api.osf.io/v2/nodes/f3szh/")])

    project.title = attributes["title"]
    project.update()

    expected_body = '{"data": {"type": "nodes", "id": "f3szh", "attributes": {"category": "project", "description": "this is a test for preprint citations", "title": "Long long title", "public": true, "tags": ["qatest"]}}}'
    OSFCore_put.assert_has_calls([
        call("https://api.osf.io/v2/nodes/f3szh/", data=expected_body)
    ])
    assert isinstance(project, Project)
def _get_osf_api(self):
    """Build an OSF API client with the best available credentials.

    Uses DataLad's credential manager (interactive prompting allowed)
    when the datalad_osf extension is importable, otherwise falls back
    to the OSF_USERNAME/OSF_PASSWORD/OSF_TOKEN environment variables.
    No network request is made here; authentication happens on first use.
    """
    try:
        # make use of DataLad's credential manager for a more convenient
        # out-of-the-box behavior
        from datalad_osf.utils import get_credentials
        # we should be able to allow interactive
        creds = get_credentials(allow_interactive=True)
    except ImportError as e:
        # whenever anything goes wrong here, stay calm and fall back
        # on envvars.
        # we want this special remote to be fully functional without
        # datalad
        creds = dict(
            username=os.environ.get('OSF_USERNAME', None),
            password=os.environ.get('OSF_PASSWORD', None),
            token=os.environ.get('OSF_TOKEN', None),
        )
    # next one just sets up the stage, no requests performed yet, hence
    # no error checking needed
    # supply both auth credentials, so osfclient can fall back on user/pass
    # if needed
    return OSF(**creds)
def test_login_token(session_token_auth):
    """login(token=...) delegates to the session's token auth exactly once."""
    client = OSF()
    # nothing should have been authenticated yet
    assert not session_token_auth.called
    client.login(token="asdfg")
    session_token_auth.assert_called_with("asdfg")
def test_login_username_password(session_basic_auth):
    """login(user, pass) delegates to the session's basic auth."""
    client = OSF()
    # nothing should have been authenticated yet
    assert not session_basic_auth.called
    client.login('*****@*****.**', 'secret_password')
    session_basic_auth.assert_called_with('*****@*****.**', 'secret_password')
def test_token_auth(session_token_auth):
    """Constructing OSF with a token authenticates the session immediately."""
    OSF(token='asdfg')
    session_token_auth.assert_called_with('asdfg')
def test_basic_auth(session_basic_auth):
    """Constructing OSF with user/pass authenticates the session immediately."""
    OSF('*****@*****.**', 'secret_password')
    session_basic_auth.assert_called_with('*****@*****.**', 'secret_password')
def connect() -> Storage:
    """Connect to the seed selection OSF project."""
    client = OSF()
    return client.project(_PROJECT).storage()
def __call__(
        title=None,
        name="osf",
        storage_name=None,
        dataset=None,
        mode="annex",
        existing='error',
        trust_level=None,
        tags=None,
        public=False,
        category='data',
        description=None,
):
    """Create an OSF node and configure it as sibling(s) of a dataset.

    Yields DataLad result records: 'impossible' when the dataset has no
    annex, 'error' on a sibling name conflict (with existing='error'),
    and 'ok' records for the storage sibling and/or git sibling that
    were configured, depending on *mode*.
    """
    ds = require_dataset(dataset,
                         purpose="create OSF remote",
                         check_installed=True)
    res_kwargs = dict(
        ds=ds,
        action="create-sibling-osf",
        logger=lgr,
    )
    # we need an annex
    if not isinstance(ds.repo, AnnexRepo):
        yield get_status_dict(type="dataset",
                              status="impossible",
                              message="dataset has no annex",
                              **res_kwargs)
        return

    # NOTES:
    # - we prob. should check osf-special-remote availability upfront to
    #   fail early
    # - add --recursive option
    #   - recursive won't work easily. Need to think that through.
    #   - would need a naming scheme for subdatasets
    #   - flat on OSF or a tree?
    #   - how do we detect something is there already, so we can skip
    #     rather than duplicate (with a new name)?
    #     osf-type-special-remote sufficient to decide it's not needed?
    # - adapt to conclusions in issue #30
    #   -> create those subcomponents
    # - results need to report URL for created projects suitable for datalad
    #   output formatting!
    #   -> result_renderer
    #   -> needs to be returned by create_node

    if not storage_name:
        storage_name = "{}-storage".format(name)

    sibling_conflicts = sibling_exists(
        ds, [name, storage_name],
        # TODO pass through
        recursive=False, recursion_limit=None,
        # fail fast, if error is desired
        exhaustive=existing == 'error',
    )
    if existing == 'error' and sibling_conflicts:
        # we only asked for one
        conflict = sibling_conflicts[0]
        yield get_status_dict(
            status='error',
            message=("a sibling '%s' is already configured in dataset %s",
                     conflict[1], conflict[0]),
            **res_kwargs,
        )
        return

    if title is None:
        # use dataset root basename
        title = ds.pathobj.name

    tags = ensure_list(tags)
    if 'DataLad dataset' not in tags:
        tags.append('DataLad dataset')
    if ds.id and ds.id not in tags:
        tags.append(ds.id)

    if not description:
        description = \
            "This component was built from a DataLad dataset using the " \
            "datalad-osf extension " \
            "(https://github.com/datalad/datalad-osf)."
        if mode != 'exportonly':
            description += \
                " With this extension installed, this component can be " \
                "git or datalad cloned from a 'osf://ID' URL, where " \
                "'ID' is the OSF node ID that shown in the OSF HTTP " \
                "URL, e.g. https://osf.io/q8xnk can be cloned from " \
                "osf://q8xnk. "
    cred = get_credentials(allow_interactive=True)
    osf = OSF(**cred)
    node_id, node_url = create_node(
        osf_session=osf.session,
        title=title,
        category=category,
        tags=tags if tags else None,
        public=EnsureBool()(public),
        description=description,
    )
    if mode != 'gitonly':
        # configure the git-annex special remote pointing at the new node
        init_opts = ["encryption=none",
                     "type=external",
                     "externaltype=osf",
                     "autoenable=true",
                     "node={}".format(node_id)]
        if mode in ("export", "exportonly"):
            init_opts += ["exporttree=yes"]
        ds.repo.init_remote(storage_name, options=init_opts)
        if trust_level:
            ds.repo.call_git(['annex', trust_level, storage_name])
        yield get_status_dict(type="dataset",
                              url=node_url,
                              id=node_id,
                              name=storage_name,
                              status="ok",
                              **res_kwargs)
    if mode == 'exportonly':
        return

    # append how to clone this specific dataset to the description
    description += "This particular project can be cloned using" \
                   " 'datalad clone osf://{}'".format(node_id)
    update_node(osf_session=osf.session, id_=node_id,
                description=description)

    ds.config.set('remote.{}.annex-ignore'.format(name), 'true',
                  where='local')
    yield from ds.siblings(
        # use configure, not add, to not trip over the config that
        # we just made
        action='configure',
        name=name,
        url='osf://{}'.format(node_id),
        fetch=False,
        publish_depends=storage_name if mode != 'gitonly' else None,
        recursive=False,
        result_renderer=None,
    )
def test_basic_auth(session_basic_auth):
    """Constructing OSF with user/pass authenticates the session immediately."""
    OSF("*****@*****.**", "secret_password")
    session_basic_auth.assert_called_with("*****@*****.**", "secret_password")
def test_failed_get_project(OSFCore_get):
    """A failing GUID lookup surfaces as RuntimeError after one request."""
    client = OSF()
    with pytest.raises(RuntimeError):
        client.project("f3szh")
    OSFCore_get.assert_called_once_with("https://api.osf.io/v2/guids/f3szh/")
def test_token_auth(session_token_auth):
    """Constructing OSF with a token authenticates the session immediately."""
    OSF(token="asdfg")
    session_token_auth.assert_called_with("asdfg")
def test_address_parameter():
    """An explicit address becomes the session base URL, replacing the default."""
    custom = "https://api.test.osf.io/v2"
    default = "https://api.osf.io/v2"
    assert OSF(address=custom).session.base_url == custom
    assert OSF(address=custom).session.base_url != default