def dataverse_set_config(node_addon, auth, **kwargs):
    """Saves selected Dataverse and dataset to node settings"""
    user_settings = node_addon.user_settings
    user = auth.user
    # Only the user who linked the account may change the configuration.
    if user_settings and user_settings.owner != user:
        raise HTTPError(http.FORBIDDEN)
    try:
        assert_clean(request.json)
    except AssertionError:
        # TODO: Test me!
        raise HTTPError(http.NOT_ACCEPTABLE)
    alias = request.json.get('dataverse', {}).get('alias')
    doi = request.json.get('dataset', {}).get('doi')
    if doi is None or alias is None:
        # Bug fix: raise the error instead of returning it — a returned
        # HTTPError would be serialized as a normal 200 response body.
        raise HTTPError(http.BAD_REQUEST)
    connection = client.connect_from_settings(node_addon)
    dataverse = client.get_dataverse(connection, alias)
    dataset = client.get_dataset(dataverse, doi)
    node_addon.set_folder(dataverse, dataset, auth)
    return {'dataverse': dataverse.title, 'dataset': dataset.title}, http.OK
def dataverse_publish_dataset(node_addon, auth, **kwargs):
    """Publish the node's linked dataset (and optionally its Dataverse),
    then record the publication in the node log."""
    owner = node_addon.owner
    wants_both = request.json.get('publish_both', False)
    log_time = datetime.datetime.utcnow()
    conn = client.connect_from_settings_or_401(node_addon)
    dv = client.get_dataverse(conn, node_addon.dataverse_alias)
    ds = client.get_dataset(dv, node_addon.dataset_doi)
    # An unpublished Dataverse must be published before its dataset can be.
    if wants_both:
        client.publish_dataverse(dv)
    client.publish_dataset(ds)
    # Add a log
    owner.add_log(
        action='dataverse_dataset_published',
        params={
            'project': owner.parent_id,
            'node': owner._id,
            'dataset': ds.title,
        },
        auth=auth,
        log_date=log_time,
    )
    return {'dataset': ds.title}, http.OK
def dataverse_publish_dataset(node_addon, auth, **kwargs):
    """Publish the linked dataset, optionally publishing the parent
    Dataverse first, and log the action on the owning node."""
    owner = node_addon.owner
    wants_both = request.json.get('publish_both', False)
    log_time = timezone.now()
    conn = client.connect_from_settings_or_401(node_addon)
    dv = client.get_dataverse(conn, node_addon.dataverse_alias)
    ds = client.get_dataset(dv, node_addon.dataset_doi)
    # The parent Dataverse may need publishing before the dataset.
    if wants_both:
        client.publish_dataverse(dv)
    client.publish_dataset(ds)
    # Add a log
    owner.add_log(
        action='dataverse_dataset_published',
        params={
            'project': owner.parent_id,
            'node': owner._id,
            'dataset': ds.title,
        },
        auth=auth,
        log_date=log_time,
    )
    return {'dataset': ds.title}, http.OK
def dataverse_get_widget_contents(node_addon, **kwargs):
    """Build the widget payload describing the node's linked dataset."""
    # Unconfigured addon: report disconnected.
    if not node_addon.complete:
        return {'data': {'connected': False}}, http.OK
    alias = node_addon.dataverse_alias
    doi = node_addon.dataset_doi
    conn = connect_from_settings_or_401(node_addon.user_settings)
    ds = get_dataset(get_dataverse(conn, alias), doi)
    if ds is None:
        return {'data': {'connected': False}}, http.BAD_REQUEST
    payload = {
        'connected': True,
        'dataverse': node_addon.dataverse,
        'dataverseUrl': 'http://{0}/dataverse/'.format(HOST) + alias,
        'dataset': node_addon.dataset,
        'doi': doi,
        'datasetUrl': 'http://dx.doi.org/' + doi,
        'citation': ds.citation,
    }
    return {'data': payload}, http.OK
def dataverse_get_widget_contents(node_addon, **kwargs):
    """Build the widget payload for the node's linked dataset."""
    # Nothing linked yet: report disconnected.
    if not node_addon.complete:
        return {'data': {'connected': False}}, http.OK
    alias = node_addon.dataverse_alias
    doi = node_addon.dataset_doi
    conn = client.connect_from_settings_or_401(node_addon)
    ds = client.get_dataset(client.get_dataverse(conn, alias), doi)
    if ds is None:
        return {'data': {'connected': False}}, http.BAD_REQUEST
    # The Dataverse host is stored in oauth_key on the external account.
    host = node_addon.external_account.oauth_key
    payload = {
        'connected': True,
        'dataverse': node_addon.dataverse,
        'dataverseUrl': 'http://{0}/dataverse/{1}'.format(host, alias),
        'dataset': node_addon.dataset,
        'doi': doi,
        'datasetUrl': 'https://doi.org/' + doi,
        'citation': ds.citation,
    }
    return {'data': payload}, http.OK
def dataverse_publish_dataset(node_addon, auth, **kwargs):
    """Publish the linked dataset (and optionally its Dataverse); log it."""
    owner = node_addon.owner
    wants_both = request.json.get('publish_both', False)
    log_time = datetime.datetime.utcnow()
    try:
        conn = connect_from_settings_or_401(node_addon)
    except HTTPError as error:
        # A 401 degrades to "no connection"; anything else propagates.
        if error.code != httplib.UNAUTHORIZED:
            raise
        conn = None
    dv = get_dataverse(conn, node_addon.dataverse_alias)
    ds = get_dataset(dv, node_addon.dataset_doi)
    if wants_both:
        publish_dataverse(dv)
    publish_dataset(ds)
    # Add a log
    owner.add_log(
        action='dataverse_dataset_published',
        params={
            'project': owner.parent_id,
            'node': owner._primary_key,
            'dataset': ds.title,
        },
        auth=auth,
        log_date=log_time,
    )
    return {'dataset': ds.title}, httplib.OK
def dataverse_publish(node_addon, auth, publish_both=False):
    """Publish the linked dataset, optionally its Dataverse too, and log it."""
    owner = node_addon.owner
    settings = node_addon.user_settings
    log_time = datetime.datetime.utcnow()
    try:
        conn = connect_from_settings_or_401(settings)
    except HTTPError as error:
        # Treat a 401 as "no connection"; re-raise everything else.
        if error.code != httplib.UNAUTHORIZED:
            raise
        conn = None
    dv = get_dataverse(conn, node_addon.dataverse_alias)
    ds = get_dataset(dv, node_addon.dataset_doi)
    if publish_both:
        publish_dataverse(dv)
    publish_dataset(ds)
    # Add a log
    owner.add_log(
        action='dataverse_dataset_published',
        params={
            'project': owner.parent_id,
            'node': owner._primary_key,
            'dataset': ds.title,
        },
        auth=auth,
        log_date=log_time,
    )
    return {'dataset': ds.title}, httplib.OK
def dataverse_get_widget_contents(node_addon, **kwargs):
    """Build the widget payload for the node's linked study."""
    # Not fully configured: report disconnected.
    if not node_addon.is_fully_configured:
        return {'data': {'connected': False}}, http.OK
    alias = node_addon.dataverse_alias
    doi = node_addon.study_hdl
    conn = connect_from_settings_or_403(node_addon.user_settings)
    linked_study = get_study(get_dataverse(conn, alias), doi)
    if linked_study is None:
        return {'data': {'connected': False}}, http.BAD_REQUEST
    payload = {
        'connected': True,
        'dataverse': node_addon.dataverse,
        'dataverseUrl': 'http://{0}/dvn/dv/'.format(HOST) + alias,
        'study': node_addon.study,
        'doi': doi,
        'studyUrl': 'http://dx.doi.org/' + doi,
        'citation': linked_study.citation,
    }
    return {'data': payload}, http.OK
def dataverse_publish(node_addon, auth, publish_both=False):
    """Publish the node's dataset and, when requested, its Dataverse."""
    owner = node_addon.owner
    settings = node_addon.user_settings
    stamp = datetime.datetime.utcnow()
    try:
        conn = connect_from_settings_or_401(settings)
    except HTTPError as error:
        # Only an UNAUTHORIZED response is tolerated as "no connection".
        if error.code != httplib.UNAUTHORIZED:
            raise
        conn = None
    dv = get_dataverse(conn, node_addon.dataverse_alias)
    ds = get_dataset(dv, node_addon.dataset_doi)
    if publish_both:
        publish_dataverse(dv)
    publish_dataset(ds)
    # Add a log
    owner.add_log(
        action='dataverse_dataset_published',
        params={
            'project': owner.parent_id,
            'node': owner._primary_key,
            'dataset': ds.title,
        },
        auth=auth,
        log_date=stamp,
    )
    return {'dataset': ds.title}, httplib.OK
def fail_if_unauthorized(node_addon, auth, file_id):
    """Raise an HTTPError unless ``auth`` may access ``file_id``.

    Raises NOT_FOUND when no file id is given, FORBIDDEN when the id does
    not belong to the linked study, and UNAUTHORIZED when a non-editor
    requests a file that is not in a released version.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    if file_id is None:
        raise HTTPError(httplib.NOT_FOUND)
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # Consistency fix: use the named constant instead of the magic
        # number 403, matching the httplib constants used below.
        if error.code == httplib.FORBIDDEN:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    released_file_ids = [f.id for f in get_files(study, released=True)]
    all_file_ids = [f.id for f in get_files(study)] + released_file_ids
    if file_id not in all_file_ids:
        raise HTTPError(httplib.FORBIDDEN)
    elif not node.can_edit(auth) and file_id not in released_file_ids:
        raise HTTPError(httplib.UNAUTHORIZED)
def fail_if_unauthorized(node_addon, auth, file_id):
    """Raise an HTTPError unless ``auth`` may access ``file_id``.

    Raises NOT_FOUND when no file id is given, FORBIDDEN when the id does
    not belong to the linked study, and UNAUTHORIZED when a non-editor
    requests a file that is not in a released version.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    if file_id is None:
        raise HTTPError(http.NOT_FOUND)
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # Consistency fix: compare against the named constant rather than
        # the magic number 403 used inconsistently with http.* elsewhere.
        if error.code == http.FORBIDDEN:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    released_file_ids = [f.id for f in get_files(study, released=True)]
    all_file_ids = [f.id for f in get_files(study)] + released_file_ids
    if file_id not in all_file_ids:
        raise HTTPError(http.FORBIDDEN)
    elif not node.can_edit(auth) and file_id not in released_file_ids:
        raise HTTPError(http.UNAUTHORIZED)
def dataverse_get_widget_contents(node_addon, **kwargs):
    """Assemble the Dataverse widget payload for this node."""
    # Addon not configured: disconnected placeholder.
    if not node_addon.complete:
        return {'data': {'connected': False}}, http.OK
    alias = node_addon.dataverse_alias
    doi = node_addon.dataset_doi
    conn = client.connect_from_settings_or_401(node_addon)
    ds = client.get_dataset(client.get_dataverse(conn, alias), doi)
    if ds is None:
        return {'data': {'connected': False}}, http.BAD_REQUEST
    # Host lives in oauth_key on the external account record.
    host = node_addon.external_account.oauth_key
    payload = {
        'connected': True,
        'dataverse': node_addon.dataverse,
        'dataverseUrl': 'http://{0}/dataverse/{1}'.format(host, alias),
        'dataset': node_addon.dataset,
        'doi': doi,
        'datasetUrl': 'http://dx.doi.org/' + doi,
        'citation': ds.citation,
    }
    return {'data': payload}, http.OK
def dataverse_hgrid_root(node_addon, auth, **kwargs):
    """Return the file-grid root folder for the node's linked dataset.

    Returns a one-element list with the rubeus root, or an empty list when
    no dataset is linked, the DOI resolves to nothing, or a non-editor
    requests a dataset with no published files.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    default_version = 'latest-published'
    # Non-editors are always pinned to the published version.
    version = 'latest-published' if not node.can_edit(auth) else default_version
    # Quit if no dataset linked
    if not node_addon.complete:
        return []
    can_edit = node.can_edit(auth)
    permissions = {
        'edit': can_edit and not node.is_registration,
        'view': node.can_view(auth)
    }
    try:
        connection = connect_from_settings(user_settings)
        dataverse = get_dataverse(connection, node_addon.dataverse_alias)
        dataset = get_dataset(dataverse, node_addon.dataset_doi)
    except SSLError:
        # Host unreachable over SSL: fall back to a bare root with no urls.
        return [rubeus.build_addon_root(
            node_addon,
            node_addon.dataset,
            permissions=permissions
        )]
    # Quit if doi does not produce a dataset
    if dataset is None:
        return []
    published_files = get_files(dataset, published=True)
    # Produce draft version or quit if no published version is available
    if not published_files:
        if can_edit:
            version = 'latest'
        else:
            return []
    urls = {
        'publish': node.api_url_for('dataverse_publish_dataset'),
        'publishBoth': node.api_url_for('dataverse_publish_both')
    }
    return [rubeus.build_addon_root(
        node_addon,
        node_addon.dataset,
        urls=urls,
        permissions=permissions,
        dataset=node_addon.dataset,
        doi=dataset.doi,
        dataverse=dataverse.title,
        hasPublishedFiles=bool(published_files),
        dataverseIsPublished=dataverse.is_published,
        version=version,
    )]
def test_get_unreleased_dataverse(self):
    """An unreleased dataverse should resolve to None."""
    self.mock_connection.get_dataverse.return_value = self.mock_dataverse
    type(self.mock_dataverse).is_released = mock.PropertyMock(return_value=False)
    result = get_dataverse(self.mock_connection, 'ALIAS')
    # The lookup must go through the connection exactly once.
    self.mock_connection.get_dataverse.assert_called_once_with('ALIAS')
    assert_equal(result, None)
def test_get_unpublished_dataverse(self):
    """An unpublished dataverse is still returned as-is."""
    self.mock_connection.get_dataverse.return_value = self.mock_dataverse
    type(self.mock_dataverse).is_published = mock.PropertyMock(return_value=False)
    result = get_dataverse(self.mock_connection, 'ALIAS')
    # Exactly one lookup by alias is expected.
    self.mock_connection.get_dataverse.assert_called_once_with('ALIAS')
    assert_equal(result, self.mock_dataverse)
def test_get_dataverse(self):
    """A published dataverse is returned unchanged."""
    self.mock_connection.get_dataverse.return_value = self.mock_dataverse
    type(self.mock_dataverse).is_published = mock.PropertyMock(return_value=True)
    result = get_dataverse(self.mock_connection, 'ALIAS')
    # Exactly one lookup by alias is expected.
    self.mock_connection.get_dataverse.assert_called_once_with('ALIAS')
    assert_equal(result, self.mock_dataverse)
def dataverse_get_datasets(node_addon, **kwargs):
    """Return title/DOI pairs for each dataset in the requested Dataverse."""
    alias = request.json.get('alias')
    conn = client.connect_from_settings(node_addon.user_settings)
    dv = client.get_dataverse(conn, alias)
    listing = [
        {'title': ds.title, 'doi': ds.doi}
        for ds in client.get_datasets(dv)
    ]
    return {'datasets': listing}, http.OK
def dataverse_get_datasets(node_addon, **kwargs):
    """Get list of datasets from provided Dataverse alias"""
    alias = request.json.get('alias')
    conn = client.connect_from_settings(node_addon)
    datasets = client.get_datasets(client.get_dataverse(conn, alias))
    payload = {
        'alias': alias,  # include alias to verify dataset container
        'datasets': [{'title': ds.title, 'doi': ds.doi} for ds in datasets],
    }
    return payload, http.OK
def dataverse_hgrid_data_contents(node_addon, auth, **kwargs):
    """List the linked study's files as grid rows for the file browser."""
    node = node_addon.owner
    editable = node.can_edit(auth)
    # Non-editors are always pinned to the released state; editors may
    # request a state, defaulting to 'released'.
    requested = request.args.get('state') or 'released'
    state = requested if editable else 'released'
    released = state == 'released'
    can_edit = editable and not node.is_registration and not released
    can_view = node.can_view(auth)
    connection = connect_from_settings(node_addon.user_settings)
    if node_addon.study_hdl is None or connection is None:
        return []
    study = get_study(
        get_dataverse(connection, node_addon.dataverse_alias),
        node_addon.study_hdl,
    )
    # The stored handle no longer resolves to a study.
    if study is None:
        return []

    def _row(f):
        # One grid entry per Dataverse file.
        return {
            'addon': 'dataverse',
            'provider': 'dataverse',
            rubeus.KIND: 'file',
            'name': f.name,
            'path': f.name,
            'file_id': f.id,
            'ext': os.path.splitext(f.name)[1],
            'urls': {
                'view': node.web_url_for('dataverse_view_file', path=f.id),
                'download': node.web_url_for('dataverse_download_file', path=f.id),
                'delete': node.api_url_for('dataverse_delete_file', path=f.id),
            },
            'permissions': {
                'view': can_view,
                'edit': can_edit,
            },
        }

    return {'data': [_row(f) for f in get_files(study, released)]}
def dataverse_hgrid_data_contents(node_addon, auth, **kwargs):
    """Serialize the linked study's files for the hgrid file browser."""
    node = node_addon.owner
    settings = node_addon.user_settings
    # Editors may choose a state (default 'released'); everyone else is
    # restricted to the released view.
    chosen = request.args.get('state')
    state = (chosen or 'released') if node.can_edit(auth) else 'released'
    released = state == 'released'
    can_view = node.can_view(auth)
    can_edit = (
        node.can_edit(auth)
        and not node.is_registration
        and not released
    )
    connection = connect_from_settings(settings)
    if node_addon.study_hdl is None or connection is None:
        return []
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    # Handle no longer resolves to a study.
    if study is None:
        return []
    rows = []
    for entry in get_files(study, released):
        rows.append({
            'addon': 'dataverse',
            'provider': 'dataverse',
            rubeus.KIND: 'file',
            'name': entry.name,
            'path': entry.name,
            'file_id': entry.id,
            'ext': os.path.splitext(entry.name)[1],
            'urls': {
                'view': node.web_url_for('dataverse_view_file', path=entry.id),
                'download': node.web_url_for('dataverse_download_file', path=entry.id),
                'delete': node.api_url_for('dataverse_delete_file', path=entry.id),
            },
            'permissions': {
                'view': can_view,
                'edit': can_edit,
            },
        })
    return {'data': rows}
def dataverse_get_studies(node_addon, **kwargs):
    """List the studies of a Dataverse, separating malformed entries."""
    alias = request.json.get('alias')
    conn = client.connect_from_settings(node_addon.user_settings)
    studies, bad_studies = client.get_studies(client.get_dataverse(conn, alias))
    payload = {
        'studies': [{'title': s.title, 'hdl': s.doi} for s in studies],
        'badStudies': [
            {'hdl': b.doi, 'url': 'http://dx.doi.org/' + b.doi}
            for b in bad_studies
        ],
    }
    # Partial content signals that some studies could not be loaded.
    status = http.PARTIAL_CONTENT if bad_studies else http.OK
    return payload, status
def dataverse_get_studies(node_addon, **kwargs):
    """List a Dataverse's studies, reporting malformed ones separately."""
    alias = request.json.get('alias')
    conn = connect_from_settings(node_addon.user_settings)
    studies, bad_studies = get_studies(get_dataverse(conn, alias))
    payload = {
        'studies': [{'title': s.title, 'hdl': s.doi} for s in studies],
        'badStudies': [
            {'hdl': b.doi, 'url': 'http://dx.doi.org/' + b.doi}
            for b in bad_studies
        ],
    }
    # 206 tells the client that some studies failed to load.
    status = http.PARTIAL_CONTENT if bad_studies else http.OK
    return payload, status
def dataverse_get_datasets(node_addon, **kwargs):
    """List the datasets of the Dataverse named in the request body."""
    alias = request.json.get('alias')
    settings = node_addon.user_settings
    conn = client.connect_from_settings(settings)
    dv = client.get_dataverse(conn, alias)
    rows = []
    for ds in client.get_datasets(dv):
        rows.append({'title': ds.title, 'doi': ds.doi})
    return {'datasets': rows}, http.OK
def dataverse_set_config(node_addon, auth, **kwargs):
    """Saves selected Dataverse and dataset to node settings"""
    user_settings = node_addon.user_settings
    user = auth.user
    # Only the linking user may change the configuration.
    if user_settings and user_settings.owner != user:
        raise HTTPError(http.FORBIDDEN)
    try:
        assert_clean(request.json)
    except AssertionError:
        # TODO: Test me!
        raise HTTPError(http.NOT_ACCEPTABLE)
    # Bug fixes: empty-dict defaults prevent AttributeError on a missing
    # key, the alias is validated alongside the DOI, and the error is
    # *raised* (a returned HTTPError would be serialized as a 200 body).
    alias = request.json.get('dataverse', {}).get('alias')
    doi = request.json.get('dataset', {}).get('doi')
    if doi is None or alias is None:
        raise HTTPError(http.BAD_REQUEST)
    connection = client.connect_from_settings(node_addon)
    dataverse = client.get_dataverse(connection, alias)
    dataset = client.get_dataset(dataverse, doi)
    node_addon.dataverse_alias = dataverse.alias
    node_addon.dataverse = dataverse.title
    node_addon.dataset_doi = dataset.doi
    node_addon.dataset_id = dataset.id
    node_addon.dataset = dataset.title
    node = node_addon.owner
    node.add_log(
        action='dataverse_dataset_linked',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'dataset': dataset.title,
        },
        auth=auth,
    )
    node_addon.save()
    return {'dataverse': dataverse.title, 'dataset': dataset.title}, http.OK
def set_dataverse_and_study(node_addon, auth, **kwargs):
    """Save the selected Dataverse and study to the node settings."""
    user_settings = node_addon.user_settings
    user = auth.user
    # Only the linking user may change the configuration.
    if user_settings and user_settings.owner != user:
        raise HTTPError(http.FORBIDDEN)
    try:
        assert_clean(request.json)
    except AssertionError:
        # TODO: Test me!
        raise HTTPError(http.NOT_ACCEPTABLE)
    # Bug fixes: empty-dict defaults prevent AttributeError on a missing
    # key, the alias is validated alongside the handle, and the error is
    # *raised* instead of returned (a returned HTTPError becomes a 200).
    alias = request.json.get('dataverse', {}).get('alias')
    hdl = request.json.get('study', {}).get('hdl')
    if hdl is None or alias is None:
        raise HTTPError(http.BAD_REQUEST)
    connection = client.connect_from_settings(user_settings)
    dataverse = client.get_dataverse(connection, alias)
    study = client.get_study(dataverse, hdl)
    node_addon.dataverse_alias = dataverse.alias
    node_addon.dataverse = dataverse.title
    node_addon.study_hdl = study.doi
    node_addon.study = study.title
    node = node_addon.owner
    node.add_log(
        action='dataverse_study_linked',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'study': study.title,
        },
        auth=auth,
    )
    node_addon.save()
    return {'dataverse': dataverse.title, 'study': study.title}, http.OK
def set_dataverse_and_study(node_addon, auth, **kwargs):
    """Save the selected Dataverse and study to the node settings."""
    user_settings = node_addon.user_settings
    user = get_current_user()
    # Only the linking user may change the configuration.
    if user_settings and user_settings.owner != user:
        raise HTTPError(http.FORBIDDEN)
    try:
        assert_clean(request.json)
    except AssertionError:
        # TODO: Test me!
        raise HTTPError(http.NOT_ACCEPTABLE)
    # Bug fixes: empty-dict defaults prevent AttributeError on a missing
    # key, the alias is validated alongside the handle, and the error is
    # *raised* instead of returned (a returned HTTPError becomes a 200).
    alias = request.json.get('dataverse', {}).get('alias')
    hdl = request.json.get('study', {}).get('hdl')
    if hdl is None or alias is None:
        raise HTTPError(http.BAD_REQUEST)
    connection = connect_from_settings(user_settings)
    dataverse = get_dataverse(connection, alias)
    study = get_study(dataverse, hdl)
    node_addon.dataverse_alias = dataverse.alias
    node_addon.dataverse = dataverse.title
    node_addon.study_hdl = study.doi
    node_addon.study = study.title
    node = node_addon.owner
    node.add_log(
        action='dataverse_study_linked',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'study': study.title,
        },
        auth=auth,
    )
    node_addon.save()
    return {'dataverse': dataverse.title, 'study': study.title}, http.OK
def dataverse_delete_file(node_addon, auth, **kwargs):
    """Delete one file from the linked study and log the removal."""
    node = node_addon.owner
    log_time = datetime.datetime.utcnow()
    file_id = kwargs.get('path')
    if file_id is None:
        raise HTTPError(httplib.NOT_FOUND)
    try:
        conn = connect_from_settings_or_403(node_addon.user_settings)
    except HTTPError as error:
        # FORBIDDEN degrades to "no connection"; other errors propagate.
        if error.code != httplib.FORBIDDEN:
            raise
        conn = None
    study = get_study(
        get_dataverse(conn, node_addon.dataverse_alias),
        node_addon.study_hdl,
    )
    target = get_file_by_id(study, file_id)
    delete_file(target)
    # Deletion is verified by re-fetching the file; a lingering record
    # means the remote delete did not take effect.
    if get_file_by_id(study, file_id) is not None:
        raise HTTPError(httplib.BAD_REQUEST)
    node.add_log(
        action='dataverse_file_removed',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'filename': target.name,
            'study': study.title,
        },
        auth=auth,
        log_date=log_time,
    )
    return {}
def dataverse_delete_file(node_addon, auth, **kwargs):
    """Delete one file from the linked study and log the removal.

    Raises NOT_FOUND when no path is supplied and BAD_REQUEST when the
    remote deletion cannot be confirmed.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    now = datetime.datetime.utcnow()
    file_id = kwargs.get('path')
    if file_id is None:
        raise HTTPError(http.NOT_FOUND)
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # Consistency fix: named constant instead of the magic number 403
        # used alongside the http.* constants elsewhere in this function.
        if error.code == http.FORBIDDEN:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    file = get_file_by_id(study, file_id)
    delete_file(file)
    # Check if file was deleted
    if get_file_by_id(study, file_id) is not None:
        raise HTTPError(http.BAD_REQUEST)
    node.add_log(
        action='dataverse_file_removed',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'filename': file.name,
            'study': study.title,
        },
        auth=auth,
        log_date=now,
    )
    return {}
def set_dataverse_and_dataset(node_addon, auth, **kwargs):
    """Save the selected Dataverse and dataset to the node settings."""
    user_settings = node_addon.user_settings
    user = auth.user
    # Only the linking user may change the configuration.
    if user_settings and user_settings.owner != user:
        raise HTTPError(http.FORBIDDEN)
    try:
        assert_clean(request.json)
    except AssertionError:
        # TODO: Test me!
        raise HTTPError(http.NOT_ACCEPTABLE)
    # Bug fixes: empty-dict defaults prevent AttributeError on a missing
    # key, the alias is validated alongside the DOI, and the error is
    # *raised* instead of returned (a returned HTTPError becomes a 200).
    alias = request.json.get('dataverse', {}).get('alias')
    doi = request.json.get('dataset', {}).get('doi')
    if doi is None or alias is None:
        raise HTTPError(http.BAD_REQUEST)
    connection = client.connect_from_settings(user_settings)
    dataverse = client.get_dataverse(connection, alias)
    dataset = client.get_dataset(dataverse, doi)
    node_addon.dataverse_alias = dataverse.alias
    node_addon.dataverse = dataverse.title
    node_addon.dataset_doi = dataset.doi
    node_addon.dataset_id = dataset.id
    node_addon.dataset = dataset.title
    node = node_addon.owner
    node.add_log(
        action='dataverse_dataset_linked',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'dataset': dataset.title,
        },
        auth=auth,
    )
    node_addon.save()
    return {'dataverse': dataverse.title, 'dataset': dataset.title}, http.OK
def dataverse_release_study(node_addon, auth, **kwargs):
    """Release the node's linked study and log the action.

    Raises CONFLICT if the study is already released.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    now = datetime.datetime.utcnow()
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # Consistency fix: named constant instead of the magic number 403
        # used alongside the httplib.* constants elsewhere in this function.
        if error.code == httplib.FORBIDDEN:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    # Releasing an already-released study is a conflict, not a no-op.
    if study.get_state() == 'RELEASED':
        raise HTTPError(httplib.CONFLICT)
    release_study(study)
    # Add a log
    node.add_log(
        action='dataverse_study_released',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'study': study.title,
        },
        auth=auth,
        log_date=now,
    )
    return {'study': study.title}, httplib.OK
def dataverse_release_study(node_addon, auth, **kwargs):
    """Release the node's linked study and log the action.

    Raises CONFLICT if the study is already released.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    now = datetime.datetime.utcnow()
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # Consistency fix: named constant instead of the magic number 403
        # used alongside the http.* constants elsewhere in this function.
        if error.code == http.FORBIDDEN:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    # Releasing an already-released study is a conflict, not a no-op.
    if study.get_state() == 'RELEASED':
        raise HTTPError(http.CONFLICT)
    release_study(study)
    # Add a log
    node.add_log(
        action='dataverse_study_released',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'study': study.title,
        },
        auth=auth,
        log_date=now,
    )
    return {'study': study.title}, http.OK
def dataverse_hgrid_root(node_addon, auth, state=None, **kwargs):
    """Return the hgrid root folder for the node's linked study.

    Returns a one-element list with the rubeus root, or an empty list when
    no study is linked, the handle resolves to nothing, or a non-editor
    requests a study with no released files.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    default_state = 'released'
    # Non-editors only ever see the released state.
    state = 'released' if not node.can_edit(auth) else state or default_state
    connection = connect_from_settings(user_settings)
    # Quit if no study linked
    if node_addon.study_hdl is None or connection is None:
        return []
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    # Quit if hdl does not produce a study
    if study is None:
        return []
    released_files = get_files(study, released=True)
    authorized = node.can_edit(auth)
    # Produce draft version or quit if no released version is available
    if not released_files:
        if authorized:
            state = 'draft'
        else:
            return []
    # Truncate long study names for display.
    study_name = node_addon.study
    if len(study_name) > 23:
        study_name = u'{0}...'.format(study_name[:20])
    permissions = {
        'edit': node.can_edit(auth) and not node.is_registration,
        'view': node.can_view(auth)
    }
    urls = {
        'upload': node.api_url_for('dataverse_upload_file'),
        'fetch': node.api_url_for('dataverse_hgrid_data_contents', state=state),
        'state': node.api_url_for('dataverse_root_folder_public'),
        'release': node.api_url_for('dataverse_release_study'),
    }
    # A release button is only offered on the draft view.
    buttons = [rubeus.build_addon_button(
        '<i class="fa fa-globe"></i> Release Study',
        'releaseStudy')] if state == 'draft' else None
    return [rubeus.build_addon_root(
        node_addon,
        study_name,
        urls=urls,
        permissions=permissions,
        buttons=buttons,
        study=study_name,
        doi=study.doi,
        dataverse=dataverse.title,
        citation=study.citation,
        hasReleasedFiles=bool(released_files),
        state=state,
    )]
def dataverse_upload_file(node_addon, auth, **kwargs):
    """Upload (or replace) a file in the node's linked study.

    Returns the serialized file info and CREATED for a new file, OK for a
    replacement. Raises BAD_REQUEST when the name is missing or the upload
    cannot be confirmed, UNSUPPORTED_MEDIA_TYPE for tiny payloads.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    try:
        name = request.args['name']
    except KeyError:
        raise HTTPError(httplib.BAD_REQUEST)
    now = datetime.datetime.utcnow()
    can_edit = node.can_edit(auth) and not node.is_registration
    can_view = node.can_view(auth)
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # A FORBIDDEN connection error degrades to "no connection".
        if error.code == httplib.FORBIDDEN:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    filename = secure_filename(name)
    status_code = httplib.CREATED
    old_id = None
    # Fail if file is too small (Dataverse issue)
    content = request.data
    if len(content) < 5:
        raise HTTPError(httplib.UNSUPPORTED_MEDIA_TYPE)
    # Replace file if old version exists
    old_file = get_file(study, filename)
    if old_file is not None:
        status_code = httplib.OK
        old_id = old_file.id
        delete_file(old_file)
        # Check if file was deleted
        if get_file_by_id(study, old_id) is not None:
            raise HTTPError(httplib.BAD_REQUEST)
    upload_file(study, filename, content)
    # Re-fetch to confirm the upload actually landed.
    file = get_file(study, filename)
    if file is None:
        raise HTTPError(httplib.BAD_REQUEST)
    node.add_log(
        action='dataverse_file_added',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'filename': filename,
            'path': node.web_url_for('dataverse_view_file', path=file.id),
            'study': study.title,
        },
        auth=auth,
        log_date=now,
    )
    info = {
        'addon': 'dataverse',
        'file_id': file.id,
        'old_id': old_id,
        'name': filename,
        'path': filename,
        'size': [
            len(content),
            rubeus.format_filesize(len(content))
        ],
        rubeus.KIND: rubeus.FILE,
        'urls': {
            'view': node.web_url_for('dataverse_view_file', path=file.id),
            'download': node.web_url_for('dataverse_download_file', path=file.id),
            'delete': node.api_url_for('dataverse_delete_file', path=file.id),
        },
        'permissions': {
            'view': can_view,
            'edit': can_edit,
        },
    }
    return info, status_code
def dataverse_upload_file(node_addon, auth, **kwargs):
    """Upload (or replace) a file in the node's linked study.

    Always returns 201 with serialized file info; the 'actionTaken' field
    distinguishes 'file_uploaded' from 'file_updated'. Raises BAD_REQUEST
    when the name is missing or the upload cannot be confirmed, and
    UNSUPPORTED_MEDIA_TYPE for tiny payloads.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    try:
        name = request.args['name']
    except KeyError:
        raise HTTPError(http.BAD_REQUEST)
    now = datetime.datetime.utcnow()
    can_edit = node.can_edit(auth) and not node.is_registration
    can_view = node.can_view(auth)
    try:
        connection = connect_from_settings_or_403(user_settings)
    except HTTPError as error:
        # A 403 connection error degrades to "no connection".
        if error.code == 403:
            connection = None
        else:
            raise
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    filename = secure_filename(name)
    action = 'file_uploaded'
    old_id = None
    # Fail if file is too small (Dataverse issue)
    content = request.data
    if len(content) < 5:
        raise HTTPError(http.UNSUPPORTED_MEDIA_TYPE)
    # Replace file if old version exists
    old_file = get_file(study, filename)
    if old_file is not None:
        action = 'file_updated'
        old_id = old_file.id
        delete_file(old_file)
        # Check if file was deleted
        if get_file_by_id(study, old_id) is not None:
            raise HTTPError(http.BAD_REQUEST)
    upload_file(study, filename, content)
    # Re-fetch to confirm the upload actually landed.
    file = get_file(study, filename)
    if file is None:
        raise HTTPError(http.BAD_REQUEST)
    node.add_log(
        action='dataverse_file_added',
        params={
            'project': node.parent_id,
            'node': node._primary_key,
            'filename': filename,
            'path': node.web_url_for('dataverse_view_file', path=file.id),
            'study': study.title,
        },
        auth=auth,
        log_date=now,
    )
    info = {
        'addon': 'dataverse',
        'file_id': file.id,
        'old_id': old_id,
        'name': filename,
        'path': filename,
        'size': [len(content), rubeus.format_filesize(len(content))],
        rubeus.KIND: rubeus.FILE,
        'urls': {
            'view': node.web_url_for('dataverse_view_file', path=file.id),
            'download': node.web_url_for('dataverse_download_file', path=file.id),
            'delete': node.api_url_for('dataverse_delete_file', path=file.id),
        },
        'permissions': {
            'view': can_view,
            'edit': can_edit,
        },
        'actionTaken': action,
    }
    return info, 201
def dataverse_hgrid_root(node_addon, auth, state=None, **kwargs):
    """Build the hgrid root folder entry for the node's linked study.

    Returns a single-element list, or an empty list when no study is
    linked, the handle does not resolve, or a non-editor asks for a study
    without released files.
    """
    node = node_addon.owner
    user_settings = node_addon.user_settings
    default_state = 'released'
    # Non-editors are restricted to the released state.
    state = 'released' if not node.can_edit(auth) else state or default_state
    connection = connect_from_settings(user_settings)
    # Quit if no study linked
    if node_addon.study_hdl is None or connection is None:
        return []
    dataverse = get_dataverse(connection, node_addon.dataverse_alias)
    study = get_study(dataverse, node_addon.study_hdl)
    # Quit if hdl does not produce a study
    if study is None:
        return []
    released_files = get_files(study, released=True)
    authorized = node.can_edit(auth)
    # Produce draft version or quit if no released version is available
    if not released_files:
        if authorized:
            state = 'draft'
        else:
            return []
    # Long study names are truncated with an ellipsis for display.
    study_name = node_addon.study
    if len(study_name) > 23:
        study_name = u'{0}...'.format(study_name[:20])
    permissions = {
        'edit': node.can_edit(auth) and not node.is_registration,
        'view': node.can_view(auth)
    }
    urls = {
        'upload': node.api_url_for('dataverse_upload_file'),
        'fetch': node.api_url_for('dataverse_hgrid_data_contents', state=state),
        'state': node.api_url_for('dataverse_root_folder_public'),
        'release': node.api_url_for('dataverse_release_study'),
    }
    # Only the draft view offers a release button.
    buttons = [
        rubeus.build_addon_button('<i class="fa fa-globe"></i> Release Study', 'releaseStudy')
    ] if state == 'draft' else None
    return [
        rubeus.build_addon_root(
            node_addon,
            study_name,
            urls=urls,
            permissions=permissions,
            buttons=buttons,
            study=study_name,
            doi=study.doi,
            dataverse=dataverse.title,
            citation=study.citation,
            hasReleasedFiles=bool(released_files),
            state=state,
        )
    ]
def _dataverse_root_folder(node_addon, auth, **kwargs):
    """Build the rubeus root-folder entry for the node's linked dataset.

    Returns a one-element list, or an empty list when no dataset is linked,
    the DOI does not resolve, or a non-editor asks for a dataset with no
    published files.
    """
    node = node_addon.owner
    default_version = 'latest-published'
    # Non-editors are pinned to the published version.
    version = 'latest-published' if not node.can_edit(auth) else default_version
    # Quit if no dataset linked
    if not node_addon.complete:
        return []
    can_edit = node.can_edit(auth)
    permissions = {
        'edit': can_edit and not node.is_registration,
        'view': node.can_view(auth)
    }
    try:
        connection = client.connect_from_settings(node_addon)
        dataverse = client.get_dataverse(connection, node_addon.dataverse_alias)
        dataset = client.get_dataset(dataverse, node_addon.dataset_doi)
    except SSLError:
        # Host unreachable over SSL: degrade to a bare root with no urls.
        return [rubeus.build_addon_root(
            node_addon,
            node_addon.dataset,
            permissions=permissions
        )]
    # Quit if doi does not produce a dataset
    if dataset is None:
        return []
    published_files = client.get_files(dataset, published=True)
    # Produce draft version or quit if no published version is available
    if not published_files:
        if can_edit:
            version = 'latest'
        else:
            return []
    urls = {
        'publish': node.api_url_for('dataverse_publish_dataset'),
    }
    # determine if there are any changes between the published and draft
    # versions of the dataset
    try:
        dataset.get_metadata('latest-published')
        dataset_is_published = True
        dataset_draft_modified = dataset.get_state() == 'DRAFT'
    except VersionJsonNotFoundError:
        # No published version exists at all.
        dataset_is_published = False
        dataset_draft_modified = True
    # Get the dataverse host
    # (stored in oauth_key because dataverse doesn't use that)
    dataverse_host = node_addon.external_account.oauth_key
    return [rubeus.build_addon_root(
        node_addon,
        node_addon.dataset,
        urls=urls,
        permissions=permissions,
        dataset=node_addon.dataset,
        doi=dataset.doi,
        dataverse=dataverse.title,
        hasPublishedFiles=bool(published_files),
        dataverseIsPublished=dataverse.is_published,
        datasetIsPublished=dataset_is_published,
        datasetDraftModified=dataset_draft_modified,
        version=version,
        host=dataverse_host,
    )]
def _dataverse_root_folder(node_addon, auth, **kwargs):
    """Build the rubeus root-folder entry for the node's linked dataset,
    propagating any view-only key from the request kwargs.

    Returns a one-element list, or an empty list when no dataset is linked,
    the DOI does not resolve, or a non-editor asks for a dataset with no
    published files.
    """
    node = node_addon.owner
    default_version = 'latest-published'
    # Non-editors are pinned to the published version.
    version = 'latest-published' if not node.can_edit(auth) else default_version
    # Quit if no dataset linked
    if not node_addon.complete:
        return []
    can_edit = node.can_edit(auth)
    permissions = {
        'edit': can_edit and not node.is_registration,
        'view': node.can_view(auth)
    }
    try:
        connection = client.connect_from_settings(node_addon)
        dataverse = client.get_dataverse(connection, node_addon.dataverse_alias)
        dataset = client.get_dataset(dataverse, node_addon.dataset_doi)
    except SSLError:
        # Host unreachable over SSL: degrade to a bare root with no urls.
        return [rubeus.build_addon_root(
            node_addon,
            node_addon.dataset,
            permissions=permissions,
            private_key=kwargs.get('view_only', None),
        )]
    # Quit if doi does not produce a dataset
    if dataset is None:
        return []
    published_files = client.get_files(dataset, published=True)
    # Produce draft version or quit if no published version is available
    if not published_files:
        if can_edit:
            version = 'latest'
        else:
            return []
    urls = {
        'publish': node.api_url_for('dataverse_publish_dataset'),
    }
    # determine if there are any changes between the published and draft
    # versions of the dataset
    try:
        dataset.get_metadata('latest-published')
        dataset_is_published = True
        dataset_draft_modified = dataset.get_state() == 'DRAFT'
    except VersionJsonNotFoundError:
        # No published version exists at all.
        dataset_is_published = False
        dataset_draft_modified = True
    # Get the dataverse host
    # (stored in oauth_key because dataverse doesn't use that)
    dataverse_host = node_addon.external_account.oauth_key
    return [rubeus.build_addon_root(
        node_addon,
        node_addon.dataset,
        urls=urls,
        permissions=permissions,
        dataset=node_addon.dataset,
        doi=dataset.doi,
        dataverse=dataverse.title,
        hasPublishedFiles=bool(published_files),
        dataverseIsPublished=dataverse.is_published,
        datasetIsPublished=dataset_is_published,
        datasetDraftModified=dataset_draft_modified,
        version=version,
        host=dataverse_host,
        private_key=kwargs.get('view_only', None),
    )]