def test_view_project_child_exists(self):
    """child_exists must stay False for node links and only flip to True
    when a real child component is created."""
    auth = Auth(self.user)
    linked = ProjectFactory(creator=self.user)

    def child_flag():
        return _view_project(self.node, auth)['node']['child_exists']

    # No children yet.
    assert_equal(child_flag(), False)
    # A node link is not a child.
    self.node.add_node_link(linked, auth, save=True)
    assert_equal(child_flag(), False)
    # A real child component is.
    NodeFactory(creator=self.user, parent=self.node)
    assert_equal(child_flag(), True)
def test_serialize_node_summary_child_exists(self):
    """Same child_exists contract as the view test, but with locally
    created user and parent project."""
    owner = UserFactory()
    auth = Auth(owner)
    parent = ProjectFactory(creator=owner)
    linked = ProjectFactory(creator=owner)

    def child_flag():
        return _view_project(parent, auth)['node']['child_exists']

    # Fresh project: no children.
    assert_equal(child_flag(), False)
    # Node links do not count as children.
    parent.add_node_link(linked, auth, save=True)
    assert_equal(child_flag(), False)
    # An actual component does.
    NodeFactory(creator=owner, parent=parent)
    assert_equal(child_flag(), True)
def test_view_project_pending_registration_for_admin_contributor_does_contain_cancel_link(self):
    """Admin contributors see a populated, tokenized disapproval link on a
    pending registration."""
    registration = RegistrationFactory(project=self.node, archive=True)
    assert_true(registration.is_pending_registration)
    serialized = _view_project(registration, Auth(self.user))
    link = serialized["node"]["disapproval_link"]
    assert_not_equal(link, "")
    assert_in("/?token=", link)
    registration.remove()
def test_view_project_embed_registrations_includes_contribution_count(self, mock_archive):
    """Embedded registration summaries must expose an 'nlogs' count."""
    creator_auth = Auth(user=self.project.creator)
    self.project.register_node(get_default_metaschema(), creator_auth, '', None)
    data = _view_project(node=self.project, auth=creator_auth, embed_registrations=True)
    first_registration = data['node']['registrations'][0]
    assert_is_not_none(first_registration['nlogs'])
def test_view_project_embed_descendants(self):
    """embed_descendants=True serializes the node's children inline."""
    component = NodeFactory(parent=self.project, creator=self.user)
    serialized = _view_project(self.project, auth=Auth(self.project.creator), embed_descendants=True)
    node_data = serialized['node']
    assert_in('descendants', node_data)
    descendants = node_data['descendants']
    assert_equal(len(descendants), 1)
    assert_equal(descendants[0]['id'], component._id)
def collect_file_trees(auth, node, **kwargs):
    """Serialize the project view and attach the add-on static assets
    required to render HGrid file trees."""
    data = _view_project(node, auth, primary=True)
    # Merge in each add-on's JS/CSS asset references.
    data.update(rubeus.collect_addon_assets(node))
    return data
def collect_timestamp_trees_to_json(auth, node, **kwargs):
    """Kick off timestamp verification over the node's provider file list
    (admin endpoint); returns a simple OK payload."""
    context = _view_project(node, auth, primary=True)
    context.update(rubeus.collect_addon_assets(node))
    # Resolve the serialized user's GUID string back to its primary key.
    user_pk = Guid.objects.get(_id=context['user']['id']).object_id
    timestamp.do_verification(user_pk, kwargs.get('pid'), node)
    return {'message': 'OK'}
def test_view_project_pending_registration_for_admin_contributor_does_contain_cancel_link(self):
    """A pending registration viewed by an admin carries a non-empty
    disapproval link containing a token."""
    pending = RegistrationFactory(project=self.node, archive=True)
    assert_true(pending.is_pending_registration)
    data = _view_project(pending, Auth(self.user))
    cancel_link = data['node']['disapproval_link']
    assert_not_equal(cancel_link, '')
    assert_in('/?token=', cancel_link)
    pending.delete()
def test_view_project_pending_registration_for_write_contributor_does_not_contain_cancel_link(self):
    """Write-only contributors must not be offered a disapproval link."""
    writer = UserFactory()
    self.node.add_contributor(writer, permissions=permissions.WRITE, auth=Auth(self.user), save=True)
    registration = RegistrationFactory(project=self.node, archive=True)
    assert_true(registration.is_pending_registration)
    serialized = _view_project(registration, Auth(writer))
    assert_equal(serialized["node"]["disapproval_link"], "")
    registration.remove()
def dataverse_view_file(node_addon, auth, **kwargs):
    """Render a Dataverse file page.

    Performs authorization/privacy checks, lazily creates the file's GUID
    record (redirecting to the canonical GUID URL when needed), renders the
    file through the cache/MFR pipeline, and merges in the project view.
    """
    node = node_addon.owner
    file_id = kwargs.get('path')

    fail_if_unauthorized(node_addon, auth, file_id)
    fail_if_private(file_id)

    # lazily create a file GUID record
    file_obj, created = DataverseFile.get_or_create(node=node, path=file_id)

    # If the request did not come through the GUID URL, redirect to it.
    redirect_url = check_file_guid(file_obj)
    if redirect_url:
        return redirect(redirect_url)

    # Get or create rendered file
    cache_file_name = '{0}.html'.format(file_id)
    rendered = get_cache_content(node_addon, cache_file_name)
    if rendered is None:
        # Cache miss: fetch name + content and render through MFR.
        filename, content = scrape_dataverse(file_id)
        _, ext = os.path.splitext(filename)
        download_url = node.api_url_for(
            'dataverse_download_file_proxy', path=file_id
        )
        rendered = get_cache_content(
            node_addon,
            cache_file_name,
            start_render=True,
            remote_path=file_obj.file_id + ext,  # Include extension for MFR
            file_content=content,
            download_url=download_url,
        )
    else:
        # Cache hit: still need the display name (name-only scrape is cheaper).
        filename, _ = scrape_dataverse(file_id, name_only=True)

    render_url = node.api_url_for(
        'dataverse_get_rendered_file', path=file_id, render=True
    )

    ret = {
        'file_name': filename,
        'rendered': rendered,
        'render_url': render_url,
        'urls': {
            'render': render_url,
            'download': node.web_url_for('dataverse_download_file', path=file_id),
            'info': node.api_url_for('dataverse_get_file_info', path=file_id),
        }
    }
    ret.update(_view_project(node, auth))
    return ret
def test_view_project_pending_registration_for_write_contributor_does_not_contain_cancel_link(self):
    """The disapproval link is empty for contributors with only write access."""
    writer = UserFactory()
    self.node.add_contributor(writer, permissions=permissions.WRITE, auth=Auth(self.user), save=True)
    pending = RegistrationFactory(project=self.node, archive=True)
    assert_true(pending.is_pending_registration)
    data = _view_project(pending, Auth(writer))
    assert_equal(data['node']['disapproval_link'], '')
    pending.delete()
def test_view_project_embed_forks_excludes_registrations(self):
    """Registrations of a fork must not appear in the embedded fork list."""
    project = ProjectFactory()
    creator_auth = Auth(project.creator)
    fork = project.fork_node(creator_auth)
    # Register the fork; the registration should be filtered out below.
    RegistrationFactory(project=fork)
    serialized = _view_project(project, auth=creator_auth, embed_forks=True)
    node_data = serialized['node']
    assert_in('forks', node_data)
    assert_equal(len(node_data['forks']), 1)
    assert_equal(node_data['forks'][0]['id'], fork._id)
def badges_page(*args, **kwargs):
    """Serialize badge assertions for a node, plus the viewer's badge-addon
    context when they have the add-on enabled."""
    node = kwargs["node"] or kwargs["project"]
    auth = kwargs["auth"]
    ret = {"complete": True, "assertions": get_sorted_node_badges(node)}
    viewer = auth.user
    if viewer:
        badger = viewer.get_addon("badges")
        if badger:
            ret.update(badger.to_json(viewer))
            ret["uid"] = viewer._id
    ret.update(_view_project(node, auth))
    return ret
def collect_file_trees(**kwargs):
    """Serialize the project view plus add-on static assets for the HGrid
    file-tree page."""
    node = kwargs['node'] or kwargs['project']
    auth = kwargs['auth']
    payload = _view_project(node, auth)
    # Attach each add-on's static asset references.
    payload.update(rubeus.collect_addon_assets(node))
    return payload
def test_view_project_pointer_count_excludes_folders(self):
    """'points' counts project pointers but ignores organizer collections."""
    pointer_holder = ProjectFactory(is_public=True)  # project that points to another project
    target = self.node  # project that other project points to
    pointer_holder.add_pointer(target, Auth(pointer_holder.creator), save=True)
    # Also place the project in an organizer collection; it must not count.
    collection = CollectionFactory(creator=target.creator)
    collection.add_pointer(target, Auth(target.creator), save=True)
    serialized = _view_project(target, Auth(target.creator))
    # pointer_holder is included in the count, the collection is not.
    assert_equal(serialized['node']['points'], 1)
def get_init_timestamp_error_data_list(auth, node, **kwargs):
    """Build the OSF-view context for the timestamp-error list page."""
    context = _view_project(node, auth, primary=True)
    context.update(rubeus.collect_addon_assets(node))
    guid = kwargs.get('pid')
    context['provider_list'] = timestamp.get_error_list(guid)
    context['project_title'] = node.title
    context['guid'] = guid
    context['web_api_url'] = settings.DOMAIN + node.api_url
    return context
def test_view_project_pointer_count_excludes_folders(self):
    """Pointers from projects count toward 'points'; dashboard folders do not."""
    owner = UserFactory()
    pointer_holder = ProjectFactory(is_public=True)  # project that points to another project
    target = ProjectFactory(creator=owner)  # project that other project points to
    pointer_holder.add_pointer(target, Auth(pointer_holder.creator), save=True)
    # Project is also in a dashboard folder, which must be excluded.
    folder = FolderFactory(creator=target.creator)
    folder.add_pointer(target, Auth(target.creator), save=True)
    data = _view_project(target, Auth(target.creator))
    assert_equal(data['node']['points'], 1)
def badges_page(*args, **kwargs):
    """Assemble badge assertions for a node together with the viewer's
    badger (badges add-on) info, merged with the project serialization."""
    node = kwargs['node'] or kwargs['project']
    auth = kwargs['auth']
    page = {
        'complete': True,
        'assertions': get_sorted_node_badges(node),
    }
    viewer = auth.user
    if viewer:
        badger = viewer.get_addon('badges')
        if badger:
            page.update(badger.to_json(viewer))
            page['uid'] = viewer._id
    page.update(_view_project(node, auth))
    return page
def project_wiki_edit(auth, wname, **kwargs):
    """Serialize the edit-mode view of a wiki page.

    Returns the editor context (content, version info, TOC, API/web URLs)
    merged with the standard project serialization. A missing page yields
    an empty editor rather than an error.
    """
    node = kwargs['node'] or kwargs['project']
    wiki_name = wname.strip()
    wiki_page = node.get_wiki_page(wiki_name)

    # ensure home is always lower case since it cannot be renamed
    if wiki_name.lower() == 'home':
        wiki_name = 'home'

    if wiki_page:
        version = wiki_page.version
        is_current = wiki_page.is_current
        content = wiki_page.content
        wiki_page_api_url = node.api_url_for('project_wiki_page', wname=wiki_name)
    else:
        # No page yet: present an empty editor with placeholder metadata.
        version = 'NA'
        is_current = False
        content = ''
        wiki_page_api_url = None

    # TODO: Remove duplication with project_wiki_page
    toc = _serialize_wiki_toc(node, auth=auth)
    ret = {
        'wiki_id': wiki_page._primary_key if wiki_page else None,
        'wiki_name': wiki_page.page_name if wiki_page else wiki_name,
        'wiki_content': content,
        'version': version,
        'versions': _get_wiki_versions(node, wiki_name),
        'is_current': is_current,
        'is_edit': True,
        'pages_current': _get_wiki_pages_current(node),
        'toc': toc,
        'category': node.category,
        'urls': {
            'api': _get_wiki_api_urls(
                node, wiki_name, {
                    'content': node.api_url_for('wiki_page_content', wname=wiki_name),
                    'page': wiki_page_api_url
                }),
            'web': _get_wiki_web_urls(node, wiki_name),
        },
    }
    ret.update(_view_project(node, auth, primary=True))
    return ret
def test_view_project_embed_registrations_sorted_by_registered_date_descending(self):
    """Embedded registrations are returned newest-first by registered_date."""
    # Register the project several times with scattered registered_dates.
    registrations = []
    for days_ago in (21, 3, 2, 8, 13, 5, 1):
        registration = RegistrationFactory(project=self.project)
        registration.registered_date -= dt.timedelta(days_ago)
        registration.save()
        registrations.append(registration)
    ordered = sorted(registrations, key=lambda r: r.registered_date, reverse=True)
    expected = [r._id for r in ordered]
    data = _view_project(node=self.project, auth=Auth(self.project.creator), embed_registrations=True)
    actual = [entry['id'] for entry in data['node']['registrations']]
    assert_equal(actual, expected)
def test_view_project_pointer_count_excludes_folders(self):
    """Only project-level pointers contribute to the 'points' total."""
    linker = ProjectFactory(is_public=True)  # project that points to another project
    target = self.node  # project that other project points to
    linker.add_pointer(target, Auth(linker.creator), save=True)
    # An organizer collection also holds the project; it must not count.
    organizer = CollectionFactory(creator=target.creator)
    organizer.add_pointer(target, Auth(target.creator), save=True)
    data = _view_project(target, Auth(target.creator))
    assert_equal(data['node']['points'], 1)
def bitbucket_page(*args, **kwargs):
    """Render the Bitbucket add-on page for a node.

    Returns the add-on page content and assets merged with the standard
    project serialization.
    """
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    bitbucket = kwargs['node_addon']

    # Bug fix: _view_project takes an Auth object; the old code passed the
    # bare User (`auth.user`), unlike every other call site in this file.
    data = _view_project(node, auth)

    content = _page_content(node, bitbucket, data)
    rv = {
        'addon_title': 'Bitbucket',
        'addon_page': content,
        'addon_page_js': bitbucket.config.include_js['page'],
        'addon_page_css': bitbucket.config.include_css['page'],
    }
    rv.update(data)
    return rv
def test_serialize_node_summary_is_contributor_osf_group(self):
    """OSF group members get member-level permissions without being listed
    as contributors."""
    project = ProjectFactory()
    member = UserFactory()
    group = OSFGroupFactory(creator=member)
    project.add_osf_group(group, permissions.WRITE)
    serialized = _view_project(project, auth=Auth(member))
    user_data = serialized['user']
    assert_false(user_data['is_contributor'])
    assert_true(user_data['is_contributor_or_group_member'])
    assert_false(user_data['is_admin'])
    assert_true(user_data['can_edit'])
    assert_true(user_data['has_read_permissions'])
    assert_equal(set(user_data['permissions']), {permissions.READ, permissions.WRITE})
    assert_true(user_data['can_comment'])
def zotero_page(**kwargs):
    """Render the Zotero add-on page: fetched references XML plus the
    standard project serialization and page assets."""
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    zotero = node.get_addon('zotero')
    project_data = _view_project(node, auth)
    references_xml = zotero._fetch_references()
    payload = {
        'complete': True,
        'xml': references_xml,
        'addon_page_js': zotero.config.include_js['page'],
        'addon_page_css': zotero.config.include_css['page'],
    }
    payload.update(project_data)
    return payload
def project_wiki_edit(auth, wname, **kwargs):
    """Serialize the edit-mode context for a wiki page.

    Builds the editor payload (content, version list, TOC, API/web URLs)
    and merges in the project serialization; a nonexistent page opens an
    empty editor instead of raising.
    """
    node = kwargs['node'] or kwargs['project']
    wiki_name = wname.strip()
    wiki_page = node.get_wiki_page(wiki_name)

    # ensure home is always lower case since it cannot be renamed
    if wiki_name.lower() == 'home':
        wiki_name = 'home'

    if wiki_page:
        version = wiki_page.version
        is_current = wiki_page.is_current
        content = wiki_page.content
        wiki_page_api_url = node.api_url_for('project_wiki_page', wname=wiki_name)
    else:
        # Page does not exist yet: placeholder metadata, empty content.
        version = 'NA'
        is_current = False
        content = ''
        wiki_page_api_url = None

    # TODO: Remove duplication with project_wiki_page
    toc = _serialize_wiki_toc(node, auth=auth)
    ret = {
        'wiki_id': wiki_page._primary_key if wiki_page else None,
        'wiki_name': wiki_page.page_name if wiki_page else wiki_name,
        'wiki_content': content,
        'version': version,
        'versions': _get_wiki_versions(node, wiki_name),
        'is_current': is_current,
        'is_edit': True,
        'pages_current': _get_wiki_pages_current(node),
        'toc': toc,
        'category': node.category,
        'urls': {
            'api': _get_wiki_api_urls(node, wiki_name, {
                'content': node.api_url_for('wiki_page_content', wname=wiki_name),
                'page': wiki_page_api_url
            }),
            'web': _get_wiki_web_urls(node, wiki_name),
        },
    }
    ret.update(_view_project(node, auth, primary=True))
    return ret
def nds_home(auth, **kwargs):
    """Demo NDS landing view returning hard-coded sample data merged with
    the project serialization."""
    node = kwargs['node'] or kwargs['project']
    context = {
        'files': {
            "My dataset": "My Dataset",
            "Dataset 2": "Dataset 2",
        },
        # NOTE(review): 'first_nane' looks like a typo for 'first_name';
        # preserved as-is because templates may read the existing key —
        # confirm before renaming.
        'member': {
            "first_nane": "Ian",
            "last_name": "Taylor",
        },
        'nds_me': "Ian",
    }
    context.update(_view_project(node, auth, primary=True))
    return context
def project_wiki_page(auth, wname, **kwargs):
    """Serialize the read-only view of a wiki page.

    Returns a ``(context, status_code)`` tuple. The status is 404 when a
    non-home page does not exist; 'home' always renders (possibly empty).
    """
    node = kwargs['node'] or kwargs['project']
    anonymous = has_anonymous_link(node, auth)
    wiki_name = (wname or '').strip()
    wiki_page = node.get_wiki_page(name=wiki_name)

    # Defaults cover the missing-page case.
    status_code = 200
    version = 'NA'
    is_current = False
    content = ''
    if wiki_page:
        version = wiki_page.version
        is_current = wiki_page.is_current
        content = wiki_page.html(node)
    elif not wiki_page and wiki_name.lower() != 'home':
        # Missing non-home page: still render the empty view, but flag 404.
        status_code = 404

    ret = {
        'wiki_id': wiki_page._primary_key if wiki_page else None,
        'wiki_name': wiki_page.page_name if wiki_page else wiki_name,
        'wiki_content': content,
        'page': wiki_page,
        'version': version,
        'versions': _get_wiki_versions(node, wiki_name, anonymous=anonymous),
        'is_current': is_current,
        'is_edit': False,
        'pages_current': _get_wiki_pages_current(node),
        'toc': _serialize_wiki_toc(node, auth=auth),
        'category': node.category,
        'urls': {
            'api': _get_wiki_api_urls(node, wiki_name),
            'web': _get_wiki_web_urls(node, wiki_name),
        },
    }
    ret.update(_view_project(node, auth, primary=True))
    return ret, status_code
def project_wiki_compare(auth, wname, wver, **kwargs):
    """Serialize a diff between the current wiki page and version ``wver``.

    Raises 404 when the page or the comparison version does not exist.
    """
    node = kwargs['node'] or kwargs['project']
    anonymous = has_anonymous_link(node, auth)
    wiki_name = wname.strip()
    wiki_page = node.get_wiki_page(wiki_name)
    toc = _serialize_wiki_toc(node, auth=auth)
    if not wiki_page:
        raise HTTPError(http.NOT_FOUND)
    comparison_page = node.get_wiki_page(wiki_name, wver)
    if comparison_page:
        current = wiki_page.content
        comparison = comparison_page.content
        # Character-level diff rendered as HTML.
        sm = difflib.SequenceMatcher(None, comparison, current)
        content = show_diff(sm)
        content = content.replace('\n', '<br />')
        ret = {
            'wiki_id': wiki_page._primary_key,
            'wiki_name': wiki_page.page_name,
            'wiki_content': content,
            'versions': _get_wiki_versions(node, wiki_name, anonymous),
            'is_current': True,
            'is_edit': False,
            'version': wiki_page.version,
            'compare_version': wver,
            'pages_current': _get_wiki_pages_current(node),
            'toc': toc,
            'category': node.category,
            'urls': {
                'api': _get_wiki_api_urls(node, wiki_name),
                'web': _get_wiki_web_urls(node, wiki_name, wver),
            },
        }
        ret.update(_view_project(node, auth, primary=True))
        return ret
    # No such comparison version.
    raise HTTPError(http.NOT_FOUND)
def figshare_view_file(*args, **kwargs):
    """Render the figshare file-view page for an article file.

    Looks up the article/file on figshare, resolves (or creates) the GUID
    record, renders the file through the cache/MFR pipeline, and returns
    the serialized context merged with the project view.

    :raises HTTPError: 404 when ids are missing or the file is not found.
    """
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    article_id = kwargs.get('aid') or None
    file_id = kwargs.get('fid') or None
    anonymous = has_anonymous_link(node, auth)
    if not article_id or not file_id:
        raise HTTPError(http.NOT_FOUND)

    connect = Figshare.from_settings(node_settings.user_settings)
    if node_settings.figshare_type == 'project':
        item = connect.project(node_settings, node_settings.figshare_id)
    else:
        item = connect.article(node_settings, node_settings.figshare_id)
    # Verify the requested article belongs to the linked figshare item.
    if article_id not in str(item):
        raise HTTPError(http.NOT_FOUND)

    article = connect.article(node_settings, article_id)

    # Locate the requested file within the article.
    found = False
    for f in article['items'][0]['files']:
        if f['id'] == int(file_id):
            found = f
            break
    if not found:
        raise HTTPError(http.NOT_FOUND)

    try:
        # If GUID has already been created, we won't redirect, and can check
        # whether the file exists below
        guid = FigShareGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('article_id', 'eq', article_id) &
            Q('file_id', 'eq', file_id)
        )
    except Exception:
        # Narrowed from a bare except; TODO: catch the ODM's specific
        # "no results" exception here.
        guid = FigShareGuidFile(node=node, article_id=article_id, file_id=file_id)
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    private = not (article['items'][0]['status'] == 'Public')

    figshare_url = 'http://figshare.com/'
    if private:
        figshare_url += 'preview/_preview/{0}'.format(
            article['items'][0]['article_id'])
    else:
        figshare_url += 'articles/{0}/{1}'.format(
            article['items'][0]['title'].replace(' ', '_'),
            article['items'][0]['article_id'])

    # Bug fix: the format string had a literal placeholder where the article
    # title belongs, so the `filename` kwarg passed below was never used.
    version_url = "http://figshare.com/articles/{filename}/{file_id}".format(
        filename=article['items'][0]['title'],
        file_id=article['items'][0]['article_id'])

    download_url = node.api_url + 'figshare/download/article/{aid}/file/{fid}'.format(
        aid=article_id, fid=file_id)
    render_url = node.api_url + \
        'figshare/render/article/{aid}/file/{fid}'.format(aid=article_id, fid=file_id)
    delete_url = node.api_url + 'figshare/article/{aid}/file/{fid}/'.format(
        aid=article_id, fid=file_id)

    filename = found['name']
    cache_file_name = get_cache_file(article_id, file_id)
    rendered = get_cache_content(node_settings, cache_file_name)
    if private:
        # Private files are never rendered; show a pointer to figshare.
        rendered = messages.FIGSHARE_VIEW_FILE_PRIVATE.format(
            url='http://figshare.com/')
    elif rendered is None:
        # Cache miss: fetch content and render unless it is too large.
        filename, size, filedata = connect.get_file(node_settings, found)
        if figshare_settings.MAX_RENDER_SIZE is not None and size > figshare_settings.MAX_RENDER_SIZE:
            rendered = messages.FIGSHARE_VIEW_FILE_OVERSIZED.format(
                url=found.get('download_url'))
        else:
            rendered = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                remote_path=filename,
                file_content=filedata,
                download_url=download_url,
            )

    # categories = connect.categories()['items']  # TODO Cache this
    # categories = ''.join(
    #     ["<option value='{val}'>{label}</option>".format(val=i['id'], label=i['name']) for i in categories])

    rv = {
        'node': {
            'id': node._id,
            'title': node.title
        },
        'file_name': filename,
        'rendered': rendered,
        'file_status': article['items'][0]['status'],
        'file_version': article['items'][0]['version'],
        'doi': 'http://dx.doi.org/10.6084/m9.figshare.{0}'.format(
            article['items'][0]['article_id']),
        'parent_type': 'fileset' if article['items'][0]['defined_type'] == 'fileset' else 'singlefile',
        'parent_id': article['items'][0]['article_id'],
        # 'figshare_categories': categories,
        'figshare_title': article['items'][0]['title'],
        'figshare_desc': article['items'][0]['description'],
        'render_url': render_url,
        'urls': {
            'render': render_url,
            'download': found.get('download_url'),
            'version': version_url,
            'figshare': privacy_info_handle(figshare_url, anonymous),
            'delete': delete_url,
            'files': node.web_url_for('collect_file_trees')
        }
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def view_file(auth, **kwargs):
    """Render a legacy OSF-files page: resolve the file's GUID, load the
    latest version from disk, render it, and merge in the project view.

    Raises 404 when the file is missing from ``files_versions`` or absent
    on disk.
    """
    node_settings = kwargs['node_addon']
    node = kwargs['node'] or kwargs['project']
    file_name = kwargs['fid']
    # files_versions keys use '_' in place of '.'.
    file_name_clean = file_name.replace('.', '_')

    try:
        guid = OsfGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('name', 'eq', file_name)
        )
    except:
        # NOTE(review): bare except presumably targets the ODM's
        # "no results found" error — confirm and narrow if possible.
        guid = OsfGuidFile(
            node=node,
            name=file_name,
        )
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    # Throw 404 and log error if file not found in files_versions
    try:
        file_id = node.files_versions[file_name_clean][-1]
    except KeyError:
        logger.error('File {} not found in files_versions of component {}.'.format(
            file_name_clean, node._id
        ))
        raise HTTPError(http.NOT_FOUND)
    file_object = NodeFile.load(file_id)

    # Ensure NodeFile is attached to Node; should be fixed by actions or
    # improved data modeling in future
    if not file_object.node:
        file_object.node = node
        file_object.save()

    download_url = file_object.download_url(node)
    render_url = file_object.render_url(node)
    info_url = file_object.info_url(node)

    file_path = os.path.join(
        settings.UPLOADS_PATH,
        node._primary_key,
        file_name
    )
    # Throw 404 and log error if file not found on disk
    if not os.path.isfile(file_path):
        logger.error('File {} not found on disk.'.format(file_path))
        raise HTTPError(http.NOT_FOUND)

    # file_ext is computed but not used below.
    _, file_ext = os.path.splitext(file_path.lower())

    # Get or create rendered file
    cache_file = get_cache_file(
        file_object.filename,
        file_object.latest_version_number(node)
    )
    rendered = get_cache_content(
        node_settings,
        cache_file,
        start_render=True,
        file_path=file_path,
        file_content=None,
        download_path=download_url,
    )

    ret = {
        'file_name': file_name,
        'render_url': render_url,
        'rendered': rendered,
        'info_url': info_url,
    }
    ret.update(_view_project(node, auth))
    return ret
def github_view_file(auth, **kwargs):
    """Render the GitHub file-view page.

    Resolves the file's GUID (creating it after confirming the file exists),
    determines branch/SHA, builds commit history and URLs, and renders the
    file through the cache pipeline; merges in the project serialization.
    """
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    path = get_path(kwargs)

    file_name = os.path.split(path)[1]

    # Get branch / commit
    branch = request.args.get('branch')
    sha = request.args.get('sha', branch)
    ref = sha or branch

    connection = GitHub.from_settings(node_settings.user_settings)
    # Get current file for delete url
    current_file = connection.contents(user=node_settings.user, repo=node_settings.repo, path=path, ref=sha or branch)
    anonymous = has_anonymous_link(node, auth)

    try:
        # If GUID has already been created, we won't redirect, and can check
        # whether the file exists below
        guid = GithubGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path))
    except ModularOdmException:
        # If GUID doesn't exist, check whether file exists before creating
        commits = connection.history(
            node_settings.user, node_settings.repo, path, ref,
        )
        if not commits:
            raise HTTPError(http.NOT_FOUND)
        guid = GithubGuidFile(
            node=node,
            path=path,
        )
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    # Get default branch if neither SHA nor branch is provided
    if ref is None:
        repo = connection.repo(node_settings.user, node_settings.repo)
        ref = branch = repo.default_branch

    # Get file history; use SHA or branch if registered, else branch
    start_sha = ref if node.is_registration else branch
    commits = connection.history(node_settings.user, node_settings.repo, path, sha=start_sha)

    # Get current commit
    shas = [commit['sha'] for commit in commits]
    if not shas:
        raise HTTPError(http.NOT_FOUND)
    current_sha = sha if sha in shas else shas[0]

    # Get file URL
    download_url = '/' + guid._id + '/download/' + ref_to_params(branch, current_sha)
    render_url = os.path.join(node.api_url, 'github', 'file', path, 'render') + '/' + ref_to_params(branch, current_sha)

    delete_url = None
    if current_file:
        delete_url = node.api_url_for('github_delete_file', path=path) + ref_to_params(branch, current_file.sha)

    # Attach per-commit view/download links; anonymize if needed.
    for commit in commits:
        commit['download'] = ('/' + guid._id + '/download/' + ref_to_params(sha=commit['sha']))
        commit['view'] = ('/' + guid._id + '/' + ref_to_params(branch, sha=commit['sha']))
        if anonymous:
            commit['name'] = 'A user'
            commit['email'] = ''

    # Get or create rendered file
    cache_file_name = get_cache_file(
        path, current_sha,
    )
    rendered = get_cache_content(node_settings, cache_file_name)
    if rendered is None:
        try:
            _, data, size = connection.file(
                node_settings.user, node_settings.repo, path, ref=sha,
            )
        except TooBigError:
            rendered = 'File too large to download.'
        if rendered is None:
            # Skip if too large to be rendered.
            if github_settings.MAX_RENDER_SIZE is not None and size > github_settings.MAX_RENDER_SIZE:
                rendered = 'File too large to render; download file to view it.'
            else:
                rendered = get_cache_content(
                    node_settings,
                    cache_file_name,
                    start_render=True,
                    remote_path=guid.path,
                    file_content=data,
                    download_url=download_url,
                )

    rv = {
        'node': {
            'id': node._id,
            'title': node.title
        },
        'file_name': file_name,
        'files_page_url': node.web_url_for('collect_file_trees'),
        'current_sha': current_sha,
        'render_url': render_url,
        'rendered': rendered,
        'download_url': download_url,
        'delete_url': delete_url,
        'commits': commits,
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def github_view_file(auth, **kwargs):
    """Render the GitHub file-view page.

    Resolves the file GUID (creating it only after confirming the file
    exists upstream), picks the branch/SHA to show, annotates the commit
    history with view/download links, renders the file via the cache
    pipeline, and merges in the project serialization.
    """
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    path = get_path(kwargs)

    file_name = os.path.split(path)[1]

    # Get branch / commit
    branch = request.args.get('branch')
    sha = request.args.get('sha', branch)
    ref = sha or branch

    connection = GitHub.from_settings(node_settings.user_settings)
    # Get current file for delete url
    current_file = connection.contents(
        user=node_settings.user, repo=node_settings.repo, path=path,
        ref=sha or branch)
    anonymous = has_anonymous_link(node, auth)

    try:
        # If GUID has already been created, we won't redirect, and can check
        # whether the file exists below
        guid = GithubGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path)
        )
    except ModularOdmException:
        # If GUID doesn't exist, check whether file exists before creating
        commits = connection.history(
            node_settings.user, node_settings.repo, path, ref,
        )
        if not commits:
            raise HTTPError(http.NOT_FOUND)
        guid = GithubGuidFile(
            node=node,
            path=path,
        )
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    # Get default branch if neither SHA nor branch is provided
    if ref is None:
        repo = connection.repo(node_settings.user, node_settings.repo)
        ref = branch = repo.default_branch

    # Get file history; use SHA or branch if registered, else branch
    start_sha = ref if node.is_registration else branch
    commits = connection.history(
        node_settings.user, node_settings.repo, path, sha=start_sha
    )

    # Get current commit
    shas = [
        commit['sha']
        for commit in commits
    ]
    if not shas:
        raise HTTPError(http.NOT_FOUND)
    current_sha = sha if sha in shas else shas[0]

    # Get file URL
    download_url = '/' + guid._id + '/download/' + ref_to_params(branch, current_sha)
    render_url = os.path.join(
        node.api_url, 'github', 'file', path, 'render'
    ) + '/' + ref_to_params(branch, current_sha)

    delete_url = None
    if current_file:
        delete_url = node.api_url_for('github_delete_file', path=path) + ref_to_params(branch, current_file.sha)

    # Attach per-commit links; scrub author identity for anonymous links.
    for commit in commits:
        commit['download'] = (
            '/' + guid._id + '/download/' + ref_to_params(sha=commit['sha'])
        )
        commit['view'] = (
            '/' + guid._id + '/' + ref_to_params(branch, sha=commit['sha'])
        )
        if anonymous:
            commit['name'] = 'A user'
            commit['email'] = ''

    # Get or create rendered file
    cache_file_name = get_cache_file(
        path, current_sha,
    )
    rendered = get_cache_content(node_settings, cache_file_name)
    if rendered is None:
        try:
            _, data, size = connection.file(
                node_settings.user, node_settings.repo, path, ref=sha,
            )
        except TooBigError:
            rendered = 'File too large to download.'
        if rendered is None:
            # Skip if too large to be rendered.
            if github_settings.MAX_RENDER_SIZE is not None and size > github_settings.MAX_RENDER_SIZE:
                rendered = 'File too large to render; download file to view it.'
            else:
                rendered = get_cache_content(
                    node_settings,
                    cache_file_name,
                    start_render=True,
                    remote_path=guid.path,
                    file_content=data,
                    download_url=download_url,
                )

    rv = {
        'node': {
            'id': node._id,
            'title': node.title
        },
        'file_name': file_name,
        'files_page_url': node.web_url_for('collect_file_trees'),
        'current_sha': current_sha,
        'render_url': render_url,
        'rendered': rendered,
        'download_url': download_url,
        'delete_url': delete_url,
        'commits': commits,
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def project_wiki_view(auth, wname, path=None, **kwargs):
    """Serialize the combined wiki view (view/edit/compare/menu panels).

    Computes edit permission, the panel layout requested via query args,
    the view/compare version settings, and the ShareJS session id for
    editors; merges everything with the project serialization.
    """
    node = kwargs["node"] or kwargs["project"]
    anonymous = has_anonymous_link(node, auth)
    wiki_name = (wname or "").strip()
    wiki_key = to_mongo_key(wiki_name)
    wiki_page = node.get_wiki_page(wiki_name)
    wiki_settings = node.get_addon("wiki")
    # Editable when logged in, not a registration, and either a write
    # contributor or the wiki is publicly editable.
    can_edit = (
        auth.logged_in and not
        node.is_registration and (
            node.has_permission(auth.user, "write") or
            wiki_settings.is_publicly_editable
        )
    )
    versions = _get_wiki_versions(node, wiki_name, anonymous=anonymous)

    # Determine panels used in view
    panels = {"view", "edit", "compare", "menu"}
    if request.args and set(request.args).intersection(panels):
        panels_used = [panel for panel in request.args if panel in panels]
        num_columns = len(set(panels_used).intersection({"view", "edit", "compare"}))
        if num_columns == 0:
            # Menu-only request: fall back to showing the view panel.
            panels_used.append("view")
            num_columns = 1
    else:
        panels_used = ["view", "menu"]
        num_columns = 1

    try:
        view = wiki_utils.format_wiki_version(
            version=request.args.get("view"),
            num_versions=len(versions),
            allow_preview=True,
        )
        compare = wiki_utils.format_wiki_version(
            version=request.args.get("compare"),
            num_versions=len(versions),
            allow_preview=False,
        )
    except InvalidVersionError:
        raise WIKI_INVALID_VERSION_ERROR

    # Default versions for view and compare
    version_settings = {
        "view": view or ("preview" if "edit" in panels_used else "current"),
        "compare": compare or "previous",
    }

    # ensure home is always lower case since it cannot be renamed
    if wiki_name.lower() == "home":
        wiki_name = "home"

    if wiki_page:
        version = wiki_page.version
        is_current = wiki_page.is_current
        content = wiki_page.html(node)
        use_python_render = wiki_page.rendered_before_update
    else:
        version = "NA"
        is_current = False
        content = ""
        use_python_render = False

    if can_edit:
        # Editors need a ShareJS session keyed by a private uuid.
        if wiki_key not in node.wiki_private_uuids:
            wiki_utils.generate_private_uuid(node, wiki_name)
        sharejs_uuid = wiki_utils.get_sharejs_uuid(node, wiki_name)
    else:
        if wiki_key not in node.wiki_pages_current and wiki_key != "home":
            raise WIKI_PAGE_NOT_FOUND_ERROR
        if "edit" in request.args:
            if wiki_settings.is_publicly_editable:
                raise HTTPError(http.UNAUTHORIZED)
            raise HTTPError(http.FORBIDDEN)
        sharejs_uuid = None

    ret = {
        "wiki_id": wiki_page._primary_key if wiki_page else None,
        "wiki_name": wiki_page.page_name if wiki_page else wiki_name,
        "wiki_content": content,
        "use_python_render": use_python_render,
        "page": wiki_page,
        "version": version,
        "versions": versions,
        "sharejs_uuid": sharejs_uuid or "",
        "sharejs_url": settings.SHAREJS_URL,
        "is_current": is_current,
        "version_settings": version_settings,
        "pages_current": _get_wiki_pages_current(node),
        "category": node.category,
        "panels_used": panels_used,
        "num_columns": num_columns,
        "urls": {
            "api": _get_wiki_api_urls(
                node,
                wiki_name,
                {
                    "content": node.api_url_for("wiki_page_content", wname=wiki_name),
                    "draft": node.api_url_for("wiki_page_draft", wname=wiki_name),
                },
            ),
            "web": _get_wiki_web_urls(node, wiki_name),
            "gravatar": get_gravatar(auth.user, 25),
        },
    }
    ret.update(_view_project(node, auth, primary=True))
    ret["user"]["can_edit_wiki_body"] = can_edit
    return ret
def test_view_project_embed_registrations_includes_contribution_count(self, mock_archive):
    """Each embedded registration summary carries its log count ('nlogs')."""
    auth = Auth(user=self.project.creator)
    self.project.register_node(get_default_metaschema(), auth, '', None)
    result = _view_project(node=self.project, auth=auth, embed_registrations=True)
    assert_is_not_none(result['node']['registrations'][0]['nlogs'])
def menbib_get_page_info(node_addon, auth, **kwargs):
    """Build the Mendeley ('menbib') add-on page context for a node.

    Fetches the user's folders and library (or a single folder when
    ``?folder=`` is given), pulls per-document metadata, and merges it with
    the standard project serialization.

    :param node_addon: Menbib node settings object.
    :param auth: Auth object for the current request.
    :raises HTTPError: 403 when the add-on has no authorized user settings.
    :return: dict of template context.
    """
    folder = request.args.get('folder')
    if node_addon.user_settings is None:
        flash('Authorize Mendeley add-on in Settings', 'warning')
        raise HTTPError(http.FORBIDDEN)
    node = node_addon.owner
    user_settings = node_addon.user_settings
    client = get_node_addon_client(node_addon)
    client.from_settings(user_settings)
    client.refresh_access_token()
    user_folders = client.folders()
    user_library = client.library()
    # Idiom fix: one pass per list instead of indexing with range(len(...)).
    user_folders_id = [f['id'] for f in user_folders]
    user_folders_name = [f['name'] for f in user_folders]
    if folder is not None:
        # Resolve the requested folder name to its id (raises ValueError if
        # the name is unknown, same as the original index() lookup).
        idx = user_folders_name.index(folder)
        folder_documentId = client.folder_details(user_folders_id[idx])
        documentId = folder_documentId['document_ids']
    else:
        documentId = user_library['document_ids']
    doc_meta = []
    for doc_id in documentId:
        meta = client.document_details(doc_id)
        authors = meta['authors']
        author = [
            {'family': a['surname'], 'given': a['forename']}
            for a in authors
        ]
        # Idiom fix: join instead of quadratic += concatenation followed by
        # stripping the trailing ', '.
        second_line = ', '.join(
            '{0} {1}'.format(a['forename'], a['surname']) for a in authors
        )
        second_line = second_line + ' (' + str(meta.get('year', '0')) + ')'
        third_line = meta['published_in'] + ' ' \
            + meta.get('volume', '') + ' ' \
            + '(' + meta.get('issue', '') + ')' + ' p. ' \
            + meta.get('pages', '')
        doc_meta.append({
            "author": author,
            "id": meta['id'],
            "issued": {
                "date-parts": [
                    [
                        meta.get('year', '0'),
                        meta.get('month', '0'),
                        meta.get('day', '0'),
                    ]
                ]
            },
            "title": meta.get('title', "").replace('.', ''),
            "type": meta.get('type', "").lower(),
            "abstract": meta.get('abstract', ""),
            "publisher": meta.get('published_in', ""),
            "volume": meta.get('volume', ""),
            "page": meta.get('pages', ""),
            "url": meta.get('url', " "),
            "second_line": second_line,
            # Drop an empty '()' (no issue number) and trim trailing ' p. '.
            "third_line": third_line.replace('()', '').strip(' p. '),
        })
    data = _view_project(node, auth, primary=True)
    rv = _page_content(node, node_addon)
    rv.update({
        'addon_page_js': user_settings.config.include_js.get('page'),
        'addon_page_css': user_settings.config.include_css.get('page'),
        'items': doc_meta,
        'citation_styles': menbib_settings.CITATION_STYLES,
        'export_formats': menbib_settings.EXPORT_FORMATS,
        'folder_names': user_folders_name,
    })
    rv.update(user_settings.config.to_json())
    rv.update(data)
    return rv
def project_wiki_view(auth, wname, path=None, **kwargs):
    """Render the wiki page view context for a node (WikiVersion-backed).

    :param auth: Auth object for the current request.
    :param wname: Requested wiki page name; "home" is special-cased below.
    :param path: Accepted for URL-rule compatibility; not used here.
    :raises: WIKI_INVALID_VERSION_ERROR for bad ?view=/?compare= values;
        WIKI_PAGE_NOT_FOUND_ERROR / HTTPError(401|403) for missing pages or
        insufficient permission. May instead return a redirect to the GUID
        URL for viewers who lack edit rights.
    :return: dict of template context merged with ``_view_project`` data.
    """
    node = kwargs['node'] or kwargs['project']
    anonymous = has_anonymous_link(node, auth)
    wiki_name = (wname or '').strip()
    wiki_key = to_mongo_key(wiki_name)
    wiki_page = node.get_wiki_page(wiki_name)
    wiki_version = node.get_wiki_version(wiki_name)
    wiki_settings = node.get_addon('wiki')
    # Editing requires login, a non-registration node, and either write
    # permission or a publicly-editable wiki.
    can_edit = (auth.logged_in and not
                node.is_registration and
                (node.has_permission(auth.user, 'write') or
                 wiki_settings.is_publicly_editable))
    versions = _get_wiki_versions(node, wiki_name, anonymous=anonymous)

    # Determine panels used in view
    panels = {'view', 'edit', 'compare', 'menu'}
    if request.args and set(request.args).intersection(panels):
        panels_used = [panel for panel in request.args if panel in panels]
        num_columns = len(set(panels_used).intersection({'view', 'edit', 'compare'}))
        # "menu" alone occupies no column; fall back to showing the view panel.
        if num_columns == 0:
            panels_used.append('view')
            num_columns = 1
    else:
        # No panel query args: default two-panel layout.
        panels_used = ['view', 'menu']
        num_columns = 1

    # Validate requested versions against how many versions actually exist.
    try:
        view = wiki_utils.format_wiki_version(
            version=request.args.get('view'),
            num_versions=len(versions),
            allow_preview=True,
        )
        compare = wiki_utils.format_wiki_version(
            version=request.args.get('compare'),
            num_versions=len(versions),
            allow_preview=False,
        )
    except InvalidVersionError:
        raise WIKI_INVALID_VERSION_ERROR

    # ensure home is always lower case since it cannot be renamed
    if wiki_name.lower() == 'home':
        wiki_name = 'home'

    if wiki_version:
        version = wiki_version.identifier
        is_current = wiki_version.is_current
        content = wiki_version.html(node)
        rendered_before_update = wiki_version.rendered_before_update
    else:
        # Placeholder values for a page that does not exist yet.
        version = 'NA'
        is_current = False
        content = ''
        rendered_before_update = False

    if can_edit:
        # Lazily create the private uuid backing the ShareJS document.
        if wiki_key not in node.wiki_private_uuids:
            wiki_utils.generate_private_uuid(node, wiki_name)
        sharejs_uuid = wiki_utils.get_sharejs_uuid(node, wiki_name)
    else:
        # "home" still renders (empty) when missing; other pages must exist.
        if not wiki_page and wiki_key != 'home':
            raise WIKI_PAGE_NOT_FOUND_ERROR
        if 'edit' in request.args:
            # Publicly-editable wiki: ask anonymous user to log in (401).
            if wiki_settings.is_publicly_editable:
                raise HTTPError(http.UNAUTHORIZED)
            # Viewer without edit rights: bounce to the read-only GUID view.
            if node.can_view(auth):
                return redirect(node.web_url_for('project_wiki_view', wname=wname, _guid=True))
            raise HTTPError(http.FORBIDDEN)
        sharejs_uuid = None

    # Opens 'edit' panel when home wiki is empty
    if not content and can_edit and wiki_name == 'home':
        panels_used.append('edit')

    # Default versions for view and compare (computed after the panel
    # adjustment above so an auto-opened edit panel defaults to 'preview').
    version_settings = {
        'view': view or ('preview' if 'edit' in panels_used else 'current'),
        'compare': compare or 'previous',
    }

    ret = {
        'wiki_id': wiki_page._primary_key if wiki_page else None,
        'wiki_name': wiki_page.page_name if wiki_page else wiki_name,
        'wiki_content': content,
        'rendered_before_update': rendered_before_update,
        'page': wiki_page,
        'version': version,
        'versions': versions,
        'sharejs_uuid': sharejs_uuid or '',
        'sharejs_url': settings.SHAREJS_URL,
        'is_current': is_current,
        'version_settings': version_settings,
        'pages_current': _get_wiki_pages_latest(node),
        'category': node.category,
        'panels_used': panels_used,
        'num_columns': num_columns,
        'urls': {
            'api': _get_wiki_api_urls(node, wiki_name, {
                'content': node.api_url_for('wiki_page_content', wname=wiki_name),
                'draft': node.api_url_for('wiki_page_draft', wname=wiki_name),
            }),
            'web': _get_wiki_web_urls(node, wiki_name),
            'profile_image': get_profile_image_url(auth.user, 25),
        },
    }
    # Merge in the standard project serialization last, then expose edit rights.
    ret.update(_view_project(node, auth, primary=True))
    ret['user']['can_edit_wiki_body'] = can_edit
    return ret
def serialize_node(*args, **kwargs):
    """Thin pass-through to ``_view_project`` (imported lazily to avoid a
    circular import)."""
    # Not recommended practice
    from website.project.views.node import _view_project
    return _view_project(*args, **kwargs)
def collect_timestamp_trees_to_json(auth, node, **kwargs):
    """Collect the per-provider file listing for a project as JSON.

    Queries the v2 files API for the node's storage providers, then walks
    each provider's root via the WaterButler metadata endpoint, recursing
    into folders with ``waterbutler_folder_file_info``.

    :param auth: Auth object for the current request (admin caller).
    :param node: Node whose files are listed.
    :param kwargs: must contain 'pid', the project guid.
    :return: ``{'provider_list': [{'provider': ..., 'provider_file_list': [...]}]}``
    """
    # admin call project to provider file list
    serialized = _view_project(node, auth, primary=True)
    serialized.update(rubeus.collect_addon_assets(node))
    # Resolve the serialized user guid back to the OSFUser record so we can
    # authenticate the internal API requests with that user's cookie.
    user_info = OSFUser.objects.get(id=Guid.objects.get(_id=serialized['user']['id']).object_id)
    api_url = util.api_v2_url(api_url_path(kwargs.get('pid')))
    cookie = user_info.get_or_create_cookie()
    cookies = {settings.COOKIE_NAME: cookie}
    headers = {'content-type': 'application/json'}
    provider_json_res = None
    file_res = requests.get(api_url, headers=headers, cookies=cookies)
    provider_json_res = file_res.json()
    file_res.close()
    provider_list = []
    for provider_data in provider_json_res['data']:
        # List the provider root ('/') through WaterButler.
        waterbutler_meta_url = util.waterbutler_api_url_for(
            kwargs.get('pid'),
            provider_data['attributes']['provider'],
            '/',
            **dict(waterbutler_meta_parameter())
        )
        waterbutler_json_res = None
        waterbutler_res = requests.get(waterbutler_meta_url, headers=headers, cookies=cookies)
        waterbutler_json_res = waterbutler_res.json()
        waterbutler_res.close()
        file_list = []
        child_file_list = []
        for file_data in waterbutler_json_res['data']:
            if file_data['attributes']['kind'] == 'folder':
                # Recurse into sub-folders; results are appended after the
                # top-level files below.
                child_file_list.extend(
                    waterbutler_folder_file_info(
                        kwargs.get('pid'),
                        provider_data['attributes']['provider'],
                        file_data['attributes']['path'],
                        node, cookies, headers
                    )
                )
            else:
                file_info = None
                # Ensure a BaseFileNode exists for this path so we can report
                # a stable file_id.
                basefile_node = BaseFileNode.resolve_class(
                    provider_data['attributes']['provider'],
                    BaseFileNode.FILE
                ).get_or_create(
                    node,
                    file_data['attributes']['path']
                )
                basefile_node.save()
                # Only osfstorage exposes a version number in 'extra'.
                if provider_data['attributes']['provider'] == 'osfstorage':
                    file_info = {
                        'file_name': file_data['attributes']['name'],
                        'file_path': file_data['attributes']['materialized'],
                        'file_kind': file_data['attributes']['kind'],
                        'file_id': basefile_node._id,
                        'version': file_data['attributes']['extra']['version']
                    }
                else:
                    file_info = {
                        'file_name': file_data['attributes']['name'],
                        'file_path': file_data['attributes']['materialized'],
                        'file_kind': file_data['attributes']['kind'],
                        'file_id': basefile_node._id,
                        'version': ''
                    }
                if file_info:
                    file_list.append(file_info)
        file_list.extend(child_file_list)
        # Skip providers with no files at all.
        if file_list:
            provider_files = {
                'provider': provider_data['attributes']['provider'],
                'provider_file_list': file_list
            }
            provider_list.append(provider_files)
    return {'provider_list': provider_list}
def figshare_view_file(*args, **kwargs):
    """Render the file-view page for a single figshare article file.

    :raises HTTPError: 404 when the article/file ids are missing, the article
        is not part of the configured figshare project/article, or the file
        is not found in the article.
    :return: dict of template context merged with ``_view_project`` data, or
        a redirect to the file's GUID URL.
    """
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    node_settings = kwargs['node_addon']
    article_id = kwargs.get('aid') or None
    file_id = kwargs.get('fid') or None
    anonymous = has_anonymous_link(node, auth)
    if not article_id or not file_id:
        raise HTTPError(http.NOT_FOUND)
    connect = Figshare.from_settings(node_settings.user_settings)
    if node_settings.figshare_type == 'project':
        item = connect.project(node_settings, node_settings.figshare_id)
    else:
        item = connect.article(node_settings, node_settings.figshare_id)
    # The requested article must belong to the linked figshare container.
    if article_id not in str(item):
        raise HTTPError(http.NOT_FOUND)
    article = connect.article(node_settings, article_id)
    found = False
    for f in article['items'][0]['files']:
        if f['id'] == int(file_id):
            found = f
            break
    if not found:
        raise HTTPError(http.NOT_FOUND)
    try:
        # If GUID has already been created, we won't redirect, and can check
        # whether the file exists below
        guid = FigShareGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('article_id', 'eq', article_id) &
            Q('file_id', 'eq', file_id)
        )
    # Fix: was a bare ``except:``, which also swallowed SystemExit and
    # KeyboardInterrupt; treat any lookup failure as "not created yet".
    except Exception:
        guid = FigShareGuidFile(node=node, article_id=article_id, file_id=file_id)
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    # Hoist the repeatedly-indexed article record into a local.
    item0 = article['items'][0]
    # Fix: was ``not(item['status'] == 'Public')``; ``!=`` is equivalent.
    private = item0['status'] != 'Public'
    figshare_url = 'http://figshare.com/'
    if private:
        figshare_url += 'preview/_preview/{0}'.format(item0['article_id'])
    else:
        figshare_url += 'articles/{0}/{1}'.format(
            item0['title'].replace(' ', '_'), item0['article_id'])
    # Fix: the template contained a literal placeholder instead of
    # ``{filename}`` while a ``filename=`` argument was supplied but unused.
    version_url = "http://figshare.com/articles/{filename}/{file_id}".format(
        filename=item0['title'], file_id=item0['article_id'])
    download_url = node.api_url + 'figshare/download/article/{aid}/file/{fid}'.format(
        aid=article_id, fid=file_id)
    render_url = node.api_url + \
        'figshare/render/article/{aid}/file/{fid}'.format(aid=article_id, fid=file_id)
    delete_url = node.api_url + 'figshare/article/{aid}/file/{fid}/'.format(
        aid=article_id, fid=file_id)
    filename = found['name']
    cache_file_name = get_cache_file(
        article_id, file_id
    )
    rendered = get_cache_content(node_settings, cache_file_name)
    if private:
        # Private figshare files cannot be rendered; show a notice instead.
        rendered = messages.FIGSHARE_VIEW_FILE_PRIVATE.format(url='http://figshare.com/')
    elif rendered is None:
        # Cache miss: fetch the file and (size permitting) start a render.
        filename, size, filedata = connect.get_file(node_settings, found)
        if figshare_settings.MAX_RENDER_SIZE is not None and size > figshare_settings.MAX_RENDER_SIZE:
            rendered = messages.FIGSHARE_VIEW_FILE_OVERSIZED.format(
                url=found.get('download_url'))
        else:
            rendered = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                remote_path=filename,
                file_content=filedata,
                download_url=download_url,
            )
    categories = connect.categories()['items']  # TODO Cache this
    categories = ''.join(
        ["<option value='{val}'>{label}</option>".format(val=i['id'], label=i['name'])
         for i in categories])
    rv = {
        'node': {
            'id': node._id,
            'title': node.title
        },
        'file_name': filename,
        'rendered': rendered,
        'file_status': item0['status'],
        'file_version': item0['version'],
        'doi': 'http://dx.doi.org/10.6084/m9.figshare.{0}'.format(item0['article_id']),
        'parent_type': 'fileset' if item0['defined_type'] == 'fileset' else 'singlefile',
        'parent_id': item0['article_id'],
        'figshare_categories': categories,
        'figshare_title': item0['title'],
        'figshare_desc': item0['description'],
        'urls': {
            'render': render_url,
            'download': found.get('download_url'),
            'version': version_url,
            'figshare': privacy_info_handle(figshare_url, anonymous),
            'delete': delete_url,
            'files': node.web_url_for('collect_file_trees')
        }
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def get_init_timestamp_error_data_list(auth, node, **kwargs):
    """Get the timestamp-verification error list for a project (OSF view).

    Builds the base project context, then groups failed
    ``RdmFileTimestamptokenVerifyResult`` rows by provider (the query is
    ordered by provider/path, so grouping is done in a single pass).

    :param auth: Auth object for the current request.
    :param node: Node whose timestamp errors are listed.
    :param kwargs: must contain 'pid', the project guid.
    :return: project context dict extended with 'provider_list',
        'project_title', 'guid' and 'web_api_url'.
    """
    ctx = _view_project(node, auth, primary=True)
    ctx.update(rubeus.collect_addon_assets(node))
    data_list = RdmFileTimestamptokenVerifyResult.objects.filter(
        project_id=kwargs.get('pid')).order_by('provider', 'path')
    provider_error_list = []
    provider = None
    error_list = []
    for data in data_list:
        # Only failed verifications are reported.
        if data.inspection_result_status == api_settings.TIME_STAMP_TOKEN_CHECK_SUCCESS:
            continue
        if not provider:
            provider = data.provider
        elif provider != data.provider:
            # Provider changed (rows are sorted by provider): flush the
            # accumulated group and start a new one.
            provider_error_list.append({'provider': provider, 'error_list': error_list})
            provider = data.provider
            error_list = []
        if data.inspection_result_status in VERIFY_RESULT:
            verify_result_title = VERIFY_RESULT[data.inspection_result_status]
        else:  # 'FILE missing(Unverify)'
            verify_result_title = \
                api_settings.FILE_NOT_EXISTS_TIME_STAMP_TOKEN_CHECK_FILE_NOT_FOUND_MSG
        # Prefer the most recent operator: updater if present, else creator.
        if not data.update_user:
            operator_user = OSFUser.objects.get(id=data.create_user).fullname
            operator_date = data.create_date.strftime('%Y/%m/%d %H:%M:%S')
        else:
            operator_user = OSFUser.objects.get(id=data.update_user).fullname
            operator_date = data.update_date.strftime('%Y/%m/%d %H:%M:%S')
        if provider == 'osfstorage':
            # osfstorage files carry a version number on the BaseFileNode.
            base_file_data = BaseFileNode.objects.get(_id=data.file_id)
            error_info = {
                'file_name': base_file_data.name,
                'file_path': data.path,
                'file_kind': 'file',
                'project_id': data.project_id,
                'file_id': data.file_id,
                'version': base_file_data.current_version_number,
                'operator_user': operator_user,
                'operator_date': operator_date,
                'verify_result_title': verify_result_title
            }
        else:
            file_name = os.path.basename(data.path)
            error_info = {
                'file_name': file_name,
                'file_path': data.path,
                'file_kind': 'file',
                'project_id': data.project_id,
                'file_id': data.file_id,
                'version': '',
                'operator_user': operator_user,
                'operator_date': operator_date,
                'verify_result_title': verify_result_title
            }
        error_list.append(error_info)
    # Flush the final provider group, if any.
    if error_list:
        provider_error_list.append({'provider': provider, 'error_list': error_list})
    ctx['provider_list'] = provider_error_list
    ctx['project_title'] = node.title
    ctx['guid'] = kwargs.get('pid')
    ctx['web_api_url'] = settings.DOMAIN + node.api_url
    return ctx
def mendeley_page(*args, **kwargs):
    """Render the Mendeley add-on page for a node.

    Refreshes the user's OAuth token, fetches the library (or the folder
    named in ``?folder=``), builds CSL-like metadata for each document, and
    merges the result with the standard project serialization.

    :return: dict of template context.
    """
    folder = request.args.get('folder')
    user = kwargs['auth']  # NOTE(review): despite the name, this is an Auth object
    node = kwargs['node'] or kwargs['project']
    mendeley = kwargs['node_addon']
    mendeley_user = user.user.get_addon('mendeley')
    code = request.args.get('code')
    # Refresh the OAuth token and persist the new credentials in memory.
    token = oauth_refresh_token(mendeley_user.oauth_refresh_token, code, user.user,
                                mendeley_user.oauth_token_expires, mendeley_user.oauth_token,)
    mendeley_user.oauth_access_token = token['access_token']
    mendeley_user.oauth_refresh_token = token['refresh_token']
    mendeley_user.oauth_token_type = token['token_type']
    mendeley_user.oauth_token_expires = token['expires_in']
    connect = Mendeley.from_settings(mendeley.user_settings)
    user_library = connect.library(mendeley.user_settings)
    user_folders = connect.folders(mendeley.user_settings)
    # Idiom fix: one pass per list instead of indexing with range(len(...)).
    user_folders_id = [f['id'] for f in user_folders]
    user_folders_name = [f['name'] for f in user_folders]
    # Idiom fix: was ``folder != None``; PEP 8 mandates ``is not None``.
    if folder is not None:
        idx = user_folders_name.index(folder)
        folder_documentId = connect.folder_details(mendeley.user_settings, user_folders_id[idx])
        documentId = folder_documentId['document_ids']
    else:
        documentId = user_library['document_ids']
    doc_meta = []
    for doc_id in documentId:
        meta = connect.document_details(mendeley.user_settings, doc_id)
        authors = meta['authors']
        author = [
            {'family': a['surname'], 'given': a['forename']}
            for a in authors
        ]
        # Idiom fix: join instead of quadratic += concatenation followed by
        # stripping the trailing ', '.
        second_line = ', '.join(
            '{0} {1}'.format(a['forename'], a['surname']) for a in authors
        )
        second_line = second_line + ' (' + str(meta.get('year', '0')) + ')'
        third_line = meta['published_in'] + ' ' \
            + meta.get('volume', '') + ' ' \
            + '(' + meta.get('issue', '') + ')' + ' p. ' \
            + meta.get('pages', '')
        doc_meta.append({
            "author": author,
            "id": meta['id'],
            "issued": {
                "date-parts": [
                    [
                        meta.get('year', '0'),
                        meta.get('month', '0'),
                        meta.get('day', '0'),
                    ]
                ]
            },
            "title": meta.get('title', "").replace('.', ''),
            "type": meta.get('type', "").lower(),
            "abstract": meta.get('abstract', ""),
            "publisher": meta.get('published_in', ""),
            "volume": meta.get('volume', ""),
            "page": meta.get('pages', ""),
            "url": meta.get('url', " "),
            "second_line": second_line,
            # Drop an empty '()' (no issue number) and trim trailing ' p. '.
            "third_line": third_line.replace('()', '').strip(' p. '),
        })
    data = _view_project(node, user, primary=True)
    rv = _page_content(node, mendeley)
    rv.update({
        'addon_page_js': mendeley_user.config.include_js.get('page'),
        'addon_page_css': mendeley_user.config.include_css.get('page'),
        'items': doc_meta,
        'citation_styles': CITATION_STYLES,
        'export_formats': EXPORT_FORMATS,
        'folder_names': user_folders_name,
    })
    rv.update(mendeley_user.config.to_json())
    rv.update(data)
    return rv
def s3_view(**kwargs):
    """Render the file-view page for an S3 key (optionally a given version).

    :raises HTTPError: 404 when no path is given or the key does not exist.
    :return: dict of template context merged with ``_view_project`` data, or
        a redirect to the file's GUID URL.
    """
    path = kwargs.get('path')
    vid = request.args.get('vid')
    if not path:
        raise HTTPError(http.NOT_FOUND)
    # 'Pre-versioning' marks keys uploaded before bucket versioning existed.
    if vid == 'Pre-versioning':
        vid = 'null'
    node_settings = kwargs['node_addon']
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    wrapper = S3Wrapper.from_addon(node_settings)
    key = wrapper.get_wrapped_key(urllib.unquote(path), vid=vid)
    if key is None:
        raise HTTPError(http.NOT_FOUND)
    try:
        guid = S3GuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path)
        )
    # Fix: was a bare ``except:``, which also swallowed SystemExit and
    # KeyboardInterrupt; treat any lookup failure as "not created yet".
    except Exception:
        guid = S3GuidFile(
            node=node,
            path=path,
        )
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    cache_file_name = get_cache_file_name(path, key.etag)
    urls = build_urls(node, path, etag=key.etag)
    if key.s3Key.size > MAX_RENDER_SIZE:
        render = 'File too large to render; download file to view it'
    else:
        # Check to see if the file has already been rendered.
        render = get_cache_content(node_settings, cache_file_name)
        if render is None:
            file_contents = key.s3Key.get_contents_as_string()
            # NOTE(review): a sibling version of this view also passes
            # remote_path=path here — confirm whether this call should too.
            render = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                file_content=file_contents,
                download_url=urls['download'],
            )
    versions = create_version_list(wrapper, urllib.unquote(path), node)
    rv = {
        'file_name': key.name,
        'rendered': render,
        'download_url': urls['download'],
        'render_url': urls['render'],
        'versions': versions,
        'current': key.version_id,
        'info_url': urls['info'],
        'delete_url': urls['delete'],
        'files_page_url': node.web_url_for('collect_file_trees')
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def view_file(auth, **kwargs):
    """Render the legacy OSF-files view page for a single uploaded file.

    :param auth: Auth object for the current request.
    :raises HTTPError: 404 when the file is missing from ``files_versions``
        or absent on disk.
    :return: dict of template context merged with ``_view_project`` data, or
        a redirect to the file's GUID URL.
    """
    node_settings = kwargs['node_addon']
    node = kwargs['node'] or kwargs['project']
    file_name = kwargs['fid']
    # files_versions keys use '_' in place of '.'.
    file_name_clean = file_name.replace('.', '_')
    try:
        guid = OsfGuidFile.find_one(
            Q('node', 'eq', node) &
            Q('name', 'eq', file_name)
        )
    # Fix: was a bare ``except:``, which also swallowed SystemExit and
    # KeyboardInterrupt; treat any lookup failure as "not created yet".
    except Exception:
        guid = OsfGuidFile(
            node=node,
            name=file_name,
        )
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    # Throw 404 and log error if file not found in files_versions
    try:
        file_id = node.files_versions[file_name_clean][-1]
    except KeyError:
        logger.error('File {} not found in files_versions of component {}.'.format(
            file_name_clean, node._id
        ))
        raise HTTPError(http.NOT_FOUND)
    file_object = NodeFile.load(file_id)
    # Ensure NodeFile is attached to Node; should be fixed by actions or
    # improved data modeling in future
    if not file_object.node:
        file_object.node = node
        file_object.save()
    download_url = file_object.download_url(node)
    render_url = file_object.render_url(node)
    info_url = file_object.info_url(node)
    file_path = os.path.join(
        settings.UPLOADS_PATH,
        node._primary_key,
        file_name
    )
    # Throw 404 and log error if file not found on disk
    if not os.path.isfile(file_path):
        logger.error('File {} not found on disk.'.format(file_path))
        raise HTTPError(http.NOT_FOUND)
    # Fix: removed an unused ``os.path.splitext`` result (file_ext) that was
    # computed but never read.
    # Get or create rendered file
    cache_file = get_cache_file(
        file_object.filename,
        file_object.latest_version_number(node)
    )
    rendered = get_cache_content(
        node_settings,
        cache_file,
        start_render=True,
        file_path=file_path,
        file_content=None,
        download_path=download_url,
    )
    rv = {
        'file_name': file_name,
        'render_url': render_url,
        'rendered': rendered,
        'info_url': info_url,
    }
    rv.update(_view_project(node, auth))
    return rv
def s3_view(**kwargs):
    """Render the file-view page for an S3 key (optionally a given version).

    :raises HTTPError: 404 when no path is given or the key does not exist.
    :return: dict of template context merged with ``_view_project`` data, or
        a redirect to the file's GUID URL.
    """
    path = kwargs.get('path')
    vid = request.args.get('vid')
    if not path:
        raise HTTPError(http.NOT_FOUND)
    # 'Pre-versioning' marks keys uploaded before bucket versioning existed.
    if vid == 'Pre-versioning':
        vid = 'null'
    node_settings = kwargs['node_addon']
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']
    wrapper = S3Wrapper.from_addon(node_settings)
    key = wrapper.get_wrapped_key(urllib.unquote(path), vid=vid)
    if key is None:
        raise HTTPError(http.NOT_FOUND)
    try:
        guid = S3GuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path))
    # Fix: was a bare ``except:``, which also swallowed SystemExit and
    # KeyboardInterrupt; treat any lookup failure as "not created yet".
    except Exception:
        guid = S3GuidFile(
            node=node,
            path=path,
        )
        guid.save()
    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)
    cache_file_name = get_cache_file_name(path, key.etag)
    urls = build_urls(node, path, etag=key.etag)
    if key.s3Key.size > MAX_RENDER_SIZE:
        render = 'File too large to render; download file to view it'
    else:
        # Check to see if the file has already been rendered.
        render = get_cache_content(node_settings, cache_file_name)
        if render is None:
            file_contents = key.s3Key.get_contents_as_string()
            render = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                remote_path=path,
                file_content=file_contents,
                download_url=urls['download'],
            )
    versions = create_version_list(wrapper, urllib.unquote(path), node)
    rv = {
        'file_name': key.name,
        'rendered': render,
        'download_url': urls['download'],
        'render_url': urls['render'],
        'versions': versions,
        'current': key.version_id,
        'info_url': urls['info'],
        'delete_url': urls['delete'],
        'files_page_url': node.web_url_for('collect_file_trees')
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def project_wiki_view(auth, wname, path=None, **kwargs):
    """Render the wiki page view context for a node.

    :param auth: Auth object for the current request.
    :param wname: Requested wiki page name; "home" is special-cased below.
    :param path: Accepted for URL-rule compatibility; not used here.
    :raises: WIKI_INVALID_VERSION_ERROR for bad ?view=/?compare= values;
        WIKI_PAGE_NOT_FOUND_ERROR / HTTPError(401|403) for missing pages or
        insufficient permission.
    :return: dict of template context merged with ``_view_project`` data.
    """
    node = kwargs['node'] or kwargs['project']
    anonymous = has_anonymous_link(node, auth)
    wiki_name = (wname or '').strip()
    wiki_key = to_mongo_key(wiki_name)
    wiki_page = node.get_wiki_page(wiki_name)
    wiki_settings = node.get_addon('wiki')
    # Editing requires login, a non-registration node, and either write
    # permission or a publicly-editable wiki.
    can_edit = (
        auth.logged_in and not node.is_registration and (
            node.has_permission(auth.user, 'write') or
            wiki_settings.is_publicly_editable
        )
    )
    versions = _get_wiki_versions(node, wiki_name, anonymous=anonymous)

    # Determine panels used in view
    panels = {'view', 'edit', 'compare', 'menu'}
    if request.args and set(request.args).intersection(panels):
        panels_used = [panel for panel in request.args if panel in panels]
        num_columns = len(set(panels_used).intersection({'view', 'edit', 'compare'}))
        # "menu" alone occupies no column; fall back to showing the view panel.
        if num_columns == 0:
            panels_used.append('view')
            num_columns = 1
    else:
        # No panel query args: default two-panel layout.
        panels_used = ['view', 'menu']
        num_columns = 1

    # Validate requested versions against how many versions actually exist.
    try:
        view = wiki_utils.format_wiki_version(
            version=request.args.get('view'),
            num_versions=len(versions),
            allow_preview=True,
        )
        compare = wiki_utils.format_wiki_version(
            version=request.args.get('compare'),
            num_versions=len(versions),
            allow_preview=False,
        )
    except InvalidVersionError:
        raise WIKI_INVALID_VERSION_ERROR

    # ensure home is always lower case since it cannot be renamed
    if wiki_name.lower() == 'home':
        wiki_name = 'home'

    if wiki_page:
        version = wiki_page.version
        is_current = wiki_page.is_current
        content = wiki_page.html(node)
        use_python_render = wiki_page.rendered_before_update
    else:
        # Placeholder values for a page that does not exist yet.
        version = 'NA'
        is_current = False
        content = ''
        use_python_render = False

    if can_edit:
        # Lazily create the private uuid backing the ShareJS document.
        if wiki_key not in node.wiki_private_uuids:
            wiki_utils.generate_private_uuid(node, wiki_name)
        sharejs_uuid = wiki_utils.get_sharejs_uuid(node, wiki_name)
    else:
        # "home" still renders (empty) when missing; other pages must exist.
        if wiki_key not in node.wiki_pages_current and wiki_key != 'home':
            raise WIKI_PAGE_NOT_FOUND_ERROR
        if 'edit' in request.args:
            # Publicly-editable wiki: ask anonymous user to log in (401);
            # otherwise the user simply lacks permission (403).
            if wiki_settings.is_publicly_editable:
                raise HTTPError(http.UNAUTHORIZED)
            raise HTTPError(http.FORBIDDEN)
        sharejs_uuid = None

    # Opens 'edit' panel when home wiki is empty
    if not content and can_edit and wiki_name == 'home':
        panels_used.append('edit')

    # Default versions for view and compare.
    # Fix: computed *after* the empty-home panel adjustment above (it was
    # previously computed before it), so that an auto-opened edit panel
    # correctly defaults the view pane to 'preview' instead of 'current' —
    # matching the ordering used by the newer variant of this view.
    version_settings = {
        'view': view or ('preview' if 'edit' in panels_used else 'current'),
        'compare': compare or 'previous',
    }

    ret = {
        'wiki_id': wiki_page._primary_key if wiki_page else None,
        'wiki_name': wiki_page.page_name if wiki_page else wiki_name,
        'wiki_content': content,
        'use_python_render': use_python_render,
        'page': wiki_page,
        'version': version,
        'versions': versions,
        'sharejs_uuid': sharejs_uuid or '',
        'sharejs_url': settings.SHAREJS_URL,
        'is_current': is_current,
        'version_settings': version_settings,
        'pages_current': _get_wiki_pages_current(node),
        'category': node.category,
        'panels_used': panels_used,
        'num_columns': num_columns,
        'urls': {
            'api': _get_wiki_api_urls(node, wiki_name, {
                'content': node.api_url_for('wiki_page_content', wname=wiki_name),
                'draft': node.api_url_for('wiki_page_draft', wname=wiki_name),
            }),
            'web': _get_wiki_web_urls(node, wiki_name),
            'gravatar': get_gravatar(auth.user, 25),
        },
    }
    # Merge in the standard project serialization last, then expose edit rights.
    ret.update(_view_project(node, auth, primary=True))
    ret['user']['can_edit_wiki_body'] = can_edit
    return ret