def test_view_creates_guid(self, mock_from_addon, mock_wrapped_key):
    """First view of a file creates exactly one GUID; later views reuse it."""
    mock_from_addon.return_value = create_mock_wrapper()
    mock_wrapped_key.return_value = create_mock_key()
    initial_count = S3GuidFile.find().count()
    url = self.project.url + 's3/test.py'
    # First view: should mint a GUID and redirect the client to it.
    res = self.app.get(url, auth=self.user.auth).maybe_follow(auth=self.user.auth)
    guids = S3GuidFile.find()
    # GUID count has been incremented by exactly one
    assert_equal(
        guids.count(),
        initial_count + 1
    )
    # Client has been redirected to the newly created GUID
    assert_equal(
        res.request.path.strip('/'),
        guids[guids.count() - 1]._id
    )
    # Second view: must not create another GUID.
    self.app.get(url, auth=self.user.auth).follow(auth=self.user.auth)
    assert_equal(
        S3GuidFile.find().count(),
        initial_count + 1
    )
def test_view_creates_guid(self, mock_from_addon, mock_wrapped_key):
    """Viewing a file mints one GUID on first access and reuses it afterward."""
    mock_from_addon.return_value = create_mock_wrapper()
    mock_wrapped_key.return_value = create_mock_key()
    count_before = S3GuidFile.find().count()
    expected_count = count_before + 1
    url = self.project.url + 's3/test.py'
    # First view of the file: a GUID is created and the client is redirected.
    res = self.app.get(
        url, auth=self.user.auth).maybe_follow(auth=self.user.auth)
    guids = S3GuidFile.find()
    # GUID count has been incremented by one
    assert_equal(guids.count(), expected_count)
    # Client landed on the GUID's page
    assert_equal(res.request.path.strip('/'), guids[guids.count() - 1]._id)
    # Second view of the file: no additional GUID is created.
    self.app.get(url, auth=self.user.auth).follow(auth=self.user.auth)
    assert_equal(S3GuidFile.find().count(), expected_count)
def test_unique_identifier(self, mock_get):
    """enrich() pulls unique_identifier from the md5 in the metadata payload."""
    response = mock.Mock(ok=True, status_code=200)
    response.json.return_value = {
        'data': {
            'name': 'Morty',
            'extra': {
                'md5': 'Terran it up',
            },
        },
    }
    mock_get.return_value = response
    guid = S3GuidFile(node=self.project, path='/foo/bar')
    guid.enrich()
    assert_equals('Terran it up', guid.unique_identifier)
def test_correct_path(self):
    """path is stored verbatim; waterbutler_path gains a leading slash."""
    guid_file = S3GuidFile(node=self.project, path='baz/foo/bar')
    assert_equals(guid_file.path, 'baz/foo/bar')
    assert_equals(guid_file.waterbutler_path, '/baz/foo/bar')
def test_provider(self):
    """The provider short name for S3 guid files is 's3'."""
    guid_file = S3GuidFile()
    assert_equal('s3', guid_file.provider)
def s3_view(**kwargs):
    """Render the file-view page for an S3 key on a node.

    Looks up (or creates) the GUID record for the file, redirects to the
    GUID URL when appropriate, and otherwise renders the file (from cache
    when available) along with its version list and action URLs.

    :param kwargs: view kwargs injected by the routing layer; expects
        'path', 'node_addon', 'auth', and 'node' or 'project'.
    :raises HTTPError: 404 when no path is given or the key is not found.
    :return: template context dict, or a redirect response to the GUID URL.
    """
    path = kwargs.get('path')
    vid = request.args.get('vid')
    if not path:
        raise HTTPError(http.NOT_FOUND)
    # Legacy marker for keys that predate bucket versioning.
    if vid == 'Pre-versioning':
        vid = 'null'

    node_settings = kwargs['node_addon']
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']

    wrapper = S3Wrapper.from_addon(node_settings)
    # Hoist the unquoted path; it is needed both for the key lookup and
    # for building the version list below.
    unquoted_path = urllib.unquote(path)
    key = wrapper.get_wrapped_key(unquoted_path, vid=vid)
    if key is None:
        raise HTTPError(http.NOT_FOUND)

    # Get-or-create the GUID record for this node/path pair.
    try:
        guid = S3GuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path)
        )
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed. A lookup miss means we create the record.
        guid = S3GuidFile(
            node=node,
            path=path,
        )
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    cache_file_name = get_cache_file_name(path, key.etag)
    urls = build_urls(node, path, etag=key.etag)

    if key.s3Key.size > MAX_RENDER_SIZE:
        render = 'File too large to render; download file to view it'
    else:
        # Check to see if the file has already been rendered.
        render = get_cache_content(node_settings, cache_file_name)
        if render is None:
            file_contents = key.s3Key.get_contents_as_string()
            render = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                file_content=file_contents,
                download_url=urls['download'],
            )

    versions = create_version_list(wrapper, unquoted_path, node)

    rv = {
        'file_name': key.name,
        'rendered': render,
        'download_url': urls['download'],
        'render_url': urls['render'],
        'versions': versions,
        'current': key.version_id,
        'info_url': urls['info'],
        'delete_url': urls['delete'],
        'files_page_url': node.web_url_for('collect_file_trees')
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv
def s3_view(**kwargs):
    """Render the file-view page for an S3 key on a node.

    Looks up (or creates) the GUID record for the file, redirects to the
    GUID URL when appropriate, and otherwise renders the file (from cache
    when available) along with its version list and action URLs.

    :param kwargs: view kwargs injected by the routing layer; expects
        'path', 'node_addon', 'auth', and 'node' or 'project'.
    :raises HTTPError: 404 when no path is given or the key is not found.
    :return: template context dict, or a redirect response to the GUID URL.
    """
    path = kwargs.get('path')
    vid = request.args.get('vid')
    if not path:
        raise HTTPError(http.NOT_FOUND)
    # Legacy marker for keys that predate bucket versioning.
    if vid == 'Pre-versioning':
        vid = 'null'

    node_settings = kwargs['node_addon']
    auth = kwargs['auth']
    node = kwargs['node'] or kwargs['project']

    wrapper = S3Wrapper.from_addon(node_settings)
    # Hoist the unquoted path; it is needed both for the key lookup and
    # for building the version list below.
    unquoted_path = urllib.unquote(path)
    key = wrapper.get_wrapped_key(unquoted_path, vid=vid)
    if key is None:
        raise HTTPError(http.NOT_FOUND)

    # Get-or-create the GUID record for this node/path pair.
    try:
        guid = S3GuidFile.find_one(
            Q('node', 'eq', node) &
            Q('path', 'eq', path))
    except Exception:
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed. A lookup miss means we create the record.
        guid = S3GuidFile(
            node=node,
            path=path,
        )
        guid.save()

    redirect_url = check_file_guid(guid)
    if redirect_url:
        return redirect(redirect_url)

    cache_file_name = get_cache_file_name(path, key.etag)
    urls = build_urls(node, path, etag=key.etag)

    if key.s3Key.size > MAX_RENDER_SIZE:
        render = 'File too large to render; download file to view it'
    else:
        # Check to see if the file has already been rendered.
        render = get_cache_content(node_settings, cache_file_name)
        if render is None:
            file_contents = key.s3Key.get_contents_as_string()
            render = get_cache_content(
                node_settings,
                cache_file_name,
                start_render=True,
                remote_path=path,
                file_content=file_contents,
                download_url=urls['download'],
            )

    versions = create_version_list(wrapper, unquoted_path, node)

    rv = {
        'file_name': key.name,
        'rendered': render,
        'download_url': urls['download'],
        'render_url': urls['render'],
        'versions': versions,
        'current': key.version_id,
        'info_url': urls['info'],
        'delete_url': urls['delete'],
        'files_page_url': node.web_url_for('collect_file_trees')
    }
    rv.update(_view_project(node, auth, primary=True))
    return rv