def deco(*args, **kwds):
    """
    Flask view wrapper attempting Girder-token-based login before the view.

    Merges GET query arguments and POSTed form data; when the merged
    arguments contain ``girder_token``, ``girder_origin`` and
    ``girder_apiRoot``, the token is validated against the Girder API and
    the resolved user is logged in (with the 'guest' role) before invoking
    the wrapped view ``f``.  Redirects to the login page when no user ends
    up in the session.
    """
    # Combine to handle both GET arguments and form data
    c = {}
    c.update(dict(six.iteritems(flask.request.form)))
    c.update(dict(six.iteritems(flask.request.args)))
    log.debug("Combined arguments: %s", c)

    if not {'girder_token', 'girder_origin', 'girder_apiRoot'} \
            .difference(c.keys()):
        g_token = c['girder_token']
        log.debug("G-token: %s", g_token)
        g_origin = c['girder_origin']
        log.debug("G-origin: %s", g_origin)
        g_apiRoot = c['girder_apiRoot']
        log.debug("G-apiRoot: %s", g_apiRoot)
        g_api_header = {'Girder-Token': g_token}

        # Attempt getting current user using supplied token. If it
        # succeeds and matches the given user, log them in here.

        # Get user ID from token
        log.debug('Getting Girder current token info')
        r = requests.get(url_join(g_origin, g_apiRoot, 'token/current'),
                         headers=g_api_header)
        # Parse the response once instead of calling ``r.json()`` twice.
        token_info = r.json()
        if token_info is None:
            # Fixed duplicated word in the flash message
            # ("token credentials token").
            flask.flash("Invalid Girder token credentials", 'error')
            return flask.redirect(
                flask.url_for("login.login") + "?next=" + flask.request.url)
        else:
            g_userId = token_info['userId']

            # Get user label and name from ID
            log.debug("Getting user info from ID")
            r = requests.get(url_join(g_origin, g_apiRoot, 'user', g_userId),
                             headers=g_api_header)
            # Parse once; original re-parsed the body for each field.
            user_model = r.json()
            user_label = user_model['login']
            user_fullname = ' '.join(
                [user_model['firstName'], user_model['lastName']])
            log.debug("Logging user '%s' in", user_fullname)
            # Arbitrarily log the user in as a guest
            LoginMod._login_user(user_label, {
                'fullname': user_fullname,
                'roles': ['guest'],
            })

    if 'user' not in flask.session:
        flask.flash("Login required!", 'error')
        return flask.redirect(
            flask.url_for("login.login") + "?next=" + flask.request.url)
    else:
        # TODO: Check that user has permission, else redirect
        return f(*args, **kwds)
def _request_token(self):
    """
    Request an authentication token.

    Uses the API key when ``self._api_key`` is set (and assumed to be a
    valid API key value); otherwise falls back to requesting an anonymous
    session token.

    :raises AssertionError: Expiration timestamp did not have a UTC
        timezone specifier attached to the end.
    :raises requests.HTTPError: The token request to the server failed.

    :return: token string and expiration timestamp
    :rtype: str, datetime.datetime
    """
    self._log.debug("Requesting new authorization token.")
    if self._api_key:
        r = requests.post(
            url_join(self._api_root, 'api_key/token'),
            data={'key': self._api_key}
        )
        r.raise_for_status()
        # Parse the response body once; original called r.json() twice.
        auth = r.json()['authToken']
        token = auth['token']
        expires = auth['expires']
    else:
        r = requests.get(
            url_join(self._api_root, 'token/session')
        )
        r.raise_for_status()
        body = r.json()
        token = body['token']
        expires = body['expires']
    return token, self._parse_expiration_timestamp(expires)
def test_url_join_simple(self):
    """Single-argument pass-through and multi-argument str() conversion."""
    # A lone parameter comes back unchanged.
    self.assertEqual(url_join('foo'), 'foo')
    # Multiple parameters join with '/'; non-strings are converted.
    actual = url_join('https://foo', 'bar', 1, 'six')
    self.assertEqual(actual, 'https://foo/bar/1/six')
def test_url_join_restart_protocol(self):
    """A later argument carrying a protocol header restarts the URL."""
    eq = nose.tools.assert_equal
    eq(url_join('http://a.b.c', 'ftp://ba.c'), 'ftp://ba.c')
    eq(url_join('', 'a', 'b', 'https://', 'bar', ''), 'https://bar/')
def _request_token(self):
    """
    Request an authentication token.

    Uses the API key when ``self._api_key`` is set (and assumed to be a
    valid API key value); otherwise falls back to requesting an anonymous
    session token.

    :raises AssertionError: Expiration timestamp did not have a UTC
        timezone specifier attached to the end.
    :raises requests.HTTPError: The token request to the server failed.

    :return: token string and expiration timestamp
    :rtype: str, datetime.datetime
    """
    self._log.debug("Requesting new authorization token.")
    if self._api_key:
        r = requests.post(url_join(self._api_root, 'api_key/token'),
                          data={'key': self._api_key})
        r.raise_for_status()
        # Parse the response body once; original called r.json() twice.
        auth = r.json()['authToken']
        token = auth['token']
        expires = auth['expires']
    else:
        r = requests.get(url_join(self._api_root, 'token/session'))
        r.raise_for_status()
        body = r.json()
        token = body['token']
        expires = body['expires']
    return token, self._parse_expiration_timestamp(expires)
def test_url_join_restart_slash(self):
    """An argument beginning with '/' restarts the joined path."""
    eq = nose.tools.assert_equal
    eq(url_join("foo", '/bar', 'foo'), '/bar/foo')
    eq(url_join("foo", '/bar', '/foo'), '/foo')
    eq(url_join("foo", '/bar', '/'), '/')
    eq(url_join("foo", '/bar', '/', 'foo'), '/foo')
def test_url_join_protocol_handling(self):
    """Protocol-header arguments are treated specially by url_join."""
    cases = [
        (('http://',), 'http://'),
        (('http://', 'https://'), 'https://'),
        # Not that this will probably ever be an intended result; this
        # pins down the documented logic for a malformed header.
        (('http://', 'https:/'), 'http://https:'),
    ]
    for args, expected in cases:
        self.assertEqual(url_join(*args), expected)
def test_url_join_restart_slash(self):
    """Arguments beginning with '/' restart the joined path."""
    for args, expected in [
        (("foo", '/bar', 'foo'), '/bar/foo'),
        (("foo", '/bar', '/foo'), '/foo'),
        (("foo", '/bar', '/'), '/'),
        (("foo", '/bar', '/', 'foo'), '/foo'),
    ]:
        self.assertEqual(url_join(*args), expected)
def test_url_join_simple(self):
    """Basic joining: one argument and a multi-type argument list."""
    # One parameter: returned as-is.
    self.assertEqual(url_join('foo'), 'foo')
    # Multi-parameter: joined by '/', with non-strings converted to str.
    joined = url_join('https://foo', 'bar', 1, 'six')
    self.assertEqual(joined, 'https://foo/bar/1/six')
def test_url_join_restart_mixed(self):
    """Later restart markers (protocol or slash) win over earlier ones."""
    actual = url_join("foo", '/bar', 'https://foo')
    self.assertEqual(actual, 'https://foo')
    actual = url_join("foo", 'https://foo', '/bar')
    self.assertEqual(actual, '/bar')
def test_url_join_empty_leading(self):
    """Leading empty arguments are dropped from the joined result."""
    for args, expected in [
        (('', 'foo'), 'foo'),
        (('', '', 'foo', 'bar'), 'foo/bar'),
    ]:
        self.assertEqual(url_join(*args), expected)
def test_url_join_empty_middle(self):
    """Empty arguments between real components are skipped."""
    for args, expected in [
        (('foo', '', 'bar'), 'foo/bar'),
        (('foo', '', '', 'bar', '', 'baz'), 'foo/bar/baz'),
    ]:
        self.assertEqual(url_join(*args), expected)
def test_url_join_empty_mixed(self):
    """Mixed empty args collapse; a trailing empty leaves a slash."""
    with_trailing = url_join('', '', 'b', '', 'c', '', '', 'a', '', '')
    self.assertEqual(with_trailing, 'b/c/a/')
    without_trailing = url_join('', '', 'b', '', 'c', '', '', 'a')
    self.assertEqual(without_trailing, 'b/c/a')
def test_url_join_restart_protocol(self):
    """A protocol header in a later argument restarts URL concatenation."""
    self.assertEqual(url_join('http://a.b.c', 'ftp://ba.c'), 'ftp://ba.c')
    self.assertEqual(url_join('', 'a', 'b', 'https://', 'bar', ''),
                     'https://bar/')
def test_url_join_empty_all(self):
    """All-empty argument lists join to the empty string."""
    for args in [('',), ('', ''), ('', '', '')]:
        self.assertEqual(url_join(*args), '')
def get_bytes(self):
    """
    Get the bytes of the file stored in Girder.

    :return: Byte stream for this data element.
    :rtype: bytes

    :raises AssertionError: Content received was not the expected length
        in bytes (Content-Length header vs. actual content length).
    :raises requests.HTTPError: The ID does not refer to a file in
        Girder.
    """
    # Check if token has expired, if so get new one
    # Download file bytes from girder
    self._log.debug("Getting bytes for file ID %s", self.file_id)
    token_header = self.token_manager.get_requests_header()
    r = requests.get(url_join(self.api_root, 'file', self.file_id,
                              'download'),
                     params={'contentDisposition': 'inline'},
                     headers=token_header)
    r.raise_for_status()
    content = r.content
    expected_length = int(r.headers['Content-Length'])
    # Fixed typo in the assertion message ("no the" -> "not the").
    assert len(content) == expected_length, \
        "Content received not the expected length: %d != %d (expected)" \
        % (len(content), expected_length)
    return content
def q(offset, limit):
    """Yield (file ID, MIME type) pairs for one page of item files."""
    resp = requests.get(url_join(api_root, 'item', item_id, 'files'),
                        params={'offset': offset, 'limit': limit},
                        headers=tm.get_requests_header())
    resp.raise_for_status()
    for fm in resp.json():
        yield fm['_id'], fm['mimeType']
def q(offset, limit):
    """Yield (file ID, MIME type) for each file model in one result page."""
    page = requests.get(url_join(api_root, 'item', item_id, 'files'),
                        params={'offset': offset, 'limit': limit},
                        headers=tm.get_requests_header())
    page.raise_for_status()
    for model in page.json():
        yield model['_id'], model['mimeType']
def test_url_join_protocol_handling(self):
    """url_join's handling of protocol-header arguments."""
    self.assertEqual(url_join('http://'), 'http://')
    self.assertEqual(url_join('http://', 'https://'), 'https://')
    # Not that this will probably ever be an intended result; this
    # exercises the documented logic for a malformed protocol header.
    self.assertEqual(url_join('http://', 'https:/'), 'http://https:')
def q(offset, limit):
    """Yield item IDs for one page of items under the folder."""
    resp = requests.get(url_join(api_root, 'item'),
                        params={'folderId': folder_id,
                                'offset': offset,
                                'limit': limit},
                        headers=tm.get_requests_header())
    resp.raise_for_status()
    for item in resp.json():
        yield item['_id']
def q(offset, limit):
    """Yield the '_id' of each item model in one result page."""
    query = {'folderId': folder_id, 'offset': offset, 'limit': limit}
    page = requests.get(url_join(api_root, 'item'),
                        params=query,
                        headers=tm.get_requests_header())
    page.raise_for_status()
    for model in page.json():
        yield model['_id']
def q(offset, limit):
    """Yield sub-folder IDs for one page of folders under the folder."""
    resp = requests.get(url_join(api_root, 'folder'),
                        params={'parentType': 'folder',
                                'parentId': folder_id,
                                'offset': offset,
                                'limit': limit},
                        headers=tm.get_requests_header())
    resp.raise_for_status()
    for folder in resp.json():
        yield folder['_id']
def q(offset, limit):
    """Yield the '_id' of each child folder model in one result page."""
    query = {'parentType': 'folder', 'parentId': folder_id,
             'offset': offset, 'limit': limit}
    page = requests.get(url_join(api_root, 'folder'),
                        params=query,
                        headers=tm.get_requests_header())
    page.raise_for_status()
    for model in page.json():
        yield model['_id']
def test_url_join_restart_slash(self):
    """A '/'-prefixed argument restarts the path from that point."""
    self.assertEqual(url_join("foo", '/bar', 'foo'), '/bar/foo')
    self.assertEqual(url_join("foo", '/bar', '/foo'), '/foo')
    self.assertEqual(url_join("foo", '/bar', '/'), '/')
    self.assertEqual(url_join("foo", '/bar', '/', 'foo'), '/foo')
def content_type(self):
    """
    MIME type of the wrapped Girder file, fetched lazily and cached on
    the instance.
    """
    # Check if token has expired, if so get new one
    # Get file model, which has mimetype info
    if self._content_type is None:
        self._log.debug("Getting content type for file ID %s",
                        self.file_id)
        hdr = self.token_manager.get_requests_header()
        resp = requests.get(url_join(self.api_root, 'file', self.file_id),
                            headers=hdr)
        resp.raise_for_status()
        self._content_type = resp.json()['mimeType']
    return self._content_type
def get_file_model(self):
    """
    Fetch this file's model JSON from the Girder server.

    :return: file model model as a dictionary, or None when the file
        does not exist on the server (server responds 400).
    :rtype: dict | None
    """
    resp = requests.get(url_join(self.api_root, 'file', self.file_id),
                        headers=self.token_manager.get_requests_header())
    # Girder responds 400 for an unknown file ID -> no model.
    if resp.status_code == 400:
        return None
    # Exception for any other error status
    resp.raise_for_status()
    return resp.json()
def test_url_join_restart_mixed(self):
    """Interaction of protocol-based and slash-based restarts."""
    for args, expected in [
        (("foo", '/bar', 'https://foo'), 'https://foo'),
        (("foo", 'https://foo', '/bar'), '/bar'),
    ]:
        self.assertEqual(url_join(*args), expected)
def test_url_join_empty_all(self):
    """Joining only empty strings yields the empty string."""
    eq = nose.tools.assert_equal
    eq(url_join(''), '')
    eq(url_join('', ''), '')
    eq(url_join('', '', ''), '')
def test_url_join_empty_leading(self):
    """Leading empty arguments do not contribute separators."""
    eq = nose.tools.assert_equal
    eq(url_join('', 'foo'), 'foo')
    eq(url_join('', '', 'foo', 'bar'), 'foo/bar')
def test_url_join_empty_mixed(self):
    """Interior empties collapse; a trailing empty keeps a final slash."""
    for args, expected in [
        (('', '', 'b', '', 'c', '', '', 'a', '', ''), 'b/c/a/'),
        (('', '', 'b', '', 'c', '', '', 'a'), 'b/c/a'),
    ]:
        self.assertEqual(url_join(*args), expected)
def test_url_join_empty_last(self):
    """A trailing empty argument leaves a trailing slash."""
    actual = url_join('foo', '')
    self.assertEqual(actual, "foo/")
def test_url_join_empty_middle(self):
    """Empty arguments between components do not add extra slashes."""
    pair = url_join('foo', '', 'bar')
    self.assertEqual(pair, 'foo/bar')
    triple = url_join('foo', '', '', 'bar', '', 'baz')
    self.assertEqual(triple, 'foo/bar/baz')
def test_url_join_empty_last(self):
    """A final empty component results in a trailing slash."""
    self.assertEqual(url_join('foo', ''), "foo/")
def test_url_join_empty_all(self):
    """All-empty inputs of any arity produce the empty string."""
    for args in [('',), ('', ''), ('', '', '')]:
        self.assertEqual(url_join(*args), '')
def test_url_join_empty_middle(self):
    """Interior empty arguments are skipped during joining."""
    eq = nose.tools.assert_equal
    eq(url_join('foo', '', 'bar'), 'foo/bar')
    eq(url_join('foo', '', '', 'bar', '', 'baz'), 'foo/bar/baz')
def test_url_join_restart_mixed(self):
    """The most recent restart marker (protocol or '/') wins."""
    eq = nose.tools.assert_equal
    eq(url_join("foo", '/bar', 'https://foo'), 'https://foo')
    eq(url_join("foo", 'https://foo', '/bar'), '/bar')
def test_url_join_empty_last(self):
    """A trailing empty argument yields a trailing slash."""
    joined = url_join('foo', '')
    nose.tools.assert_equal(joined, "foo/")
def test_url_join_empty_leading(self):
    """Empty arguments at the front are dropped."""
    single = url_join('', 'foo')
    self.assertEqual(single, 'foo')
    double = url_join('', '', 'foo', 'bar')
    self.assertEqual(double, 'foo/bar')
def test_url_join_empty_mixed(self):
    """Interior empties collapse; only a trailing empty adds a slash."""
    eq = nose.tools.assert_equal
    eq(url_join('', '', 'b', '', 'c', '', '', 'a', '', ''), 'b/c/a/')
    eq(url_join('', '', 'b', '', 'c', '', '', 'a'), 'b/c/a')