def test_get_response_contents():
    """Exercise get_response_contents() for error and success status codes."""
    response = Response()
    response.status_code = 111

    # Non-2xx with a plain body: message is "[<code>] <body>".
    response._content = b'foo'
    with pytest.raises(TravisError) as exception_info:
        get_response_contents(response)
    assert str(exception_info.value) == '[111] foo'

    # Non-2xx with an empty body: a generic multi-line message is produced.
    response._content = ''
    with pytest.raises(TravisError) as exception_info:
        get_response_contents(response)
    expected = textwrap.dedent('''
        [111] Unexpected error
            Possible reasons are:
             - Communication with Travis CI has failed.
             - Insufficient permissions.
             - Invalid contents returned.
    ''')[1:]
    assert str(exception_info.value) == expected

    # Non-2xx with a JSON body: its "error" field becomes the message.
    response._content = b'{"error": "foo"}'
    with pytest.raises(TravisError) as exception_info:
        get_response_contents(response)
    assert str(exception_info.value) == '[111] foo'

    # 2xx: the decoded JSON payload is returned as-is.
    response.status_code = 200
    assert get_response_contents(response) == {'error': 'foo'}
def test_file_update():
    """Saving a renamed/moved File issues a PATCH, then refreshes the parent."""
    account = Account.create_from_data(json.loads(helpers.account))
    file_data = json.loads(helpers.file_data)
    file_obj = File.create_from_data(file_data, parent_resource=account)

    with patch('kloudless.resources.request') as mock_req:
        # First canned response: the updated file; second: the refreshed account.
        updated = file_data.copy()
        updated['name'] = 'NewFileName'
        patch_resp = Response()
        patch_resp._content = json.dumps(updated)
        refresh_resp = Response()
        refresh_resp._content = helpers.account
        mock_req.side_effect = (patch_resp, refresh_resp)

        file_obj.name = 'NewFileName'
        file_obj.parent_id = 'root'
        file_obj.save()

        mock_req.assert_has_calls([
            # Updating the file itself.
            call(file_obj._api_session.patch,
                 'accounts/%s/files/%s' % (account.id, file_data['id']),
                 params={},
                 data={'name': u'NewFileName', 'parent_id': 'root'},
                 configuration=file_obj._configuration),
            # Refreshing the parent resource.
            call(account._api_session.get,
                 'accounts/%s' % account.id,
                 configuration=account._configuration),
        ])
Beispiel #3
0
    def test_domain_cookie(self, mock_request):
        """
        Test domain cookies without 'Path'
        """
        first = Response()
        first.status_code = 200
        first._content = 'Mocked response content'
        first.headers = {
            'Set-Cookie': ('a=apple; Domain=fruits.com;, '
                           'b=banana; Domain=fruits.com;, '
                           'c=citrus; Domain=mediterranean.fruits.com;, '
                           'm=mango; Domain=tropical.fruits.com;')
        }
        first.url = 'http://mediterranean.fruits.com/path0'
        mock_request.return_value = first

        # Initial request: no cookies stored yet.
        get('http://mediterranean.fruits.com/path0')
        mock_request.assert_called_with('GET', 'http://mediterranean.fruits.com/path0', allow_redirects=True)

        # Same sub-domain: 'a', 'b' and 'c' should be present.
        get('http://mediterranean.fruits.com/path1')
        mock_request.assert_called_with('GET', 'http://mediterranean.fruits.com/path1', allow_redirects=True,
            cookies={'a': 'apple', 'b': 'banana', 'c': 'citrus'})

        # Sibling sub-domain: 'a', 'b' and 'm' should be present.
        get('http://tropical.fruits.com/path2')
        mock_request.assert_called_with('GET', 'http://tropical.fruits.com/path2', allow_redirects=True,
            cookies={'a': 'apple', 'b': 'banana', 'm': 'mango'})

        # Unrelated sub-domain: only the domain-wide 'a' and 'b' apply.
        get('http://www.fruits.com/path3')
        mock_request.assert_called_with('GET', 'http://www.fruits.com/path3', allow_redirects=True,
            cookies={'a': 'apple', 'b': 'banana'})

        # Bare domain: only 'a' and 'b' apply.
        get('http://fruits.com/path4')
        mock_request.assert_called_with('GET', 'http://fruits.com/path4', allow_redirects=True,
            cookies={'a': 'apple', 'b': 'banana'})

        # Entirely different domain: no cookies should be sent.
        get('http://animals.com/path5')
        mock_request.assert_called_with('GET', 'http://animals.com/path5', allow_redirects=True)

        second = Response()
        second.status_code = 200
        second._content = 'Mocked response content'
        second.headers = {
            'Set-Cookie': ('a=apricot; Domain=fruits.com;, '
                           'b=; Domain=fruits.com;, '
                           'm=melon; Domain=tropical.fruits.com;')
        }
        second.url = 'http://tropical.fruits.com/path0'
        mock_request.return_value = second

        # The updated cookies only apply from the *next* request onwards.
        get('http://tropical.fruits.com/path0')
        mock_request.assert_called_with('GET', 'http://tropical.fruits.com/path0', allow_redirects=True,
            cookies={'a': 'apple', 'b': 'banana', 'm': 'mango'})

        get('http://tropical.fruits.com/path1')
        mock_request.assert_called_with('GET', 'http://tropical.fruits.com/path1', allow_redirects=True,
            cookies={'a': 'apricot', 'b': '', 'm': 'melon'})
Beispiel #4
0
    def forward_request(self, method, path, data, headers):
        """Intercept DynamoDB API calls; answer TTL and tagging actions locally.

        Returns a ``Response`` for actions served by this listener, or ``True``
        to let the proxy forward the request unchanged.
        """
        data = json.loads(to_str(data))

        # Optionally inject throughput errors (fault-injection testing).
        if random.random() < config.DYNAMODB_ERROR_PROBABILITY:
            return error_response_throughput()

        def local_response(body):
            # Build a 200 response served directly by this listener.
            resp = Response()
            resp.status_code = 200
            resp._content = body
            fix_headers_for_updated_response(resp)
            return resp

        action = headers.get('X-Amz-Target')
        mutating_actions = ('%s.PutItem' % ACTION_PREFIX,
                            '%s.UpdateItem' % ACTION_PREFIX,
                            '%s.DeleteItem' % ACTION_PREFIX)
        if action in mutating_actions:
            # Remember the pre-existing item in a thread-local so that
            # return_response can tell MODIFY (existed) apart from INSERT (new).
            ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(data)
        elif action == '%s.UpdateTimeToLive' % ACTION_PREFIX:
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            spec = data['TimeToLiveSpecification']
            self._table_ttl_map[data['TableName']] = {
                'AttributeName': spec['AttributeName'],
                'Status': spec['Enabled']
            }
            return local_response(json.dumps({'TimeToLiveSpecification': spec}))
        elif action == '%s.DescribeTimeToLive' % ACTION_PREFIX:
            ttl_entry = self._table_ttl_map.get(data['TableName'])
            if ttl_entry is not None:
                description = {
                    'AttributeName': ttl_entry['AttributeName'],
                    'TimeToLiveStatus': 'ENABLED' if ttl_entry['Status'] else 'DISABLED'
                }
            else:  # TTL for dynamodb table not set
                description = {'TimeToLiveStatus': 'DISABLED'}
            return local_response(json.dumps({'TimeToLiveDescription': description}))
        elif action in ('%s.TagResource' % ACTION_PREFIX, '%s.UntagResource' % ACTION_PREFIX):
            return local_response('')  # returns an empty body on success.
        elif action == '%s.ListTagsOfResource' % ACTION_PREFIX:
            # TODO: mocked and returns an empty list of tags for now.
            return local_response(json.dumps({'Tags': []}))

        # Anything else: let the proxy forward the request unmodified.
        return True
Beispiel #5
0
    def test_ready_to_view(self, mock_request):
        """ready_to_view() is truthy for a normal document, falsy on error status."""
        mocked = Response()
        mocked.status_code = 200
        mocked._content = json.dumps(test_document)
        mock_request.return_value = mocked

        # Document in its normal state: result must exist and be truthy.
        outcome = self.api.ready_to_view(test_document['id'])
        self.assertIsNotNone(outcome)
        self.assertTrue(bool(outcome))

        # Same document but flagged as errored: result becomes falsy.
        mocked._content = json.dumps(dict(test_document, status='error'))
        outcome = self.api.ready_to_view(test_document['id'])
        self.assertFalse(bool(outcome))
Beispiel #6
0
    def test_expired_cookie(self, mock_request):
        """Cookies stop being sent once their expires/max-age has passed."""
        reply = Response()
        reply.status_code = 200
        reply._content = 'Mocked response content'
        # 'a' expires ~3s from now; 'b' lives for 6s.
        reply.headers = {
            'Set-Cookie': 'a=apple; expires=%s;, b=banana; max-age=6' % _getdate(future=3)
        }
        reply.url = 'http://www.fruits.com'
        mock_request.return_value = reply

        get('http://www.fruits.com/path')

        # +1s: both cookies are still valid.
        dummycache_cache.datetime.now = lambda: datetime.now() + timedelta(seconds=1)
        get('http://www.fruits.com/path')
        mock_request.assert_called_with('GET', 'http://www.fruits.com/path', allow_redirects=True,
            cookies={'a': 'apple', 'b': 'banana'})

        # +4s: 'a' has expired, 'b' survives.
        dummycache_cache.datetime.now = lambda: datetime.now() + timedelta(seconds=4)
        get('http://www.fruits.com/path')
        mock_request.assert_called_with('GET', 'http://www.fruits.com/path', allow_redirects=True,
            cookies={'b': 'banana'})

        # +11s: everything has expired; no cookies at all.
        dummycache_cache.datetime.now = lambda: datetime.now() + timedelta(seconds=11)
        get('http://www.fruits.com/path')
        mock_request.assert_called_with('GET', 'http://www.fruits.com/path', allow_redirects=True)
    def test_disable_default_redirect_cache(self, mock_request):
        """
        Test disable default redirect cache (by setting default redirect cache to None)
        """
        redirect = Response()
        redirect.url = 'http://www.test.com/neverseemeagain'
        redirect.status_code = 301
        redirect.headers = {'Location': 'http://www.test.com/redirect_here'}

        final = Response()
        final.url = 'http://www.test.com/redirect_here'
        final.status_code = 200
        final._content = 'Mocked response content'
        final.headers = {'Vary': 'Accept'}
        final.history = [redirect]

        mock_request.return_value = final

        # With the redirect cache active, the second request goes straight
        # to the redirect target.
        get('http://www.test.com/neverseemeagain')
        mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
        get('http://www.test.com/neverseemeagain')
        mock_request.assert_called_with('GET', 'http://www.test.com/redirect_here', allow_redirects=True)

        set_default_redirect_cache(None)

        # With the cache disabled, every request hits the original URL again.
        get('http://www.test.com/neverseemeagain')
        mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
        get('http://www.test.com/neverseemeagain')
        mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
    def test_disable_default_cache(self, mock_request):
        """
        Test disable default cache (by setting default cache to None)
        """
        cached = Response()
        cached.status_code = 200
        cached._content = 'Mocked response content'
        cached.headers = {'Cache-Control': 'max-age=100'}
        mock_request.return_value = cached

        # While the default cache is active, only the first request goes out.
        get('http://www.test.com/path')
        self.assertEqual(mock_request.call_count, 1)
        get('http://www.test.com/path')
        self.assertEqual(mock_request.call_count, 1)

        set_default_cache(None)

        # Without a cache every call results in a fresh request.
        for expected_count in (2, 3, 4):
            get('http://www.test.com/path')
            self.assertEqual(mock_request.call_count, expected_count)
Beispiel #9
0
    def get(self):
        """Serve ``self.url`` as a local file wrapped in a ``Response``.

        Returns:
            Response: a fake response carrying the file contents with
            status 200.

        Raises:
            ConnectionError: if the file cannot be read; the partially
            built 404 response is discarded by the raise.
        """
        r = Response()
        try:
            # Use a context manager so the file handle is always closed
            # (previously open() was never closed — a resource leak).
            with open(self.url) as f:
                r._content = f.read()
            #: Integer Code of responded HTTP Status.
            r.status_code = 200
        except IOError as e:
            r.status_code = 404
            raise ConnectionError(e)

        r._content_consumed = True

        #: Final URL location of Response.
        r.url = self.url

        # NOTE(review): the three assignments below set attributes on *self*
        # rather than on the response ``r``, even though the comments describe
        # response attributes — presumably intentional (self doubles as the
        # request object), but worth confirming.
        #: Resulting :class:`HTTPError` of request, if one occurred.
        self.error = None

        #: Encoding to decode with when accessing r.content.
        self.encoding = None

        #: The :class:`Request <Request>` that created the Response.
        self.request = self

        # Return the response.
        return r
Beispiel #10
0
    def test_get_301_only_once(self, mock_request):
        """Once a 301 is seen, later requests skip straight to the target URL."""
        moved = Response()
        moved.url = 'http://www.test.com/neverseemeagain'
        moved.status_code = 301
        moved.headers = {'Location': 'http://www.test.com/redirect_here'}

        target = Response()
        target.url = 'http://www.test.com/redirect_here'
        target.status_code = 200
        target._content = 'Mocked response content'
        target.headers = {'Vary': 'Accept'}
        target.history = [moved]

        mock_request.return_value = target

        # First request goes to the original URL and follows the redirect.
        r = get('http://www.test.com/neverseemeagain')
        self.assertEqual(mock_request.call_count, 1)
        mock_request.assert_called_with('GET', 'http://www.test.com/neverseemeagain', allow_redirects=True)
        self.assertEqual(r.status_code, 200)

        # Second request must not hit the 301 URL again.
        r = get('http://www.test.com/neverseemeagain')
        self.assertEqual(mock_request.call_count, 2)
        mock_request.assert_called_with('GET', 'http://www.test.com/redirect_here', allow_redirects=True)
        self.assertEqual(r.status_code, 200)
 def request(method, url, **kwargs):
     """Always deny: mimic a 403 'permission check failed' JSON response."""
     denied = Response()
     denied.status_code = 403
     denied.encoding = 'application/json'
     denied._content = '"Unauthorized: upload_view failed permission check"'
     denied.reason = '403 Forbidden'
     return denied
    def test_iterate(self):
        """iterate() yields read-only model instances built from the JSON list."""
        people = [
            {'uuid': 'person1', 'age': 1, 'name': 'person1'},
            {'uuid': 'person2', 'age': 2, 'name': 'person2'},
        ]
        with patch.object(self.rsm, 'mk_request') as mock:
            canned = Response()
            canned.encoding = 'utf-8'
            canned._content = json.dumps(people)
            mock.return_value = canned

            person1, person2 = self.rsm.iterate(TestPerson)

            # Each JSON record maps onto a read-only TestPerson instance.
            for person, record in zip((person1, person2), people):
                self.assertEqual(person.uuid, record['uuid'])
                self.assertEqual(person.age, record['age'])
                self.assertEqual(person.name, record['name'])
                self.assertTrue(person.is_read_only())

            mock.assert_called_with(
                'GET', 'http://www.example.org/repos/foo/%s.json' % (
                    fqcn(TestPerson),))
    def test_cookie(self, mock_request):
        """
        Test that each session has its own cookie "sandbox".
        """
        canned = Response()
        canned.status_code = 200
        canned._content = 'Mocked response content'
        canned.headers = {'Set-Cookie': 'name=value'}
        canned.url = 'http://www.test.com/path'
        mock_request.return_value = canned

        s0 = Session()
        s1 = Session()
        url = 'http://www.test.com/path'

        # A fresh session sends no cookie on its first request, then the
        # cookie set by the response on every following one.
        for session in (s0, s1):
            session.get(url)
            mock_request.assert_called_with('GET', url, allow_redirects=True)
            session.get(url)
            mock_request.assert_called_with('GET', url, allow_redirects=True, cookies={'name': 'value'})

        # s0 keeps its own cookie jar even after s1 made requests.
        s0.get(url)
        mock_request.assert_called_with('GET', url, allow_redirects=True, cookies={'name': 'value'})
        s0.get(url)
        mock_request.assert_called_with('GET', url, allow_redirects=True, cookies={'name': 'value'})
Beispiel #14
0
    def testCreateUrl(self):
        """PiwikAPI builds the expected URL/query params and decodes the JSON reply."""
        expected_params = {
            "module": "API",
            "method": "Referers.getKeywords",
            "format": "json",
            "idSite": 3,
            "date": "yesterday",
            "period": "day",
            "token_auth": "1231",
            "filter_limit": 10,
        }

        def _mock_get(url, **kwargs):
            # Verify the target URL and the query parameters the client built.
            self.assertEquals("http://demo.piwik.org/", url)
            self.assertEquals(expected_params, kwargs.get("params"))
            return response

        response = Response()
        response.status_code = 200
        response._content = '{"result":"success", "xxx":"aaa"}'

        # Monkey-patch requests.get for the duration of the call.
        old_get = requests.get
        try:
            requests.get = _mock_get
            api = PiwikAPI("http://demo.piwik.org/", "1231")
            self.assertEquals(
                {"result": "success", "xxx": "aaa"},
                api.Referers.getKeywords(idSite=3, date="yesterday", period="day", filter_limit=10),
            )
        finally:
            requests.get = old_get
Beispiel #15
0
def make_mock_response(filename, status_code=None):
    """Load a fixture file and wrap its text in a fake ``Response``.

    Args:
        filename: fixture file name inside ``resource_location``.
        status_code: HTTP status for the response; falls back to 200.

    Returns:
        Response: utf-8 response whose body is the encoded file contents.
    """
    mocked = Response()
    mocked.status_code = status_code or 200
    mocked.encoding = "utf-8"
    fixture_path = os.path.join(resource_location, filename)
    with open(fixture_path) as text:
        mocked._content = text.read().encode()
    return mocked
 def request(method, url, **kwargs):
     """Route a mocked ``requests`` call through the local WebTest apps.

     Requests whose URL belongs to the API app are replayed against
     ``api`` (optionally with the caller's Basic auth); everything else
     goes to ``app``.  The WebTest response is then converted back into
     a ``requests.Response`` object.
     """
     # WebTest expects the request body/query under 'params', not 'data'.
     if 'data' in kwargs:
         kwargs['params'] = kwargs.pop('data')
     elif 'params' in kwargs and kwargs['params'] is None:
         kwargs.pop('params')
     # Pull out auth before stripping requests-only keyword arguments.
     auth = None
     if 'auth' in kwargs:
         auth = kwargs.pop('auth')
     # Drop keyword arguments that WebTest's _gen_request does not accept.
     for i in ['auth', 'allow_redirects', 'stream']:
         if i in kwargs:
             kwargs.pop(i)
     if app.app.registry.api_url in url:
         if auth:
             # Temporarily switch the API app to the caller's credentials,
             # restoring the previous authorization afterwards.
             authorization = api.authorization
             api.authorization = ('Basic', auth)
         resp = api._gen_request(method.upper(), url, expect_errors=True, **kwargs)
         if auth:
             api.authorization = authorization
     else:
         resp = app._gen_request(method.upper(), url, expect_errors=True, **kwargs)
     # Re-package the WebTest response as a requests.Response.
     response = Response()
     response.status_code = resp.status_int
     response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
     response.encoding = get_encoding_from_headers(response.headers)
     response.raw = resp
     response._content = resp.body
     response.reason = resp.status
     # requests.Response.url is expected to be text, not bytes.
     if isinstance(url, bytes):
         response.url = url.decode('utf-8')
     else:
         response.url = url
     response.request = resp.request
     return response
Beispiel #17
0
    def create_mock_response(cls, status_code, data, filter=None, order_by=None, page=None, error=None, headers=None):
        """ Build a fake response

            Args:
                status_code: the status code
                data: the NURESTObject, or a list of them, to serialize
                filter: a string representing a filter (unused in the body)
                order_by: a string representing an order by (unused in the body)
                page: a page number (unused in the body)
                error: unused; kept for signature compatibility
                headers: optional dict of headers to attach to the response

            Returns:
                MagicMock: a mock whose return value is the fake Response.
        """

        # Serialize a list of objects or a single object; falsy ``data``
        # yields a JSON ``null`` body.  isinstance (rather than the previous
        # ``type(data) == list``) also accepts list subclasses.
        if isinstance(data, list):
            content = [obj.to_dict() for obj in data]
        elif data:
            content = data.to_dict()
        else:
            content = None

        response = Response()
        response.status_code = status_code
        response._content = json.dumps(content)

        if headers:
            response.headers = headers

        return MagicMock(return_value=response)
Beispiel #18
0
    def test_filter_existing__removes_duplicates(self, http):
        """remove_existing() drops problems already present as review comments."""
        canned = Response()
        canned._content = load_fixture('comments_current.json')
        http.return_value = canned

        gh = Github()
        problems = Problems()
        review = Review(gh, 2)

        filename_1 = "Routing/Filter/AssetCompressor.php"
        filename_2 = "View/Helper/AssetCompressHelper.php"

        # One of each pair below already exists in the fixture comments.
        problems.add(filename_1, 87, 'A pithy remark')
        problems.add(filename_1, 87, 'Something different')
        problems.add(filename_2, 88, 'I <3 it')
        problems.add(filename_2, 89, 'Not such a good comment')

        review.load_comments()
        review.remove_existing(problems)

        # Only the non-duplicate problem survives for each file.
        remaining = problems.all(filename_1)
        eq_(1, len(remaining))
        eq_(remaining[0], Comment(filename_1, 87, 87, 'Something different'))

        remaining = problems.all(filename_2)
        eq_(1, len(remaining))
        eq_(remaining[0], Comment(filename_2, 88, 88, 'I <3 it'))
Beispiel #19
0
    def test_get_301_circular_redirect(self, mock_request):
        """A cached circular 301 chain raises TooManyRedirects on replay."""
        hop_a = Response()
        hop_a.url = 'http://www.test.com/path0'
        hop_a.status_code = 301
        hop_a.headers = {'Location': 'http://www.test.com/path1'}

        hop_b = Response()
        hop_b.url = 'http://www.test.com/path1'
        hop_b.status_code = 301
        hop_b.headers = {'Location': 'http://www.test.com/path0'}

        final = Response()
        final.url = 'http://www.test.com/path2'
        final.status_code = 200
        final._content = 'Mocked response content'
        final.history = [hop_a, hop_b]

        mock_request.return_value = final

        # First fetch succeeds and records the (circular) redirect chain.
        r = get('http://www.test.com/path0')
        self.assertEqual(mock_request.call_count, 1)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.content, 'Mocked response content')

        # Replaying the cached redirects now loops forever and must raise.
        with self.assertRaises(TooManyRedirects):
            get('http://www.test.com/path0')
Beispiel #20
0
    def test_get_document_content(self, mock_request):
        """get_document_content streams the body; only .pdf/.zip extensions allowed."""
        reply = Response()
        reply.status_code = 200
        reply.headers['Content-Type'] = 'text/plain'
        reply._content = 'test'
        reply.raw = six.BytesIO('test')
        mock_request.return_value = reply

        # Default call: body is streamed out and the mimetype returned.
        stream = six.BytesIO()
        mimetype = self.api.get_document_content(stream, test_document['id'])
        self.assertEqual(stream.getvalue(), reply._content)
        self.assertEqual(mimetype, reply.headers['Content-Type'])

        # Both supported extensions behave the same way.
        for extension in ('.pdf', '.zip'):
            stream = six.BytesIO()
            self.api.get_document_content(stream,
                                          test_document['id'],
                                          extension=extension)
            self.assertEqual(stream.getvalue(), reply._content)

        # Any other extension must be rejected.
        stream = six.BytesIO()
        self.assertRaises(ValueError,
                          self.api.get_document_content,
                          stream,
                          test_document['id'],
                          extension='.docx')
 def get_response(content, code=200):
     """Wrap *content* (a string or a file-like object) in a fake ``Response``.

     Args:
         content: response body, or any object exposing ``read()``.
         code: HTTP status code, 200 by default.
     """
     body = content.read() if hasattr(content, 'read') else content
     fake = Response()
     fake._content = body
     fake.status_code = code
     return fake
Beispiel #22
0
    def test_create_session(self, mock_request):
        """create_session posts the session parameters and returns the new session."""
        reply = Response()
        reply.status_code = 201
        reply._content = json.dumps(test_session)
        mock_request.return_value = reply

        expires_at = datetime.datetime.utcnow()
        doc_id = test_document['id']
        result = self.api.create_session(doc_id,
                                         duration=600,
                                         expires_at=expires_at,
                                         is_downloadable=True,
                                         is_text_selectable=True)
        self.assertIsNotNone(result)
        self.assertEqual(result['id'], test_session['id'])

        # The POST body mirrors the arguments; expires_at is truncated to
        # whole seconds and ISO-formatted.
        expected_data = {
            'document_id': doc_id,
            'duration': 600,
            'expires_at': expires_at.replace(microsecond=0).isoformat(),
            'is_downloadable': True,
            'is_text_selectable': True
        }
        mock_request.assert_called_with('POST', urljoin(API_URL, 'sessions'),
                                        data=json.dumps(expected_data),
                                        headers={'Content-Type': 'application/json'})
Beispiel #23
0
    def test_crate_document_from_url(self, mock_request):
        """create_document(url=...) posts the metadata and returns the document."""
        reply = Response()
        reply.status_code = 201
        reply._content = json.dumps(test_document)
        mock_request.return_value = reply

        result = self.api.create_document(url=test_url,
                                          name='Test Document',
                                          thumbnails='100x100,200x200',
                                          non_svg=False)
        self.assertIsNotNone(result)
        self.assertEqual(result, test_document)

        # Only the explicitly supported fields end up in the POST body.
        expected_data = {
            'url': test_url,
            'name': 'Test Document',
            'thumbnails': '100x100,200x200',
        }
        mock_request.assert_called_with('POST', urljoin(API_URL, 'documents'),
                                        data=json.dumps(expected_data),
                                        headers={'Content-Type': 'application/json'})

        # Either a url or a file parameter is required.
        self.assertRaises(ValueError, self.api.create_document)
    def test_get_301_thrice(self, mock_get):
        """A chain of three 301s is cached so later requests jump to the end."""
        # Build the redirect chain: neverseemeagain -> redirect_1 -> redirect_2 -> redirect_3.
        chain = []
        for idx in range(3):
            hop = Response()
            hop.url = 'http://www.test.com/%s' % (
                'neverseemeagain' if idx == 0 else 'redirect_%d' % idx)
            hop.status_code = 301
            hop.headers = {'Location': 'http://www.test.com/redirect_%d' % (idx + 1)}
            chain.append(hop)

        destination = Response()
        destination.url = 'http://www.test.com/redirect_3'
        destination.status_code = 200
        destination._content = 'Mocked response content'
        destination.headers = {'Vary': 'Accept'}
        destination.history = list(chain)

        mock_get.return_value = destination

        # First request starts from the original URL.
        r = get('http://www.test.com/neverseemeagain')
        self.assertEqual(mock_get.call_count, 1)
        mock_get.assert_called_with('http://www.test.com/neverseemeagain')
        self.assertEqual(r.status_code, 200)

        # Every cached hop now resolves directly to the final target.
        expected_count = 1
        for start in ('neverseemeagain', 'redirect_1', 'redirect_2', 'redirect_3'):
            r = get('http://www.test.com/%s' % start)
            expected_count += 1
            self.assertEqual(mock_get.call_count, expected_count)
            mock_get.assert_called_with('http://www.test.com/redirect_3')
            self.assertEqual(r.status_code, 200)
Beispiel #25
0
 def test_response_verified_with_auth_header(self, mk_accept, mk_resp):
     """A response carrying Server-Authorization triggers the accept hook."""
     signed = Response()
     signed.headers['Server-Authorization'] = 'xyz'
     signed.headers['Content-Type'] = 'text/plain'
     signed._content = b'Authorized'
     mk_resp.return_value = signed
     exec_cmd(url=self.url, creds=self.credentials_id)
     # The mocked accept callback must have been invoked — presumably it
     # verifies the Server-Authorization header; confirm against exec_cmd.
     self.assertTrue(mk_accept.called)
def test_account_list():
    """Account().all() maps every record of the list payload to an Account."""
    with patch('kloudless.resources.request') as mock_req:
        canned = Response()
        canned._content = helpers.account_list
        mock_req.return_value = canned

        accounts = kloudless.Account().all()

        assert len(accounts) > 0
        assert all(isinstance(entry, Account) for entry in accounts)
 def get(self, module_name, **get_params):
     """Return a canned 200 response for *module_name*.

     Args:
         module_name: key into ``MOCK_RESPONSES``.
         **get_params: accepted for API compatibility; ignored here.

     Returns:
         Response: status 200 whose body is the registered mock payload.

     Raises:
         KeyError: if no mock is registered for ``module_name``.
     """
     try:
         # Keep the try block minimal: only this lookup can raise KeyError,
         # so unrelated KeyErrors can no longer be masked.
         body = MOCK_RESPONSES[module_name]
     except KeyError:
         raise KeyError(u"Module {} not mocked!".format(module_name))
     response = Response()
     response._content = body
     response.status_code = 200
     return response
Beispiel #28
0
def generate_fake_error_response(msg, status_code=401, encoding='utf-8'):
    """Build a fake error ``Response`` whose body is *msg*.

    Args:
        msg: error message used as the response body.
        status_code: HTTP status, 401 by default.
        encoding: value stored on the Response's ``encoding`` attribute;
            note *msg* itself is encoded with ``str.encode()``'s default,
            not with this parameter.
    """
    fake = Response()
    fake.status_code = status_code
    fake.encoding = encoding
    fake.raw = RequestsStringIO(msg.encode())
    fake._content_consumed = True
    fake._content = fake.raw.read()
    return fake
Beispiel #29
0
def mock_get(*args, **kwargs):
    '''
    Returns an empty response object after 2 seconds
    '''
    # Simulate network latency before answering.
    time.sleep(2)
    placeholder = Response()
    placeholder.status_code = 200
    placeholder._content = '{}'
    return placeholder
Beispiel #30
0
    def test_get_document(self, mock_request):
        """get_document returns the JSON payload decoded into a dict."""
        reply = Response()
        reply.status_code = 200
        reply._content = json.dumps(test_document)
        mock_request.return_value = reply

        fetched = self.api.get_document(test_document['id'])
        self.assertIsNotNone(fetched)
        self.assertEqual(fetched, test_document)
Beispiel #31
0
def modify_and_forward(method=None,
                       path=None,
                       data_bytes=None,
                       headers=None,
                       forward_base_url=None,
                       listeners=None,
                       request_handler=None,
                       client_address=None,
                       server_address=None):
    """ This is the central function that coordinates the incoming/outgoing messages
        with the proxy listeners (message interceptors).

        :param method: HTTP verb of the incoming request
        :param path: request path; may be a full URL, which is reduced to its path below
        :param data_bytes: raw request body
        :param headers: mutable mapping of request headers
        :param forward_base_url: base URL of the backend service to forward to
        :param listeners: extra ProxyListener instances (prepended with the defaults)
        :param request_handler: HTTP handler, passed on to listeners whose
            return_response() accepts a 'request_handler' argument
        :param client_address: client address used to build X-Forwarded-For
        :param server_address: server address used to build X-Forwarded-For
        :return: the final response object (or an async generator result returned
            by a listener) to send back to the client
    """

    # combine default listeners with the caller-provided ones, dropping falsy entries
    listeners = ProxyListener.DEFAULT_LISTENERS + (listeners or [])
    listeners = [lis for lis in listeners if lis]
    data = data_bytes

    def is_full_url(url):
        # True if the URL carries an explicit scheme (e.g. "http://...")
        return re.match(r'[a-zA-Z]+://.+', url)

    # reduce a full URL down to its path component before building the proxy URL
    if is_full_url(path):
        path = path.split('://', 1)[1]
        path = '/%s' % (path.split('/', 1)[1] if '/' in path else '')
    proxy_url = '%s%s' % (forward_base_url, path)

    # give each listener a chance to override the forward URL
    for listener in listeners:
        proxy_url = listener.get_forward_url(method, path, data,
                                             headers) or proxy_url

    target_url = path
    if not is_full_url(target_url):
        target_url = '%s%s' % (forward_base_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not headers.get('Host'):
        headers['host'] = urlparse(target_url).netloc
    headers['X-Forwarded-For'] = build_x_forwarded_for(headers, client_address,
                                                       server_address)

    response = None
    modified_request = None

    # update listener (pre-invocation): a listener may short-circuit the request
    # by returning a ready-made response, a dict (serialized as JSON below), a
    # rewritten Request to forward instead, an int/False error status, or True
    # to pass the request through unchanged
    for listener in listeners:
        listener_result = listener.forward_request(method=method,
                                                   path=path,
                                                   data=data,
                                                   headers=headers)
        if isinstance(listener_result, Response):
            # listener fully handled the request
            response = listener_result
            break
        if isinstance(listener_result, LambdaResponse):
            response = listener_result
            break
        if isinstance(listener_result, dict):
            # dict result -> wrap into a JSON 200 response
            response = Response()
            response._content = json.dumps(json_safe(listener_result))
            response.headers['Content-Type'] = APPLICATION_JSON
            response.status_code = 200
            break
        elif isinstance(listener_result, Request):
            # listener rewrote the request; forward the modified copy instead
            modified_request = listener_result
            data = modified_request.data
            headers = modified_request.headers
            break
        elif http2_server.get_async_generator_result(listener_result):
            # async generator result (e.g. streaming) - return it directly
            return listener_result
        elif listener_result is not True:
            # get status code from response, or use Bad Gateway status code
            code = listener_result if isinstance(listener_result, int) else 503
            response = Response()
            response.status_code = code
            response._content = ''
            response.headers['Content-Length'] = '0'
            append_cors_headers(response)
            return response

    # perform the actual invocation of the backend service
    if response is None:
        headers['Connection'] = headers.get('Connection') or 'close'
        data_to_send = data_bytes
        request_url = proxy_url
        if modified_request:
            if modified_request.url:
                request_url = '%s%s' % (forward_base_url, modified_request.url)
            data_to_send = modified_request.data

        # make sure we drop "chunked" transfer encoding from the headers to be forwarded
        headers.pop('Transfer-Encoding', None)
        requests_method = getattr(requests, method.lower())
        response = requests_method(request_url,
                                   data=data_to_send,
                                   headers=headers,
                                   stream=True,
                                   verify=False)

    # prevent requests from processing response body (e.g., to pass-through gzip encoded content unmodified)
    pass_raw = ((hasattr(response, '_content_consumed')
                 and not response._content_consumed)
                or response.headers.get('content-encoding') in ['gzip'])
    if pass_raw and getattr(response, 'raw', None):
        new_content = response.raw.read()
        if new_content:
            response._content = new_content

    # update listener (post-invocation): only the LAST listener may rewrite the
    # final response
    if listeners:
        update_listener = listeners[-1]
        kwargs = {
            'method': method,
            'path': path,
            'data': data_bytes,
            'headers': headers,
            'response': response
        }
        if 'request_handler' in inspect.getargspec(
                update_listener.return_response)[0]:
            # some listeners (e.g., sqs_listener.py) require additional details like the original
            # request port, hence we pass in a reference to this request handler as well.
            kwargs['request_handler'] = request_handler

        updated_response = update_listener.return_response(**kwargs)
        if isinstance(updated_response, Response):
            response = updated_response

    # allow pre-flight CORS headers by default
    # (imported here to avoid a circular import at module load time)
    from localstack.services.s3.s3_listener import ProxyListenerS3
    is_s3_listener = any([
        isinstance(service_listener, ProxyListenerS3)
        for service_listener in listeners
    ])
    if not is_s3_listener:
        append_cors_headers(response)

    return response
Beispiel #32
0
    def forward_request(self, method, path, data, headers):
        """Intercept an incoming S3 API call before it is forwarded to the backend.

        Handles bucket ?notification, ?cors and ?lifecycle requests locally,
        rewrites request bodies where needed, and returns:
          - a Response, when the request was fully handled here,
          - a Request, when the body was modified and should be forwarded,
          - True, to forward the original request unchanged.
        """
        LOGGER.debug('forward_request - method: "%s"' % method)
        modified_data = None

        # If this request contains streaming v4 authentication signatures, strip them from the message
        # Related isse: https://github.com/localstack/localstack/issues/98
        # TODO we should evaluate whether to replace moto s3 with scality/S3:
        # https://github.com/scality/S3/issues/237
        if headers.get('x-amz-content-sha256'
                       ) == 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD':
            modified_data = strip_chunk_signatures(data)

        # POST requests to S3 may include a "$(unknown)" placeholder in the
        # key, which should be replaced with an actual file name before storing.
        if method == 'POST':
            original_data = modified_data or data
            expanded_data = multipart_content.expand_multipart_filename(
                original_data, headers)
            if expanded_data is not original_data:
                modified_data = expanded_data

        # If no content-type is provided, 'binary/octet-stream' should be used
        # src: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
        if method == 'PUT' and not headers.get('content-type'):
            headers['content-type'] = 'binary/octet-stream'

        # persist this API call to disk
        persistence.record('s3', method, path, data, headers)

        # split the request path into path and query string
        parsed = urlparse.urlparse(path)
        query = parsed.query
        path = parsed.path
        object_path = parsed.path
        LOGGER.debug('forward_request - query: "%s"' % query)
        LOGGER.debug('forward_request - path: "%s"' % path)
        LOGGER.debug('forward_request - host: "%s"' % headers['host'])
        LOGGER.debug('forward_request - object_path: "%s"' % object_path)

        # Check bucket name in host (virtual-hosted-style addressing)
        bucket = None
        hostname_parts = headers['host'].split('.')
        if len(hostname_parts) > 1:
            bucket = hostname_parts[0]

        # No bucket name in host, check in path.
        if (not bucket or len(bucket) == 0):
            bucket = get_bucket_name(path, headers)

        LOGGER.debug('forward_request - Bucket name: "%s"' % bucket)

        query_map = urlparse.parse_qs(query)
        # bucket notification configuration is handled locally, not forwarded
        if query == 'notification' or 'notification' in query_map:
            LOGGER.debug('forward_request - query: "%s"' % query)
            response = Response()
            response.status_code = 200
            if method == 'GET':
                # TODO check if bucket exists
                # render the stored notification config for this bucket as XML
                result = '<NotificationConfiguration xmlns="%s">' % XMLNS_S3
                if bucket in S3_NOTIFICATIONS:
                    notif = S3_NOTIFICATIONS[bucket]
                    for dest in NOTIFICATION_DESTINATION_TYPES:
                        if dest in notif:
                            dest_dict = {
                                '%sConfiguration' % dest: {
                                    'Id': uuid.uuid4(),
                                    dest: notif[dest],
                                    'Event': notif['Event'],
                                    'Filter': notif['Filter']
                                }
                            }
                            result += xmltodict.unparse(dest_dict,
                                                        full_document=False)
                result += '</NotificationConfiguration>'
                response._content = result

            if method == 'PUT':
                # parse the XML body and store the notification config in memory
                LOGGER.debug('forward_request - method: "%s"' % method)
                parsed = xmltodict.parse(data)
                notif_config = parsed.get('NotificationConfiguration')
                S3_NOTIFICATIONS.pop(bucket, None)
                for dest in NOTIFICATION_DESTINATION_TYPES:
                    LOGGER.debug(
                        'forward_request - NOTIFICATION_DESTINATION_TYPES - dest: "%s"'
                        % dest)
                    config = notif_config.get('%sConfiguration' % (dest))
                    if config:
                        events = config.get('Event')
                        if isinstance(events, six.string_types):
                            events = [events]
                        event_filter = config.get('Filter', {})
                        # make sure FilterRule is an array
                        s3_filter = _get_s3_filter(event_filter)
                        if s3_filter and not isinstance(
                                s3_filter.get('FilterRule', []), list):
                            s3_filter['FilterRule'] = [s3_filter['FilterRule']]
                        # create final details dict
                        notification_details = {
                            'Id': config.get('Id'),
                            'Event': events,
                            dest: config.get(dest),
                            'Filter': event_filter
                        }
                        # TODO: what if we have multiple destinations - would we overwrite the config?
                        LOGGER.debug(
                            'forward_request - S3_NOTIFICATIONS - bucket: "%s"'
                            % bucket)
                        S3_NOTIFICATIONS[bucket] = clone(notification_details)

            # return response for ?notification request
            return response

        # CORS configuration requests are also handled locally
        if query == 'cors' or 'cors' in query_map:
            if method == 'GET':
                return get_cors(bucket)
            if method == 'PUT':
                return set_cors(bucket, data)
            if method == 'DELETE':
                return delete_cors(bucket)

        # lifecycle configuration requests are also handled locally
        if query == 'lifecycle' or 'lifecycle' in query_map:
            if method == 'GET':
                return get_lifecycle(bucket)
            if method == 'PUT':
                return set_lifecycle(bucket, data)

        LOGGER.debug('forward_request - query_map: "%s"' % query_map)
        if method == 'PUT' and 'x-amz-meta-filename' in query_map and bucket is not None and object_path is not None:
            unique_id = get_unique_id(bucket, object_path)
            set_user_defined_metadata(unique_id, query_map)

        # forward the (possibly rewritten) request to the backend
        if modified_data:
            return Request(data=modified_data, headers=headers, method=method)
        return True
Beispiel #33
0
def modify_and_forward(method=None,
                       path=None,
                       data_bytes=None,
                       headers=None,
                       forward_base_url=None,
                       listeners=None,
                       request_handler=None,
                       client_address=None,
                       server_address=None):
    """ Coordinate an incoming request with the proxy listeners and forward it
        to the backend service.

        Listeners may short-circuit the request (by returning a Response,
        LambdaResponse, dict serialized to JSON, or an int error status),
        or rewrite it (by returning a Request). Otherwise the request is
        forwarded to forward_base_url, and the last listener may post-process
        the backend response.
    """
    listeners = [lis for lis in (listeners or []) if lis]
    data = data_bytes

    def is_full_url(url):
        # True if the URL carries an explicit scheme (e.g. "http://...")
        return re.match(r'[a-zA-Z]+://.+', url)

    # reduce a full URL down to its path component before building the proxy URL
    if is_full_url(path):
        path = path.split('://', 1)[1]
        path = '/%s' % (path.split('/', 1)[1] if '/' in path else '')
    proxy_url = '%s%s' % (forward_base_url, path)

    # give each listener a chance to override the forward URL
    for listener in listeners:
        proxy_url = listener.get_forward_url(method, path, data,
                                             headers) or proxy_url

    target_url = path
    if not is_full_url(target_url):
        target_url = '%s%s' % (forward_base_url, target_url)

    # update original "Host" header (moto s3 relies on this behavior)
    if not headers.get('Host'):
        headers['host'] = urlparse(target_url).netloc
    # BUGFIX: headers.get('Host') can be None (e.g. the branch above only sets
    # a lowercase 'host' key in a plain dict), which made the "in" test raise
    # TypeError; default to '' so the membership test is always safe
    if 'localhost.atlassian.io' in (headers.get('Host') or ''):
        headers['host'] = 'localhost'
    headers['X-Forwarded-For'] = build_x_forwarded_for(headers, client_address,
                                                       server_address)

    response = None
    modified_request = None

    # update listener (pre-invocation): listeners may short-circuit or rewrite
    for listener in listeners:
        listener_result = listener.forward_request(method=method,
                                                   path=path,
                                                   data=data,
                                                   headers=headers)
        if isinstance(listener_result, Response):
            # listener fully handled the request
            response = listener_result
            break
        if isinstance(listener_result, LambdaResponse):
            response = listener_result
            break
        if isinstance(listener_result, dict):
            # dict result -> wrap into a JSON 200 response
            response = Response()
            response._content = json.dumps(json_safe(listener_result))
            response.headers['Content-Type'] = APPLICATION_JSON
            response.status_code = 200
            break
        elif isinstance(listener_result, Request):
            # listener rewrote the request; forward the modified copy instead
            modified_request = listener_result
            data = modified_request.data
            headers = modified_request.headers
            break
        elif listener_result is not True:
            # get status code from response, or use Bad Gateway status code
            code = listener_result if isinstance(listener_result, int) else 503
            response = Response()
            response._content = ''
            # TODO add CORS headers here?
            response.headers['Content-Length'] = '0'
            response.status_code = code
            return response

    # perform the actual invocation of the backend service
    if response is None:
        headers['Connection'] = headers.get('Connection') or 'close'
        data_to_send = data_bytes
        request_url = proxy_url
        if modified_request:
            if modified_request.url:
                request_url = '%s%s' % (forward_base_url, modified_request.url)
            data_to_send = modified_request.data

        requests_method = getattr(requests, method.lower())
        response = requests_method(request_url,
                                   data=data_to_send,
                                   headers=headers,
                                   stream=True)

        # prevent requests from processing response body
        if not response._content_consumed and response.raw:
            response._content = response.raw.read()

    # update listener (post-invocation): only the LAST listener may rewrite
    # the final response
    if listeners:
        update_listener = listeners[-1]
        kwargs = {
            'method': method,
            'path': path,
            'data': data_bytes,
            'headers': headers,
            'response': response
        }
        if 'request_handler' in inspect.getargspec(
                update_listener.return_response)[0]:
            # some listeners (e.g., sqs_listener.py) require additional details like the original
            # request port, hence we pass in a reference to this request handler as well.
            kwargs['request_handler'] = request_handler
        updated_response = update_listener.return_response(**kwargs)
        if isinstance(updated_response, Response):
            response = updated_response

    return response
Beispiel #34
0
 def get_response(self, content, code=200):
     """Wrap *content* in a Response object carrying status *code*."""
     resp = Response()
     resp.status_code = code
     resp._content = content
     return resp
Beispiel #35
0
def make_error(message, code=400):
    """Return a JSON error Response with the given *message* and status *code*."""
    err = Response()
    err._content = json.dumps({'message': message})
    err.status_code = code
    return err
    def forward_request(self, method, path, data, headers):
        """Handle an incoming API Gateway request.

        Requests to a deployed stage's user-request path are dispatched to the
        configured integration (Kinesis AWS action, Lambda AWS_PROXY, or plain
        HTTP backend); authorizer paths get a local handler; anything else is
        forwarded unchanged (return True).
        """

        # match "/restapis/<api-id>/<stage>/<PATH_USER_REQUEST>/<resource-path>"
        regex2 = r'^/restapis/([A-Za-z0-9_\-]+)/([A-Za-z0-9_\-]+)/%s/(.*)$' % PATH_USER_REQUEST
        if re.match(regex2, path):
            search_match = re.search(regex2, path)
            api_id = search_match.group(1)
            path = '/%s' % search_match.group(3)
            try:
                integration = aws_stack.get_apigateway_integration(api_id, method, path)
            except Exception as e:
                msg = ('API Gateway endpoint "%s" for method "%s" not found' % (path, method))
                LOGGER.warning(msg)
                return make_error(msg, 404)
            uri = integration.get('uri')
            if method == 'POST' and integration['type'] in ['AWS']:
                if uri.endswith('kinesis:action/PutRecords'):
                    # render the request template and forward to the Kinesis backend
                    template = integration['requestTemplates'][APPLICATION_JSON]
                    new_request = aws_stack.render_velocity_template(template, data)

                    # forward records to target kinesis stream
                    headers = aws_stack.mock_aws_request_headers(service='kinesis')
                    headers['X-Amz-Target'] = kinesis_listener.ACTION_PUT_RECORDS
                    result = common.make_http_request(url=TEST_KINESIS_URL,
                        method='POST', data=new_request, headers=headers)
                    return result
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'AWS_PROXY':
                if uri.startswith('arn:aws:apigateway:') and ':lambda:path' in uri:
                    # extract the Lambda function ARN from the integration URI,
                    # invoke it, and map the proxy result onto a Response object
                    func_arn = uri.split(':lambda:path')[1].split('functions/')[1].split('/invocations')[0]
                    data_str = json.dumps(data) if isinstance(data, dict) else data
                    result = lambda_api.process_apigateway_invocation(func_arn, path, data_str, headers)
                    response = Response()
                    parsed_result = json.loads(result)
                    response.status_code = int(parsed_result['statusCode'])
                    response.headers.update(parsed_result['headers'])
                    response_body = parsed_result['body']
                    response._content = json.dumps(response_body)
                    return response
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'HTTP':
                # plain HTTP integration: forward the request to the backend URI
                function = getattr(requests, method.lower())
                if isinstance(data, dict):
                    data = json.dumps(data)
                result = function(integration['uri'], data=data, headers=headers)
                return result

            else:
                msg = ('API Gateway integration type "%s" for method "%s" not yet implemented' %
                    (integration['type'], method))
                LOGGER.warning(msg)
                return make_error(msg, 404)

            # NOTE(review): unreachable - every branch above returns
            return 200

        if re.match(PATH_REGEX_AUTHORIZERS, path):
            return handle_authorizers(method, path, data, headers)

        # not an API Gateway invocation path - forward unchanged
        return True
Beispiel #37
0
    def forward_request(self, method, path, data, headers):
        """Edge router: determine the target API/port for an incoming request
        and forward it, or serve special endpoints (/health, /graph) directly.
        """

        if path.split('?')[0] == '/health':
            return serve_health_endpoint(method, path, data)
        if method == 'POST' and path == '/graph':
            return serve_resource_graph(data)

        # kill the process if we receive this header
        headers.get(HEADER_KILL_SIGNAL) and os._exit(0)

        target = headers.get('x-amz-target', '')
        auth_header = headers.get('authorization', '')
        host = headers.get('host', '')
        headers[HEADER_LOCALSTACK_EDGE_URL] = 'https://%s' % host

        # extract API details
        api, port, path, host = get_api_from_headers(headers, method=method, path=path, data=data)

        set_default_region_in_headers(headers)

        # a negative port marks a request that should be rejected outright
        if port and int(port) < 0:
            return 404

        if not port:
            api, port = get_api_from_custom_rules(method, path, data, headers) or (api, port)

        # no forwarding rule found - log details and return a generic 404
        if not port:
            if method == 'OPTIONS':
                return 200

            if api in ['', None, '_unknown_']:
                truncated = truncate(data)
                if auth_header or target or data or path not in ['/', '/favicon.ico']:
                    LOG.info(('Unable to find forwarding rule for host "%s", path "%s %s", '
                        'target header "%s", auth header "%s", data "%s"') % (
                            host, method, path, target, auth_header, truncated))
            else:
                LOG.info(('Unable to determine forwarding port for API "%s" - please '
                    'make sure this API is enabled via the SERVICES configuration') % api)
            response = Response()
            response.status_code = 404
            response._content = '{"status": "running"}'
            return response

        # inject mock AWS credentials when the request carries none
        if api and not headers.get('Authorization'):
            headers['Authorization'] = aws_stack.mock_aws_request_headers(api)['Authorization']

        headers['Host'] = host
        if isinstance(data, dict):
            data = json.dumps(data)

        # transparently decompress gzip payloads (except for S3 requests)
        encoding_type = headers.get('content-encoding') or ''
        if encoding_type.upper() == GZIP_ENCODING and api is not S3:
            headers.set('content-encoding', IDENTITY_ENCODING)
            data = gzip.decompress(data)

        # serialize with the bootstrap lock, unless persistence has finished
        # restoring or this is an internal call
        lock_ctx = BOOTSTRAP_LOCK
        if persistence.API_CALLS_RESTORED or is_internal_call_context(headers):
            lock_ctx = empty_context_manager()

        with lock_ctx:
            return do_forward_request(api, method, path, data, headers, port=port)
Beispiel #38
0
from requests.models import Response

# Hand-crafted fake LIMS response object used as a fixture by the code below.
resp = Response()
# NOTE(review): 'code' and 'error_type' are ad-hoc attributes, not part of
# requests.Response - presumably consumed by the code under test; confirm.
resp.code = "expired"
resp.error_type = "expired"
resp.status_code = 200
resp._content = b'[{"samples":[{"cmoId":"WD0497_A","concentration":"66.69598385697347 nM","dropOffDate":1569511413554,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970794,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_A","volume":97.00131869365791,"yield":20.801476925232173,"qc":{"sampleName":"WD0497_A","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":2.08E-4,"percentDuplication":0.836002,"percentOffBait":0.0,"percentTarget10x":0.001968,"percentTarget30x":5.24E-4,"percentTarget40x":3.72E-4,"percentTarget80x":1.65E-4,"percentTarget100x":1.26E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":20.801476925232173,"qcControl":20.801476925232173,"readsDuped":7976944,"readsExamined":12713073,"recordId":5980440,"zeroCoveragePercent":0.0,"totalReads":413835472,"reviewed":false,"unpairedReadsExamined":194204663,"unmapped":204122266,"pct_EXC_TOTAL":0.979497,"pct_EXC_BASEQ":5.31E-4,"pct_EXC_DUPE":0.541235,"pct_EXC_MAPQ":0.316231,"mean_COVERAGE":0.117737},"species":"Human","baseId":"10222_2","vol":97.00131869365791,"cancerType":"WDLS","expName":"WD0497_A","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_D","concentration":"65.74430018359855 
nM","dropOffDate":1569511413635,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970797,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_D","volume":96.95791118862812,"yield":20.28882252525888,"qc":{"sampleName":"WD0497_D","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":6.1E-5,"percentDuplication":0.844542,"percentOffBait":0.0,"percentTarget10x":0.001891,"percentTarget30x":4.87E-4,"percentTarget40x":3.43E-4,"percentTarget80x":1.53E-4,"percentTarget100x":1.18E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":20.28882252525888,"qcControl":20.28882252525888,"readsDuped":8652436,"readsExamined":13172579,"recordId":5980442,"zeroCoveragePercent":0.0,"totalReads":410115482,"reviewed":false,"unpairedReadsExamined":191885162,"unmapped":201557810,"pct_EXC_TOTAL":0.98062,"pct_EXC_BASEQ":5.12E-4,"pct_EXC_DUPE":0.549248,"pct_EXC_MAPQ":0.316151,"mean_COVERAGE":0.110757},"species":"Human","baseId":"10222_5","vol":96.95791118862812,"cancerType":"WDLS","expName":"WD0497_D","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_E","concentration":"55.82409078790071 
nM","dropOffDate":1569511413661,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970798,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_E","volume":96.41731737718963,"yield":16.604306399135027,"qc":{"sampleName":"WD0497_E","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":7.1E-5,"percentDuplication":0.871703,"percentOffBait":0.0,"percentTarget10x":0.002155,"percentTarget30x":4.18E-4,"percentTarget40x":2.47E-4,"percentTarget80x":9.3E-5,"percentTarget100x":6.9E-5,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":16.604306399135027,"qcControl":16.604306399135027,"readsDuped":8785956,"readsExamined":11969346,"recordId":5980444,"zeroCoveragePercent":0.0,"totalReads":409556248,"reviewed":false,"unpairedReadsExamined":192808778,"unmapped":204920904,"pct_EXC_TOTAL":0.986745,"pct_EXC_BASEQ":3.01E-4,"pct_EXC_DUPE":0.505638,"pct_EXC_MAPQ":0.383773,"mean_COVERAGE":0.074355},"species":"Human","baseId":"10222_6","vol":96.41731737718963,"cancerType":"WDLS","expName":"WD0497_E","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_F","concentration":"64.51378714645156 
nM","dropOffDate":1569511413688,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970799,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_F","volume":96.89988746830218,"yield":20.290321514732486,"qc":{"sampleName":"WD0497_F","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":8.0E-5,"percentDuplication":0.842214,"percentOffBait":0.0,"percentTarget10x":0.001801,"percentTarget30x":4.89E-4,"percentTarget40x":3.47E-4,"percentTarget80x":1.55E-4,"percentTarget100x":1.2E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":20.290321514732486,"qcControl":20.290321514732486,"readsDuped":7218036,"readsExamined":12026699,"recordId":5980446,"zeroCoveragePercent":0.0,"totalReads":421578494,"reviewed":false,"unpairedReadsExamined":198762548,"unmapped":208203024,"pct_EXC_TOTAL":0.980392,"pct_EXC_BASEQ":5.11E-4,"pct_EXC_DUPE":0.541221,"pct_EXC_MAPQ":0.320876,"mean_COVERAGE":0.111746},"species":"Human","baseId":"10222_7","vol":96.89988746830218,"cancerType":"WDLS","expName":"WD0497_F","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_C","concentration":"59.98973085085059 
nM","dropOffDate":1569511413608,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970796,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_C","volume":96.66609606071997,"yield":19.18256829373756,"qc":{"sampleName":"WD0497_C","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":7.0E-5,"percentDuplication":0.841418,"percentOffBait":0.0,"percentTarget10x":0.001913,"percentTarget30x":5.25E-4,"percentTarget40x":3.74E-4,"percentTarget80x":1.67E-4,"percentTarget100x":1.28E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":19.18256829373756,"qcControl":19.18256829373756,"readsDuped":6927549,"readsExamined":11671070,"recordId":5980452,"zeroCoveragePercent":0.0,"totalReads":422940574,"reviewed":false,"unpairedReadsExamined":199799217,"unmapped":209761214,"pct_EXC_TOTAL":0.98036,"pct_EXC_BASEQ":5.37E-4,"pct_EXC_DUPE":0.554465,"pct_EXC_MAPQ":0.308428,"mean_COVERAGE":0.114237},"species":"Human","baseId":"10222_4","vol":96.66609606071997,"cancerType":"WDLS","expName":"WD0497_C","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_B","concentration":"63.639255653530554 
nM","dropOffDate":1569511413581,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970795,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_B","volume":96.85728568088768,"yield":19.513844967404513,"qc":{"sampleName":"WD0497_B","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":5.9E-5,"percentDuplication":0.846217,"percentOffBait":0.0,"percentTarget10x":0.001769,"percentTarget30x":4.55E-4,"percentTarget40x":3.22E-4,"percentTarget80x":1.42E-4,"percentTarget100x":1.08E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":19.513844967404513,"qcControl":19.513844967404513,"readsDuped":7968183,"readsExamined":12276967,"recordId":5980454,"zeroCoveragePercent":0.0,"totalReads":413608044,"reviewed":false,"unpairedReadsExamined":194527055,"unmapped":204659450,"pct_EXC_TOTAL":0.981681,"pct_EXC_BASEQ":5.17E-4,"pct_EXC_DUPE":0.55015,"pct_EXC_MAPQ":0.316732,"mean_COVERAGE":0.105129},"species":"Human","baseId":"10222_3","vol":96.85728568088768,"cancerType":"WDLS","expName":"WD0497_B","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_N","concentration":"86.25916060710904 
nM","dropOffDate":1569511413527,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970793,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_N","volume":97.68140567804787,"yield":25.43035641972788,"qc":{"sampleName":"WD0497_N","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":1.18E-4,"percentDuplication":0.845099,"percentOffBait":0.0,"percentTarget10x":0.00242,"percentTarget30x":4.36E-4,"percentTarget40x":2.91E-4,"percentTarget80x":1.13E-4,"percentTarget100x":8.5E-5,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":25.43035641972788,"qcControl":25.43035641972788,"readsDuped":10491740,"readsExamined":14580042,"recordId":5980458,"zeroCoveragePercent":0.0,"totalReads":448304500,"reviewed":false,"unpairedReadsExamined":209572208,"unmapped":221375303,"pct_EXC_TOTAL":0.983397,"pct_EXC_BASEQ":3.72E-4,"pct_EXC_DUPE":0.533188,"pct_EXC_MAPQ":0.328546,"mean_COVERAGE":0.103041},"species":"Human","baseId":"10222_1","vol":97.68140567804787,"cancerType":"WDLS","expName":"WD0497_N","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_G","concentration":"67.49909786056523 
nM","dropOffDate":1569511413714,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970800,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_G","volume":97.03699743642285,"yield":20.653076967345164,"qc":{"sampleName":"WD0497_G","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":6.2E-5,"percentDuplication":0.844853,"percentOffBait":0.0,"percentTarget10x":0.001981,"percentTarget30x":5.37E-4,"percentTarget40x":3.82E-4,"percentTarget80x":1.71E-4,"percentTarget100x":1.31E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":20.653076967345164,"qcControl":20.653076967345164,"readsDuped":8062518,"readsExamined":12943392,"recordId":5980472,"zeroCoveragePercent":0.0,"totalReads":445514566,"reviewed":false,"unpairedReadsExamined":209813891,"unmapped":219911521,"pct_EXC_TOTAL":0.980348,"pct_EXC_BASEQ":4.98E-4,"pct_EXC_DUPE":0.562793,"pct_EXC_MAPQ":0.301986,"mean_COVERAGE":0.120723},"species":"Human","baseId":"10222_8","vol":97.03699743642285,"cancerType":"WDLS","expName":"WD0497_G","requestedNumberOfReads":200,"coverageTarget":0},{"cmoId":"WD0497_H","concentration":"62.95351941949628 
nM","dropOffDate":1569511413743,"estimatedPurity":0.0,"gender":"F","initialPool":"Pool-09443_U-09525_M-10222-10243-B2_1","organism":"Human","project":"10222","recipe":"10X_Genomics_GeneExpression-5","recordId":5970801,"specimenType":"Resection","tumorOrNormal":"Tumor","userId":"WD0497_H","volume":96.82305291516298,"yield":19.179570314790347,"qc":{"sampleName":"WD0497_H","run":"DIANA_0129_AHLKMMDMXX","qcStatus":"Under-Review","qcUnits":"ng/uL","quantUnits":"ng/uL","mskq":0.0,"meanTargetCoverage":0.0,"percentAdapters":6.8E-5,"percentDuplication":0.849128,"percentOffBait":0.0,"percentTarget10x":0.001851,"percentTarget30x":5.05E-4,"percentTarget40x":3.62E-4,"percentTarget80x":1.67E-4,"percentTarget100x":1.28E-4,"percentRibosomalBases":0.0,"percentUtrBases":0.0,"percentCodingBases":0.0,"percentIntronicBases":0.0,"percentIntergenicBases":0.0,"percentMrnaBases":0.0,"quantIt":19.179570314790347,"qcControl":19.179570314790347,"readsDuped":7440588,"readsExamined":12145352,"recordId":5980474,"zeroCoveragePercent":0.0,"totalReads":450486778,"reviewed":false,"unpairedReadsExamined":213098037,"unmapped":222883229,"pct_EXC_TOTAL":0.981707,"pct_EXC_BASEQ":4.54E-4,"pct_EXC_DUPE":0.560367,"pct_EXC_MAPQ":0.307718,"mean_COVERAGE":0.112684},"species":"Human","baseId":"10222_9","vol":96.82305291516298,"cancerType":"WDLS","expName":"WD0497_H","requestedNumberOfReads":200,"coverageTarget":0}],"requestId":"10222","investigator":"Rodrigo Gularte Merida","pi":"Samuel Singer","investigatorEmail":"*****@*****.**","piEmail":"*****@*****.**","analysisRequested":true,"recordId":0,"sampleNumber":16,"restStatus":"SUCCESS","autorunnable":true,"deliveryDate":[]}]'
def make_response(message):
    """Serialize *message* as the body of a JSON HTTP 200 response.

    :param message: JSON-serializable payload (dict/list/primitive).
    :return: a ``requests.Response`` with status 200, a JSON content type,
        and the serialized payload as its body.

    Fix: ``requests`` stores response bodies as ``bytes``; the previous code
    assigned the ``json.dumps`` ``str`` directly to ``_content``, which breaks
    byte-oriented consumers such as ``iter_content()``. ``.json()`` and
    ``.text`` behave the same either way.
    """
    response = Response()
    response.status_code = 200
    response.headers['Content-Type'] = APPLICATION_JSON
    response._content = json.dumps(message).encode('utf-8')
    return response
Beispiel #40
0
def encoded_response(data, encoding_type=APPLICATION_JSON, status_code=200):
    """Build a ``requests.Response`` carrying *data* encoded per *encoding_type*.

    :param data: payload handed to ``encode_data`` for serialization.
    :param encoding_type: content type of the body (default: JSON).
    :param status_code: HTTP status to report (default: 200).
    :return: the assembled ``requests.Response``.
    """
    result = Response()
    result.status_code = status_code
    # CaseInsensitiveDict: item assignment is equivalent to .update({...}).
    result.headers['content-type'] = encoding_type
    result._content = encode_data(data, encoding_type)
    return result
Beispiel #41
0
    def return_response(self, method, path, data, headers, response):
        """Post-process a Kinesis API response before returning it to the client.

        Depending on the ``X-Amz-Target`` action this method:
         - fires analytics events on stream create/delete,
         - forwards PutRecord/PutRecords payloads to Lambda event listeners,
         - emulates UpdateShardCount (not supported by kinesalite),
         - normalizes record 'Data' encoding for GetRecords.
        Returns a replacement Response for the emulated/rewritten actions,
        otherwise None (implicit) to keep the backend response.
        """
        # Action name is the last dot-separated segment of the target header.
        action = headers.get('X-Amz-Target', '').split('.')[-1]
        data, encoding_type = self.decode_content(data or '{}', True)
        # NOTE(review): replace_in_encoded presumably rewrites endpoint/ARN
        # strings inside the encoded body — confirm in the helper.
        response._content = self.replace_in_encoded(response.content or '')
        records = []
        if action in ('CreateStream', 'DeleteStream'):
            event_type = (event_publisher.EVENT_KINESIS_CREATE_STREAM
                          if action == 'CreateStream' else
                          event_publisher.EVENT_KINESIS_DELETE_STREAM)
            # Only a hash of the stream name is published, not the raw name.
            payload = {'n': event_publisher.get_hash(data.get('StreamName'))}
            if action == 'CreateStream':
                payload['s'] = data.get('ShardCount')
            event_publisher.fire_event(event_type, payload=payload)
        elif action == 'PutRecord':
            response_body = self.decode_content(response.content)
            # Note: avoid adding 'encryptionType':'NONE' in the event_record, as this breaks .NET Lambdas
            event_record = {
                'approximateArrivalTimestamp': epoch_timestamp(),
                'data': data['Data'],
                'partitionKey': data['PartitionKey'],
                'sequenceNumber': response_body.get('SequenceNumber')
            }
            event_records = [event_record]
            stream_name = data['StreamName']
            lambda_api.process_kinesis_records(event_records, stream_name)
        elif action == 'PutRecords':
            event_records = []
            response_body = self.decode_content(response.content)
            if 'Records' in response_body:
                response_records = response_body['Records']
                records = data['Records']
                # Pair each request record with its response sequence number.
                for index in range(0, len(records)):
                    record = records[index]
                    # Note: avoid adding 'encryptionType':'NONE' in the event_record, as this breaks .NET Lambdas
                    event_record = {
                        'approximateArrivalTimestamp':
                        epoch_timestamp(),
                        'data':
                        record['Data'],
                        'partitionKey':
                        record['PartitionKey'],
                        'sequenceNumber':
                        response_records[index].get('SequenceNumber')
                    }
                    event_records.append(event_record)
                stream_name = data['StreamName']
                lambda_api.process_kinesis_records(event_records, stream_name)
        elif action == 'UpdateShardCount':
            # Currently kinesalite, which backs the Kinesis implementation for localstack, does
            # not support UpdateShardCount:
            # https://github.com/mhart/kinesalite/issues/61
            #
            # [Terraform](https://www.terraform.io) makes the call to UpdateShardCount when it
            # applies Kinesis resources. A Terraform run fails when this is not present.
            #
            # The code that follows just returns a successful response, bypassing the 400
            # response that kinesalite returns.
            #
            response = Response()
            response.status_code = 200
            content = {
                'CurrentShardCount': 1,
                'StreamName': data['StreamName'],
                'TargetShardCount': data['TargetShardCount']
            }
            response.encoding = 'UTF-8'
            response._content = json.dumps(content)
            return response
        elif action == 'GetRecords':
            # v2 SDKs expect integer arrival timestamps (see below).
            sdk_v2 = self.sdk_is_v2(
                headers.get('User-Agent', '').split(' ')[0])
            results, encoding_type = self.decode_content(
                response.content, True)

            records = results.get('Records', [])
            if not records:
                return response

            for record in records:
                if sdk_v2:
                    record['ApproximateArrivalTimestamp'] = int(
                        record['ApproximateArrivalTimestamp'])
                if not isinstance(record['Data'], str):
                    # Remove double quotes from data written as bytes
                    # https://github.com/localstack/localstack/issues/3588
                    tmp = bytearray(record['Data']['data'])
                    if len(tmp) >= 2 and tmp[0] == tmp[-1] == b'"'[0]:
                        tmp = tmp[1:-1]

                    if encoding_type == APPLICATION_JSON:
                        record['Data'] = to_str(base64.b64encode(tmp))
                    else:
                        record['Data'] = to_str(tmp)

                else:
                    # String data: round-trip through base64 to strip quotes.
                    tmp = base64.b64decode(record['Data'])
                    if len(tmp) >= 2 and tmp[0] == tmp[-1] == b'"'[0]:
                        tmp = tmp[1:-1]
                    record['Data'] = to_str(base64.b64encode(tmp))

            # Re-encode using the same wire format the client requested.
            response._content = cbor2.dumps(
                results) if encoding_type == APPLICATION_CBOR else json.dumps(
                    results)
            return response
Beispiel #42
0
    def forward_request(self, method, path, data, headers):
        """Intercept a DynamoDB API request before it reaches the backend.

        Returns one of:
         - a ``Response`` to short-circuit the call (mocked/validated actions),
         - a new ``Request`` to forward a modified payload,
         - the original ``Request`` for plain pass-through (default).
        """
        result = handle_special_request(method, path, data, headers)
        if result is not None:
            return result

        # prepare request headers
        self.prepare_request_headers(headers)

        # Keep the untouched payload for the default pass-through below.
        data_orig = data
        data = data or "{}"
        data = json.loads(to_str(data))
        ddb_client = aws_stack.connect_to_service("dynamodb")
        action = headers.get("X-Amz-Target", "")
        action = action.replace(ACTION_PREFIX, "")

        # Optionally simulate provisioned-throughput throttling.
        if self.should_throttle(action):
            return error_response_throughput()

        ProxyListenerDynamoDB.thread_local.existing_item = None
        # NOTE(review): table_def is only bound when "TableName" is present;
        # the branches below that read it all handle TableName-bearing actions.
        if "TableName" in data:
            table_def = DynamoDBRegion.get().table_definitions.get(
                data["TableName"]) or {}

        if action == "CreateTable":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if self.table_exists(ddb_client, data["TableName"]):
                return error_response(
                    message="Table already created",
                    error_type="ResourceInUseException",
                    code=400,
                )

        elif action == "CreateGlobalTable":
            return create_global_table(data)

        elif action == "DescribeGlobalTable":
            return describe_global_table(data)

        elif action == "ListGlobalTables":
            return list_global_tables(data)

        elif action == "UpdateGlobalTable":
            return update_global_table(data)

        elif action in ("PutItem", "UpdateItem", "DeleteItem"):
            # find an existing item and store it in a thread-local, so we can access it in return_response,
            # in order to determine whether an item already existed (MODIFY) or not (INSERT)
            try:
                if has_event_sources_or_streams_enabled(data["TableName"]):
                    ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(
                        data)
            except Exception as e:
                if "ResourceNotFoundException" in str(e):
                    return get_table_not_found_error()
                raise

            # Fix incorrect values if ReturnValues==ALL_OLD and ReturnConsumedCapacity is
            # empty, see https://github.com/localstack/localstack/issues/2049
            if ((data.get("ReturnValues") == "ALL_OLD") or
                (not data.get("ReturnValues"))
                ) and not data.get("ReturnConsumedCapacity"):
                data["ReturnConsumedCapacity"] = "TOTAL"
                return Request(data=json.dumps(data),
                               method=method,
                               headers=headers)

        elif action == "DescribeTable":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()

        elif action == "DeleteTable":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()

        elif action == "BatchWriteItem":
            # Snapshot existing items (consumed later via the thread-local) and
            # split out requests the throttling simulation marks as unprocessed.
            existing_items = []
            unprocessed_put_items = []
            unprocessed_delete_items = []
            for table_name in sorted(data["RequestItems"].keys()):
                for request in data["RequestItems"][table_name]:
                    for key in ["PutRequest", "DeleteRequest"]:
                        inner_request = request.get(key)
                        if inner_request:
                            if self.should_throttle(action):
                                if key == "PutRequest":
                                    unprocessed_put_items.append(inner_request)
                                elif key == "DeleteRequest":
                                    unprocessed_delete_items.append(
                                        inner_request)
                            else:
                                item = find_existing_item(
                                    inner_request, table_name)
                                existing_items.append(item)
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items
            ProxyListenerDynamoDB.thread_local.unprocessed_put_items = unprocessed_put_items
            ProxyListenerDynamoDB.thread_local.unprocessed_delete_items = unprocessed_delete_items

        elif action == "Query":
            if data.get("IndexName"):
                if not is_index_query_valid(to_str(data["TableName"]),
                                            data.get("Select")):
                    return error_response(
                        message=
                        "One or more parameter values were invalid: Select type ALL_ATTRIBUTES "
                        "is not supported for global secondary index id-index because its projection "
                        "type is not ALL",
                        error_type="ValidationException",
                        code=400,
                    )

        elif action == "TransactWriteItems":
            # Snapshot existing items for each Put/Update/Delete in the transaction.
            existing_items = []
            for item in data["TransactItems"]:
                for key in ["Put", "Update", "Delete"]:
                    inner_item = item.get(key)
                    if inner_item:
                        existing_items.append(find_existing_item(inner_item))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items

        elif action == "UpdateTimeToLive":
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            response = Response()
            response.status_code = 200
            self._table_ttl_map[data["TableName"]] = {
                "AttributeName":
                data["TimeToLiveSpecification"]["AttributeName"],
                "Status": data["TimeToLiveSpecification"]["Enabled"],
            }
            response._content = json.dumps(
                {"TimeToLiveSpecification": data["TimeToLiveSpecification"]})
            fix_headers_for_updated_response(response)
            return response

        elif action == "DescribeTimeToLive":
            # Answer from the locally maintained TTL map (see UpdateTimeToLive).
            response = Response()
            response.status_code = 200
            if data["TableName"] in self._table_ttl_map:
                if self._table_ttl_map[data["TableName"]]["Status"]:
                    ttl_status = "ENABLED"
                else:
                    ttl_status = "DISABLED"
                response._content = json.dumps({
                    "TimeToLiveDescription": {
                        "AttributeName":
                        self._table_ttl_map[data["TableName"]]
                        ["AttributeName"],
                        "TimeToLiveStatus":
                        ttl_status,
                    }
                })
            else:  # TTL for dynamodb table not set
                response._content = json.dumps({
                    "TimeToLiveDescription": {
                        "TimeToLiveStatus": "DISABLED"
                    }
                })

            fix_headers_for_updated_response(response)
            return response

        elif action in ("TagResource", "UntagResource"):
            response = Response()
            response.status_code = 200
            response._content = ""  # returns an empty body on success.
            fix_headers_for_updated_response(response)
            return response

        elif action == "ListTagsOfResource":
            # Serve tags from the in-memory per-region tag store.
            response = Response()
            response.status_code = 200
            response._content = json.dumps({
                "Tags": [{
                    "Key": k,
                    "Value": v
                } for k, v in DynamoDBRegion.TABLE_TAGS.get(
                    data["ResourceArn"], {}).items()]
            })
            fix_headers_for_updated_response(response)
            return response

        elif action == "EnableKinesisStreamingDestination":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()
            stream = is_kinesis_stream_exists(stream_arn=data["StreamArn"])
            if not stream:
                return error_response(
                    error_type="ValidationException",
                    message=
                    "User does not have a permission to use kinesis stream",
                )

            return dynamodb_enable_kinesis_streaming_destination(
                data, table_def)

        elif action == "DisableKinesisStreamingDestination":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()
            stream = is_kinesis_stream_exists(stream_arn=data["StreamArn"])
            if not stream:
                return error_response(
                    error_type="ValidationException",
                    message=
                    "User does not have a permission to use kinesis stream",
                )

            return dynamodb_disable_kinesis_streaming_destination(
                data, table_def)

        elif action == "DescribeKinesisStreamingDestination":
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data["TableName"]):
                return get_table_not_found_error()
            response = aws_responses.requests_response({
                "KinesisDataStreamDestinations":
                table_def.get("KinesisDataStreamDestinations") or [],
                "TableName":
                data["TableName"],
            })
            return response

        # Default: forward the original, unmodified request to the backend.
        return Request(data=data_orig, method=method, headers=headers)
Beispiel #43
0
 def forward_request(self, method, path, data, headers):
     """Capture the raw request payload and acknowledge with an empty 200."""
     # Stash the body for later inspection by the surrounding test/caller.
     records.append(data)
     reply = Response()
     reply.status_code = 200
     reply._content = ''
     return reply
    def forward_request(self, method, path, data, headers):
        """Intercept a DynamoDB API request before it reaches the backend.

        Returns one of:
         - a ``Response`` to short-circuit the call (mocked/validated actions),
         - a new ``Request`` to forward a modified payload,
         - the original ``Request`` for plain pass-through (default).
        """
        result = handle_special_request(method, path, data, headers)
        if result is not None:
            return result

        # prepare request headers
        self.prepare_request_headers(headers)

        # Keep the untouched payload for the default pass-through below.
        data_orig = data
        data = data or '{}'
        data = json.loads(to_str(data))
        ddb_client = aws_stack.connect_to_service('dynamodb')
        action = headers.get('X-Amz-Target', '')
        action = action.replace(ACTION_PREFIX, '')

        # Optionally simulate provisioned-throughput throttling.
        if self.should_throttle(action):
            return error_response_throughput()

        ProxyListenerDynamoDB.thread_local.existing_item = None

        if action == 'CreateTable':
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if self.table_exists(ddb_client, data['TableName']):
                return error_response(message='Table already created',
                                      error_type='ResourceInUseException', code=400)

        elif action == 'CreateGlobalTable':
            return create_global_table(data)

        elif action == 'DescribeGlobalTable':
            return describe_global_table(data)

        elif action == 'ListGlobalTables':
            return list_global_tables(data)

        elif action == 'UpdateGlobalTable':
            return update_global_table(data)

        elif action in ('PutItem', 'UpdateItem', 'DeleteItem'):
            # find an existing item and store it in a thread-local, so we can access it in return_response,
            # in order to determine whether an item already existed (MODIFY) or not (INSERT)
            try:
                if has_event_sources_or_streams_enabled(data['TableName']):
                    ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(data)
            except Exception as e:
                if 'ResourceNotFoundException' in str(e):
                    return get_table_not_found_error()
                raise

            # Fix incorrect values if ReturnValues==ALL_OLD and ReturnConsumedCapacity is
            # empty, see https://github.com/localstack/localstack/issues/2049
            if ((data.get('ReturnValues') == 'ALL_OLD') or (not data.get('ReturnValues'))) \
                    and not data.get('ReturnConsumedCapacity'):
                data['ReturnConsumedCapacity'] = 'TOTAL'
                return Request(data=json.dumps(data), method=method, headers=headers)

        elif action == 'DescribeTable':
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data['TableName']):
                return get_table_not_found_error()

        elif action == 'DeleteTable':
            # Check if table exists, to avoid error log output from DynamoDBLocal
            if not self.table_exists(ddb_client, data['TableName']):
                return get_table_not_found_error()

        elif action == 'BatchWriteItem':
            # Snapshot existing items (consumed later via the thread-local).
            existing_items = []
            for table_name in sorted(data['RequestItems'].keys()):
                for request in data['RequestItems'][table_name]:
                    for key in ['PutRequest', 'DeleteRequest']:
                        inner_request = request.get(key)
                        if inner_request:
                            existing_items.append(find_existing_item(inner_request, table_name))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items

        elif action == 'Query':
            if data.get('IndexName'):
                if not is_index_query_valid(to_str(data['TableName']), data.get('Select')):
                    return error_response(
                        message='One or more parameter values were invalid: Select type ALL_ATTRIBUTES '
                                'is not supported for global secondary index id-index because its projection '
                                'type is not ALL', error_type='ValidationException', code=400)

        elif action == 'TransactWriteItems':
            # Snapshot existing items for each Put/Update/Delete in the transaction.
            existing_items = []
            for item in data['TransactItems']:
                for key in ['Put', 'Update', 'Delete']:
                    inner_item = item.get(key)
                    if inner_item:
                        existing_items.append(find_existing_item(inner_item))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items

        elif action == 'UpdateTimeToLive':
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            response = Response()
            response.status_code = 200
            self._table_ttl_map[data['TableName']] = {
                'AttributeName': data['TimeToLiveSpecification']['AttributeName'],
                'Status': data['TimeToLiveSpecification']['Enabled']
            }
            response._content = json.dumps({'TimeToLiveSpecification': data['TimeToLiveSpecification']})
            fix_headers_for_updated_response(response)
            return response

        elif action == 'DescribeTimeToLive':
            # Answer from the locally maintained TTL map (see UpdateTimeToLive).
            response = Response()
            response.status_code = 200
            if data['TableName'] in self._table_ttl_map:
                if self._table_ttl_map[data['TableName']]['Status']:
                    ttl_status = 'ENABLED'
                else:
                    ttl_status = 'DISABLED'
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'AttributeName': self._table_ttl_map[data['TableName']]['AttributeName'],
                        'TimeToLiveStatus': ttl_status
                    }
                })
            else:  # TTL for dynamodb table not set
                response._content = json.dumps({'TimeToLiveDescription': {'TimeToLiveStatus': 'DISABLED'}})

            fix_headers_for_updated_response(response)
            return response

        elif action in ('TagResource', 'UntagResource'):
            response = Response()
            response.status_code = 200
            response._content = ''  # returns an empty body on success.
            fix_headers_for_updated_response(response)
            return response

        elif action == 'ListTagsOfResource':
            # Serve tags from the in-memory tag store.
            response = Response()
            response.status_code = 200
            response._content = json.dumps({
                'Tags': [
                    {'Key': k, 'Value': v}
                    for k, v in TABLE_TAGS.get(data['ResourceArn'], {}).items()
                ]
            })
            fix_headers_for_updated_response(response)
            return response

        # Default: forward the original, unmodified request to the backend.
        return Request(data=data_orig, method=method, headers=headers)
Beispiel #45
0
    def forward_request(self, method, path, data, headers):
        """Route an incoming API Gateway request to its configured integration.

        Handles kinesis PutRecords ('AWS'), Lambda ('AWS_PROXY') and plain
        'HTTP' integrations for user-request paths, plus authorizer paths.
        Returns a Response/result to short-circuit, or True to pass through.
        """
        data = data and json.loads(to_str(data))

        # Paths to match
        regex2 = r'^/restapis/([A-Za-z0-9_\-]+)/([A-Za-z0-9_\-]+)/%s/(.*)$' % PATH_USER_REQUEST

        if re.match(regex2, path):
            search_match = re.search(regex2, path)
            api_id = search_match.group(1)
            relative_path = '/%s' % search_match.group(3)
            try:
                # Direct lookup of the integration for this exact path.
                integration = aws_stack.get_apigateway_integration(
                    api_id, method, path=relative_path)
                assert integration
            except Exception:
                # if we have no exact match, try to find an API resource that contains path parameters
                path_map = get_rest_api_paths(rest_api_id=api_id)
                try:
                    extracted_path, resource = get_resource_for_path(
                        path=relative_path, path_map=path_map)
                except Exception:
                    return make_error('Unable to find path %s' % path, 404)

                integrations = resource.get('resourceMethods', {})
                integration = integrations.get(method, {})
                integration = integration.get('methodIntegration')
                if not integration:

                    if method == 'OPTIONS' and 'Origin' in headers:
                        # default to returning CORS headers if this is an OPTIONS request
                        return get_cors_response(headers)

                    return make_error(
                        'Unable to find integration for path %s' % path, 404)

            uri = integration.get('uri')
            if method == 'POST' and integration['type'] == 'AWS':
                if uri.endswith('kinesis:action/PutRecords'):
                    # Render the request template, then proxy to local Kinesis.
                    template = integration['requestTemplates'][
                        APPLICATION_JSON]
                    new_request = aws_stack.render_velocity_template(
                        template, data)

                    # forward records to target kinesis stream
                    headers = aws_stack.mock_aws_request_headers(
                        service='kinesis')
                    headers[
                        'X-Amz-Target'] = kinesis_listener.ACTION_PUT_RECORDS
                    result = common.make_http_request(url=TEST_KINESIS_URL,
                                                      method='POST',
                                                      data=new_request,
                                                      headers=headers)
                    return result
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'AWS_PROXY':
                if uri.startswith(
                        'arn:aws:apigateway:') and ':lambda:path' in uri:
                    # Extract the Lambda function ARN out of the integration URI.
                    func_arn = uri.split(':lambda:path')[1].split(
                        'functions/')[1].split('/invocations')[0]
                    data_str = json.dumps(data) if isinstance(data,
                                                              dict) else data

                    # NOTE(review): extracted_path is only bound by the
                    # fallback lookup above; when the direct lookup succeeded
                    # this raises NameError, which the except below swallows,
                    # leaving path_params empty.
                    try:
                        path_params = extract_path_params(
                            path=relative_path, extracted_path=extracted_path)
                    except Exception:
                        path_params = {}
                    result = lambda_api.process_apigateway_invocation(
                        func_arn,
                        relative_path,
                        data_str,
                        headers,
                        path_params=path_params,
                        method=method,
                        resource_path=path)

                    if isinstance(result, FlaskResponse):
                        return flask_to_requests_response(result)

                    # Convert a Lambda proxy result dict into an HTTP response.
                    response = Response()
                    parsed_result = result if isinstance(
                        result, dict) else json.loads(result)
                    parsed_result = common.json_safe(parsed_result)
                    response.status_code = int(
                        parsed_result.get('statusCode', 200))
                    response.headers.update(parsed_result.get('headers', {}))
                    try:
                        if isinstance(parsed_result['body'], dict):
                            response._content = json.dumps(
                                parsed_result['body'])
                        else:
                            response._content = parsed_result['body']
                    except Exception:
                        # No 'body' key in the result -> empty JSON body.
                        response._content = '{}'
                    return response
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'HTTP':
                # Proxy the call with the matching requests.<method> function.
                function = getattr(requests, method.lower())
                if isinstance(data, dict):
                    data = json.dumps(data)
                result = function(integration['uri'],
                                  data=data,
                                  headers=headers)
                return result

            else:
                msg = (
                    'API Gateway integration type "%s" for method "%s" not yet implemented'
                    % (integration['type'], method))
                LOGGER.warning(msg)
                return make_error(msg, 404)

            # NOTE(review): unreachable -- every branch above returns.
            return 200

        if re.match(PATH_REGEX_AUTHORIZERS, path):
            return handle_authorizers(method, path, data, headers)

        # Not a recognized path: pass the request through unchanged.
        return True
Beispiel #46
0
    def forward_request(self, method, path, data, headers):
        """Handle S3 requests for bucket ?notification and ?cors configuration.

        Also strips streaming v4 chunk signatures from the payload and records
        the call for persistence. Returns a Response for handled queries, a
        modified Request when the payload was rewritten, or True to pass
        the request through unchanged.
        """

        modified_data = None

        # If this request contains streaming v4 authentication signatures, strip them from the message
        # Related isse: https://github.com/localstack/localstack/issues/98
        # TODO we should evaluate whether to replace moto s3 with scality/S3:
        # https://github.com/scality/S3/issues/237
        if headers.get('x-amz-content-sha256'
                       ) == 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD':
            modified_data = strip_chunk_signatures(data)

        # persist this API call to disk
        persistence.record('s3', method, path, data, headers)

        # Bucket name is the first path segment; query decides the sub-resource.
        parsed = urlparse.urlparse(path)
        query = parsed.query
        path = parsed.path
        bucket = path.split('/')[1]
        query_map = urlparse.parse_qs(query)
        if query == 'notification' or 'notification' in query_map:
            response = Response()
            response.status_code = 200
            if method == 'GET':
                # TODO check if bucket exists
                # Render the stored notification config back as XML.
                result = '<NotificationConfiguration xmlns="%s">' % XMLNS_S3
                if bucket in S3_NOTIFICATIONS:
                    notif = S3_NOTIFICATIONS[bucket]
                    for dest in ['Queue', 'Topic', 'CloudFunction']:
                        if dest in notif:
                            result += ('''<{dest}Configuration>
                                        <Id>{uid}</Id>
                                        <{dest}>{endpoint}</{dest}>
                                        <Event>{event}</Event>
                                    </{dest}Configuration>''').format(
                                dest=dest,
                                uid=uuid.uuid4(),
                                endpoint=S3_NOTIFICATIONS[bucket][dest],
                                event=S3_NOTIFICATIONS[bucket]['Event'])
                result += '</NotificationConfiguration>'
                response._content = result

            if method == 'PUT':
                # Parse the XML body and store the notification config in memory.
                tree = ET.fromstring(data)
                for dest in ['Queue', 'Topic', 'CloudFunction']:
                    config = tree.find('{%s}%sConfiguration' %
                                       (XMLNS_S3, dest))
                    if config is not None and len(config):
                        # TODO: what if we have multiple destinations - would we overwrite the config?
                        S3_NOTIFICATIONS[bucket] = {
                            'Id': get_xml_text(config, 'Id'),
                            'Event': get_xml_text(config, 'Event',
                                                  ns=XMLNS_S3),
                            # TODO extract 'Events' attribute (in addition to 'Event')
                            dest: get_xml_text(config, dest, ns=XMLNS_S3),
                        }

            # return response for ?notification request
            return response

        if query == 'cors' or 'cors' in query_map:
            if method == 'GET':
                return get_cors(bucket)
            if method == 'PUT':
                return set_cors(bucket, data)
            if method == 'DELETE':
                return delete_cors(bucket)

        if modified_data:
            # Forward the signature-stripped payload instead of the original.
            return Request(data=modified_data, headers=headers, method=method)
        return True
Beispiel #47
0
def _create_response_object(content, code, headers):
    """Build a requests.Response carrying the given body, status code and headers."""
    resp = Response()
    resp._content = content
    resp.status_code = code
    resp.headers = headers
    return resp
Beispiel #48
0
    def forward_request(self, method, path, data, headers):
        """Pre-process DynamoDB API requests before they reach the backend.

        Returns True to forward the request unchanged, an int status code or
        a Response to short-circuit, or a Request to forward modified data.
        """
        # Serve the DynamoDB shell UI; pass all other GETs straight through.
        if path.startswith('/shell') or method == 'GET':
            if path == '/shell':
                headers = {
                    'Refresh': '0; url=%s/shell/' % config.TEST_DYNAMODB_URL
                }
                return aws_responses.requests_response('', headers=headers)
            return True

        # CORS preflight: reply immediately with a bare 200.
        if method == 'OPTIONS':
            return 200

        if not data:
            data = '{}'
        data = json.loads(to_str(data))
        ddb_client = aws_stack.connect_to_service('dynamodb')
        # The DynamoDB operation name is carried in the X-Amz-Target header.
        action = headers.get('X-Amz-Target')

        # Fault injection: with configured probability, return a throttling
        # error for the throttle-eligible actions.
        if random.random() < config.DYNAMODB_ERROR_PROBABILITY:
            throttled = [
                '%s.%s' % (ACTION_PREFIX, a) for a in THROTTLED_ACTIONS
            ]
            if action in throttled:
                return error_response_throughput()

        ProxyListenerDynamoDB.thread_local.existing_item = None

        if action == '%s.CreateTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) in table_names:
                return 200

        elif action in ('%s.PutItem' % ACTION_PREFIX,
                        '%s.UpdateItem' % ACTION_PREFIX,
                        '%s.DeleteItem' % ACTION_PREFIX):
            # find an existing item and store it in a thread-local, so we can access it in return_response,
            # in order to determine whether an item already existed (MODIFY) or not (INSERT)
            try:
                ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(
                    data)
            except Exception as e:
                if 'ResourceNotFoundException' in str(e):
                    return get_table_not_found_error()
                raise

            # Fix incorrect values if ReturnValues==ALL_OLD and ReturnConsumedCapacity is
            # empty, see https://github.com/localstack/localstack/issues/2049
            if ((data.get('ReturnValues') == 'ALL_OLD') or (not data.get('ReturnValues'))) \
                    and not data.get('ReturnConsumedCapacity'):
                data['ReturnConsumedCapacity'] = 'TOTAL'
                return Request(data=json.dumps(data),
                               method=method,
                               headers=headers)

        elif action == '%s.DescribeTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) not in table_names:
                return get_table_not_found_error()

        elif action == '%s.DeleteTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) not in table_names:
                return get_table_not_found_error()

        elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
            # Snapshot all items touched by the batch (tables visited in
            # sorted order for determinism) so return_response can diff later.
            existing_items = []
            for table_name in sorted(data['RequestItems'].keys()):
                for request in data['RequestItems'][table_name]:
                    for key in ['PutRequest', 'DeleteRequest']:
                        inner_request = request.get(key)
                        if inner_request:
                            existing_items.append(
                                find_existing_item(inner_request, table_name))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items

        elif action == '%s.TransactWriteItems' % ACTION_PREFIX:
            # Same snapshotting as BatchWriteItem, for transactional writes.
            existing_items = []
            for item in data['TransactItems']:
                for key in ['Put', 'Update', 'Delete']:
                    inner_item = item.get(key)
                    if inner_item:
                        existing_items.append(find_existing_item(inner_item))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items

        elif action == '%s.UpdateTimeToLive' % ACTION_PREFIX:
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            response = Response()
            response.status_code = 200
            self._table_ttl_map[data['TableName']] = {
                'AttributeName':
                data['TimeToLiveSpecification']['AttributeName'],
                'Status': data['TimeToLiveSpecification']['Enabled']
            }
            response._content = json.dumps(
                {'TimeToLiveSpecification': data['TimeToLiveSpecification']})
            fix_headers_for_updated_response(response)
            return response

        elif action == '%s.DescribeTimeToLive' % ACTION_PREFIX:
            # Serve TTL status from the local map kept by UpdateTimeToLive.
            response = Response()
            response.status_code = 200
            if data['TableName'] in self._table_ttl_map:
                if self._table_ttl_map[data['TableName']]['Status']:
                    ttl_status = 'ENABLED'
                else:
                    ttl_status = 'DISABLED'
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'AttributeName':
                        self._table_ttl_map[data['TableName']]
                        ['AttributeName'],
                        'TimeToLiveStatus':
                        ttl_status
                    }
                })
            else:  # TTL for dynamodb table not set
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'TimeToLiveStatus': 'DISABLED'
                    }
                })

            fix_headers_for_updated_response(response)
            return response

        elif action == '%s.TagResource' % ACTION_PREFIX or action == '%s.UntagResource' % ACTION_PREFIX:
            response = Response()
            response.status_code = 200
            response._content = ''  # returns an empty body on success.
            fix_headers_for_updated_response(response)
            return response

        elif action == '%s.ListTagsOfResource' % ACTION_PREFIX:
            # Serve tags from the in-memory TABLE_TAGS store.
            response = Response()
            response.status_code = 200
            response._content = json.dumps({
                'Tags': [{
                    'Key': k,
                    'Value': v
                } for k, v in TABLE_TAGS.get(data['ResourceArn'], {}).items()]
            })
            fix_headers_for_updated_response(response)
            return response

        # All other actions are forwarded unchanged.
        return True
Beispiel #49
0
from requests.models import Response

# Shared canned requests.Response fixture: a 200 reply whose JSON body lists
# sample status strings.  `code` and `error_type` are extra marker attributes
# set on the mock object (not standard Response fields).
resp = Response()
resp.status_code = 200
resp.code = "expired"
resp.error_type = "expired"
resp._content = b'["Under-Review","IGO-Complete","Passed","Failed","Resequence-Pool","Repool-Sample","Recapture-Sample","New-Library-Needed","Required-Additional-Reads"]'
Beispiel #50
0
def util_load_csv(path):
    """Read the CSV file at *path* and wrap its encoded bytes in a mock Response."""
    response = Response()
    with open(path, 'r') as csv_file:
        response._content = csv_file.read().encode()
    return response
 def listener(**kwargs):
     """Mock listener: echo the supplied 'data' back as a 200 JSON response."""
     mocked = Response()
     mocked.status_code = 200
     payload = kwargs['data']
     # Empty/None payloads are serialized as an empty JSON object.
     mocked._content = json.dumps(payload) if payload else '{}'
     return mocked
Beispiel #52
0
def util_load_txt_to_json(path):
    """Load the text file at *path* into a mock Response (body stored as bytes)."""
    with open(path) as txt_file:
        raw = txt_file.read()
    mock_resp = Response()
    mock_resp._content = raw.encode()
    return mock_resp
Beispiel #53
0
    def forward_request(self, method, path, data, headers):
        """Handle invocations of user-defined API Gateway endpoints.

        Matches /restapis/<api-id>/<stage>/<PATH_USER_REQUEST>/<path> and
        dispatches to the configured integration (Kinesis/SQS 'AWS' actions,
        Lambda 'AWS_PROXY', or plain 'HTTP').  Returns True for non-matching
        paths so they are forwarded unchanged.
        """
        data = data and json.loads(to_str(data))

        # Paths to match
        regex2 = r'^/restapis/([A-Za-z0-9_\-]+)/([A-Za-z0-9_\-]+)/%s/(.*)$' % PATH_USER_REQUEST

        if re.match(regex2, path):
            search_match = re.search(regex2, path)
            api_id = search_match.group(1)
            stage = search_match.group(2)
            relative_path_w_query_params = '/%s' % search_match.group(3)

            relative_path, query_string_params = extract_query_string_params(
                path=relative_path_w_query_params)

            # Resolve the concrete resource for this path within the REST API.
            path_map = helpers.get_rest_api_paths(rest_api_id=api_id)
            try:
                extracted_path, resource = get_resource_for_path(
                    path=relative_path, path_map=path_map)
            except Exception:
                return make_error('Unable to find path %s' % path, 404)

            # Look up the integration for this HTTP method, falling back to
            # an 'ANY' method mapping.
            integrations = resource.get('resourceMethods', {})
            integration = integrations.get(method, {})
            if not integration:
                integration = integrations.get('ANY', {})
            integration = integration.get('methodIntegration')
            if not integration:
                if method == 'OPTIONS' and 'Origin' in headers:
                    # default to returning CORS headers if this is an OPTIONS request
                    return get_cors_response(headers)
                return make_error(
                    'Unable to find integration for path %s' % path, 404)

            uri = integration.get('uri')
            if method == 'POST' and integration['type'] == 'AWS':
                if uri.endswith('kinesis:action/PutRecords'):
                    # Render the Velocity request template with the payload.
                    template = integration['requestTemplates'][
                        APPLICATION_JSON]
                    new_request = aws_stack.render_velocity_template(
                        template, data)

                    # forward records to target kinesis stream
                    headers = aws_stack.mock_aws_request_headers(
                        service='kinesis')
                    headers[
                        'X-Amz-Target'] = kinesis_listener.ACTION_PUT_RECORDS
                    result = common.make_http_request(url=TEST_KINESIS_URL,
                                                      method='POST',
                                                      data=new_request,
                                                      headers=headers)
                    return result

                elif uri.startswith(
                        'arn:aws:apigateway:') and ':sqs:path' in uri:
                    # SQS integration: render the template and issue the
                    # resulting query against the local SQS endpoint.
                    template = integration['requestTemplates'][
                        APPLICATION_JSON]
                    account_id, queue = uri.split('/')[-2:]
                    region_name = uri.split(':')[3]

                    new_request = aws_stack.render_velocity_template(
                        template, data) + '&QueueName=%s' % queue
                    headers = aws_stack.mock_aws_request_headers(
                        service='sqs', region_name=region_name)

                    url = urljoin(
                        TEST_SQS_URL,
                        '%s/%s?%s' % (account_id, queue, new_request))
                    result = common.make_http_request(url,
                                                      method='GET',
                                                      headers=headers)
                    return result

                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'AWS_PROXY':
                if uri.startswith(
                        'arn:aws:apigateway:') and ':lambda:path' in uri:
                    # Extract the Lambda function ARN and account id from the
                    # integration URI.
                    func_arn = uri.split(':lambda:path')[1].split(
                        'functions/')[1].split('/invocations')[0]
                    data_str = json.dumps(data) if isinstance(
                        data, (dict, list)) else data
                    account_id = uri.split(':lambda:path')[1].split(
                        ':function:')[0].split(':')[-1]

                    # NOTE(review): assumes X-Forwarded-For has at least two
                    # entries; a single-entry header would raise IndexError.
                    source_ip = headers['X-Forwarded-For'].split(',')[-2]

                    # Sample request context:
                    # https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-create-api-as-simple-proxy-for-lambda.html#api-gateway-create-api-as-simple-proxy-for-lambda-test
                    request_context = {
                        'path': relative_path,
                        'accountId': account_id,
                        'resourceId': resource.get('id'),
                        'stage': stage,
                        'identity': {
                            'accountId': account_id,
                            'sourceIp': source_ip,
                            'userAgent': headers['User-Agent'],
                        }
                    }

                    try:
                        path_params = extract_path_params(
                            path=relative_path, extracted_path=extracted_path)
                    except Exception:
                        path_params = {}

                    result = lambda_api.process_apigateway_invocation(
                        func_arn,
                        relative_path,
                        data_str,
                        headers,
                        path_params=path_params,
                        query_string_params=query_string_params,
                        method=method,
                        resource_path=path,
                        request_context=request_context)

                    if isinstance(result, FlaskResponse):
                        return flask_to_requests_response(result)

                    # Convert the Lambda proxy result ({statusCode, headers,
                    # body}) into a requests.Response.
                    response = Response()
                    parsed_result = result if isinstance(
                        result, dict) else json.loads(result)
                    parsed_result = common.json_safe(parsed_result)
                    response.status_code = int(
                        parsed_result.get('statusCode', 200))
                    response.headers.update(parsed_result.get('headers', {}))
                    try:
                        if isinstance(parsed_result['body'], dict):
                            response._content = json.dumps(
                                parsed_result['body'])
                        else:
                            response._content = parsed_result['body']
                    except Exception:
                        # No 'body' key in the result: default to empty JSON.
                        response._content = '{}'
                    return response
                else:
                    msg = 'API Gateway action uri "%s" not yet implemented' % uri
                    LOGGER.warning(msg)
                    return make_error(msg, 404)

            elif integration['type'] == 'HTTP':
                # Plain HTTP integration: relay the call via requests.
                function = getattr(requests, method.lower())
                if isinstance(data, dict):
                    data = json.dumps(data)
                result = function(integration['uri'],
                                  data=data,
                                  headers=headers)
                return result

            else:
                msg = (
                    'API Gateway integration type "%s" for method "%s" not yet implemented'
                    % (integration['type'], method))
                LOGGER.warning(msg)
                return make_error(msg, 404)

            # NOTE(review): unreachable - every integration branch above returns.
            return 200

        if re.match(PATH_REGEX_AUTHORIZERS, path):
            return handle_authorizers(method, path, data, headers)

        return True
Beispiel #54
0
    def patched(self,
                method,
                url,
                params=None,
                data=None,
                headers=None,
                cookies=None,
                files=None,
                auth=None,
                timeout=None,
                allow_redirects=True,
                proxies=None,
                hooks=None,
                stream=None,
                verify=None,
                cert=None):
        """Replacement for requests.Session.request that serves canned
        responses from a local dataset folder instead of hitting the network.

        The signature mirrors requests.Session.request so it can be patched
        in transparently.  Returns a fudged requests.Response whose body is
        the matching fixture file's contents, or a 417 response with an
        error message when no fixture exists for the request.
        """
        ### Borrowed directly from requests.Session.request ###
        method = builtin_str(method)

        # Create the Request (only used to compute the final prepared URL).
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        # Build the fixture filename from the URL path plus all query/body
        # values, so distinct requests map to distinct fixture files.
        values = params or {}
        if data:
            values.update(data)

        parsed_url = urlparse(url)
        filename = build_filename(parsed_url.path, values)

        # Determine which dataset to use.  The current dataset name is read
        # from a shared file under an exclusive lock; an absent file means
        # "use the default dataset".
        dataset = None
        dataset_file = None

        try:
            # BUG FIX: mode was "rw", which is not a valid open() mode string
            # in Python 3 (raises ValueError, which the IOError handler below
            # would not catch).  The file is only ever read here, so "r" is
            # the correct mode.
            dataset_file = open(CURRENT_DATASET_FILENAME, "r")
        except IOError:
            pass

        if dataset_file:
            flock(dataset_file, LOCK_EX)  # block until we can acquire the lock
            dataset = dataset_file.read().strip(
            )  # NOTE! `dataset` may be set to ""

        if not dataset:
            dataset = DEFAULT_DATASET

        # try to load the fixture file for this request
        full_path = os.path.join(dataset_folder, dataset, parsed_url.hostname,
                                 filename[0])

        logging.info('Attempting to load dataset: {}'.format(full_path))

        content = ''
        if os.path.exists(full_path):
            with open(full_path, 'r') as original_file:
                content = original_file.read()
                status_code = 200
                # TODO: mime-type
        else:
            # 417 Expectation Failed signals a missing fixture to the caller.
            status_code = 417
            content = 'The dataset {} could not be found.'.format(full_path)

        # TODO: fail violently on error?
        # Fudge the response object...
        resp = Response()
        resp.status_code = status_code
        resp.url = prep.url
        # NOTE(review): requests stores the body in _content as bytes;
        # assigning a str works for .content here but confirm .text callers.
        resp._content = content

        if dataset_file:
            # Closing the file also releases the flock.
            dataset_file.close()

        return resp
Beispiel #55
0
    def forward_request(self, method, path, data, headers):
        """Pre-process S3 API requests: strip streaming v4 signatures, expand
        multipart POST filenames, and handle the ?notification and ?cors
        bucket sub-resources locally.

        Returns True to forward unchanged, a Response to short-circuit, or a
        Request carrying modified data.
        """
        modified_data = None

        # If this request contains streaming v4 authentication signatures, strip them from the message
        # Related isse: https://github.com/localstack/localstack/issues/98
        # TODO we should evaluate whether to replace moto s3 with scality/S3:
        # https://github.com/scality/S3/issues/237
        if headers.get('x-amz-content-sha256') == 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD':
            modified_data = strip_chunk_signatures(data)

        # POST requests to S3 may include a "$(unknown)" placeholder in the
        # key, which should be replaced with an actual file name before storing.
        if method == 'POST':
            original_data = modified_data or data
            expanded_data = expand_multipart_filename(original_data, headers)
            if expanded_data is not original_data:
                modified_data = expanded_data

        # persist this API call to disk
        persistence.record('s3', method, path, data, headers)

        # The bucket name is the first path segment; sub-resources such as
        # ?notification / ?cors arrive in the query string.
        parsed = urlparse.urlparse(path)
        query = parsed.query
        path = parsed.path
        bucket = path.split('/')[1]
        query_map = urlparse.parse_qs(query)
        if query == 'notification' or 'notification' in query_map:
            response = Response()
            response.status_code = 200
            if method == 'GET':
                # TODO check if bucket exists
                result = '<NotificationConfiguration xmlns="%s">' % XMLNS_S3
                if bucket in S3_NOTIFICATIONS:
                    notif = S3_NOTIFICATIONS[bucket]
                    # NOTE(review): assumes notif['Event'] is a list of event
                    # names; a plain string would be iterated per character.
                    events_string = '\n'.join(['<Event>%s</Event>' % e for e in notif['Event']])
                    for dest in ['Queue', 'Topic', 'CloudFunction']:
                        if dest in notif:
                            result += ("""<{dest}Configuration>
                                        <Id>{uid}</Id>
                                        <{dest}>{endpoint}</{dest}>
                                        {events}
                                    </{dest}Configuration>""").format(
                                dest=dest, uid=uuid.uuid4(),
                                endpoint=notif[dest],
                                events=events_string)
                result += '</NotificationConfiguration>'
                response._content = result

            if method == 'PUT':
                parsed = xmltodict.parse(data)
                notif_config = parsed.get('NotificationConfiguration')
                for dest in ['Queue', 'Topic', 'CloudFunction']:
                    config = notif_config.get('%sConfiguration' % (dest))
                    if config:
                        # TODO: what if we have multiple destinations - would we overwrite the config?
                        notification_details = {
                            'Id': config.get('Id'),
                            'Event': config.get('Event'),
                            dest: config.get(dest),
                            'Filter': config.get('Filter')
                        }
                        # JSON round-trip, presumably to turn xmltodict's
                        # nested OrderedDicts into plain dicts - confirm.
                        S3_NOTIFICATIONS[bucket] = json.loads(json.dumps(notification_details))

            # return response for ?notification request
            return response

        if query == 'cors' or 'cors' in query_map:
            if method == 'GET':
                return get_cors(bucket)
            if method == 'PUT':
                return set_cors(bucket, data)
            if method == 'DELETE':
                return delete_cors(bucket)

        if modified_data:
            return Request(data=modified_data, headers=headers, method=method)
        return True
    def forward_request(self, method, path, data, headers):
        """Pre-process DynamoDB API requests before they reach the backend.

        Returns True to forward the request unchanged, or an int status code /
        Response to short-circuit.
        """
        # Shell UI requests are forwarded unchanged.
        if path.startswith('/shell'):
            return True
        # CORS preflight: reply immediately with a bare 200.
        if method == 'OPTIONS':
            return 200

        data = json.loads(to_str(data))
        ddb_client = aws_stack.connect_to_service('dynamodb')

        # Fault injection: with configured probability, return a throttling error.
        if random.random() < config.DYNAMODB_ERROR_PROBABILITY:
            return error_response_throughput()

        ProxyListenerDynamoDB.thread_local.existing_item = None

        # The DynamoDB operation name is carried in the X-Amz-Target header.
        action = headers.get('X-Amz-Target')
        if action == '%s.CreateTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) in table_names:
                return 200
        elif action in ('%s.PutItem' % ACTION_PREFIX,
                        '%s.UpdateItem' % ACTION_PREFIX,
                        '%s.DeleteItem' % ACTION_PREFIX):
            # find an existing item and store it in a thread-local, so we can access it in return_response,
            # in order to determine whether an item already existed (MODIFY) or not (INSERT)
            try:
                ProxyListenerDynamoDB.thread_local.existing_item = find_existing_item(
                    data)
            except Exception as e:
                if 'ResourceNotFoundException' in str(e):
                    return get_table_not_found_error()
                raise
        elif action == '%s.DescribeTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) not in table_names:
                return get_table_not_found_error()
        elif action == '%s.DeleteTable' % ACTION_PREFIX:
            # Check if table exists, to avoid error log output from DynamoDBLocal
            table_names = ddb_client.list_tables()['TableNames']
            if to_str(data['TableName']) not in table_names:
                return get_table_not_found_error()
        elif action == '%s.BatchWriteItem' % ACTION_PREFIX:
            # Snapshot all items touched by the batch (tables visited in
            # sorted order for determinism) so return_response can diff later.
            existing_items = []
            for table_name in sorted(data['RequestItems'].keys()):
                for request in data['RequestItems'][table_name]:
                    for key in ['PutRequest', 'DeleteRequest']:
                        inner_request = request.get(key)
                        if inner_request:
                            existing_items.append(
                                find_existing_item(inner_request, table_name))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items
        elif action == '%s.TransactWriteItems' % ACTION_PREFIX:
            # Same snapshotting as BatchWriteItem, for transactional writes.
            existing_items = []
            for item in data['TransactItems']:
                for key in ['Put', 'Update', 'Delete']:
                    inner_item = item.get(key)
                    if inner_item:
                        existing_items.append(find_existing_item(inner_item))
            ProxyListenerDynamoDB.thread_local.existing_items = existing_items
        elif action == '%s.UpdateTimeToLive' % ACTION_PREFIX:
            # TODO: TTL status is maintained/mocked but no real expiry is happening for items
            response = Response()
            response.status_code = 200
            self._table_ttl_map[data['TableName']] = {
                'AttributeName':
                data['TimeToLiveSpecification']['AttributeName'],
                'Status': data['TimeToLiveSpecification']['Enabled']
            }
            response._content = json.dumps(
                {'TimeToLiveSpecification': data['TimeToLiveSpecification']})
            fix_headers_for_updated_response(response)
            return response
        elif action == '%s.DescribeTimeToLive' % ACTION_PREFIX:
            # Serve TTL status from the local map kept by UpdateTimeToLive.
            response = Response()
            response.status_code = 200
            if data['TableName'] in self._table_ttl_map:
                if self._table_ttl_map[data['TableName']]['Status']:
                    ttl_status = 'ENABLED'
                else:
                    ttl_status = 'DISABLED'
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'AttributeName':
                        self._table_ttl_map[data['TableName']]
                        ['AttributeName'],
                        'TimeToLiveStatus':
                        ttl_status
                    }
                })
            else:  # TTL for dynamodb table not set
                response._content = json.dumps({
                    'TimeToLiveDescription': {
                        'TimeToLiveStatus': 'DISABLED'
                    }
                })
            fix_headers_for_updated_response(response)
            return response
        elif action == '%s.TagResource' % ACTION_PREFIX or action == '%s.UntagResource' % ACTION_PREFIX:
            response = Response()
            response.status_code = 200
            response._content = ''  # returns an empty body on success.
            fix_headers_for_updated_response(response)
            return response
        elif action == '%s.ListTagsOfResource' % ACTION_PREFIX:
            response = Response()
            response.status_code = 200
            response._content = json.dumps({
                'Tags': []
            })  # TODO: mocked and returns an empty list of tags for now.
            fix_headers_for_updated_response(response)
            return response

        # All other actions are forwarded unchanged.
        return True
Beispiel #57
0
    def forward_request(self, method, path, data, headers):
        """Edge router: determine the target API/port for an incoming request
        and forward it to the corresponding backend service.

        Returns an int status code, a Response, or the forwarded result.
        """
        if common.INFRA_STOPPED:
            return 503

        # Optionally forward everything to a fixed external URL.
        if config.EDGE_FORWARD_URL:
            return do_forward_request_network(
                0,
                method,
                path,
                data,
                headers,
                target_url=config.EDGE_FORWARD_URL)

        # kill the process if we receive this header
        headers.get(HEADER_KILL_SIGNAL) and sys.exit(0)

        target = headers.get("x-amz-target", "")
        auth_header = get_auth_string(method, path, headers, data)
        if auth_header and not headers.get("authorization"):
            headers["authorization"] = auth_header
        host = headers.get("host", "")
        orig_req_url = headers.pop(HEADER_LOCALSTACK_REQUEST_URL, "")
        # Record the scheme://host prefix of the original request URL, falling
        # back to the Host header.
        headers[HEADER_LOCALSTACK_EDGE_URL] = (re.sub(
            r"^([^:]+://[^/]+).*", r"\1", orig_req_url) or "http://%s" % host)

        # extract API details
        api, port, path, host = get_api_from_headers(headers,
                                                     method=method,
                                                     path=path,
                                                     data=data)

        set_default_region_in_headers(headers)

        # A negative port marks the request as unroutable.
        if port and int(port) < 0:
            return 404

        if not port:
            # Fall back to custom routing rules to determine the target API.
            api, port = get_api_from_custom_rules(method, path, data,
                                                  headers) or (
                                                      api,
                                                      port,
                                                  )

        should_log_trace = is_trace_logging_enabled(headers)
        if api and should_log_trace:
            # print request trace for debugging, if enabled
            LOG.debug('IN(%s): "%s %s" - headers: %s - data: %s', api, method,
                      path, dict(headers), data)

        if not port:
            if method == "OPTIONS":
                if api and should_log_trace:
                    # print request trace for debugging, if enabled
                    LOG.debug('IN(%s): "%s %s" - status: %s', api, method,
                              path, 200)
                return 200

            if api in ["", None, API_UNKNOWN]:
                truncated = truncate(data)
                # Only log for "interesting" requests (skip bare health checks).
                if auth_header or target or data or path not in [
                        "/", "/favicon.ico"
                ]:
                    LOG.info(
                        ('Unable to find forwarding rule for host "%s", path "%s %s", '
                         'target header "%s", auth header "%s", data "%s"'),
                        host,
                        method,
                        path,
                        target,
                        auth_header,
                        truncated,
                    )
            else:
                LOG.info(
                    ('Unable to determine forwarding port for API "%s" - please '
                     "make sure this API is enabled via the SERVICES configuration"
                     ),
                    api,
                )
            # No route found: report 404 with a small status body.
            response = Response()
            response.status_code = 404
            response._content = '{"status": "running"}'
            return response

        # Ensure a (mock) Authorization header is present for the target API.
        if api and not headers.get("Authorization"):
            headers["Authorization"] = aws_stack.mock_aws_request_headers(
                api)["Authorization"]
        headers[HEADER_TARGET_API] = str(api)

        headers["Host"] = host
        if isinstance(data, dict):
            data = json.dumps(data)

        # Transparently decompress gzip-encoded payloads, except for APIs
        # that handle gzip themselves.
        encoding_type = headers.get("Content-Encoding") or ""
        if encoding_type.upper() == GZIP_ENCODING.upper(
        ) and api not in SKIP_GZIP_APIS:
            headers.set("Content-Encoding", IDENTITY_ENCODING)
            data = gzip.decompress(data)

        is_internal_call = is_internal_call_context(headers)

        self._require_service(api)

        # Hold the bootstrap lock until persistence state is restored;
        # internal calls bypass it to avoid deadlocks during startup.
        lock_ctx = BOOTSTRAP_LOCK
        if is_internal_call or persistence.is_persistence_restored():
            lock_ctx = empty_context_manager()

        with lock_ctx:
            result = do_forward_request(api,
                                        method,
                                        path,
                                        data,
                                        headers,
                                        port=port)
            if should_log_trace and result not in [None, False, True]:
                result_status_code = getattr(result, "status_code", result)
                result_headers = getattr(result, "headers", {})
                result_content = getattr(result, "content", "")
                LOG.debug(
                    'OUT(%s): "%s %s" - status: %s - response headers: %s - response: %s',
                    api,
                    method,
                    path,
                    result_status_code,
                    dict(result_headers or {}),
                    result_content,
                )
            return result
    def test_is_eligible_true_due_to_error_code(self):
        """A 503 response is retry-eligible even if its body reports success."""
        resp = Response()
        resp.status_code = 503
        resp._content = b'{"success": 1}'

        self.assertTrue(self.retries_handler.is_eligible(resp))
Example #59
0
    def return_response(self, method, path, data, headers, response,
                        request_handler):
        """Post-process SQS API responses from the backend.

        Fires an event via ``event_publisher`` for queue creation/deletion,
        rewrites ``<QueueUrl>`` elements in the XML payload to expose the
        external hostname/port (and https if enabled), and mocks the
        TagQueue / ListQueueTags actions that ElasticMQ does not implement.
        Returns a patched ``Response`` when changes were made, otherwise
        ``None`` (falls through to the original response).
        """

        if method == 'POST' and path == '/':
            # The SQS query API sends form-encoded parameters in the body.
            req_data = urlparse.parse_qs(to_str(data))
            action = req_data.get('Action', [None])[0]
            event_type = None
            queue_url = None
            if action == 'CreateQueue':
                event_type = event_publisher.EVENT_SQS_CREATE_QUEUE
                # The queue URL is only present in the backend's XML response.
                response_data = xmltodict.parse(response.content)
                if 'CreateQueueResponse' in response_data:
                    queue_url = response_data['CreateQueueResponse'][
                        'CreateQueueResult']['QueueUrl']
            elif action == 'DeleteQueue':
                event_type = event_publisher.EVENT_SQS_DELETE_QUEUE
                queue_url = req_data.get('QueueUrl', [None])[0]

            if event_type and queue_url:
                # Publish only a hash of the queue URL, not the URL itself.
                event_publisher.fire_event(
                    event_type,
                    payload={'u': event_publisher.get_hash(queue_url)})

            # patch the response and return the correct endpoint URLs
            if action in ('CreateQueue', 'GetQueueUrl', 'ListQueues'):
                content_str = content_str_original = to_str(response.content)
                new_response = Response()
                new_response.status_code = response.status_code
                new_response.headers = response.headers
                if config.USE_SSL and '<QueueUrl>http://' in content_str:
                    # return https://... if we're supposed to use SSL
                    content_str = re.sub(r'<QueueUrl>\s*http://',
                                         r'<QueueUrl>https://', content_str)
                # expose external hostname:port
                external_port = get_external_port(headers, request_handler)
                content_str = re.sub(
                    r'<QueueUrl>\s*([a-z]+)://[^<]*:([0-9]+)/([^<]*)\s*</QueueUrl>',
                    r'<QueueUrl>\1://%s:%s/\3</QueueUrl>' %
                    (HOSTNAME_EXTERNAL, external_port), content_str)
                new_response._content = content_str
                if content_str_original != new_response._content:
                    # if changes have been made, return patched response
                    new_response.headers['content-length'] = len(
                        new_response._content)
                    return new_response

            # Since the following 2 API calls are not implemented in ElasticMQ, we're mocking them
            # and letting them to return an empty response
            if action == 'TagQueue':
                new_response = Response()
                new_response.status_code = 200
                new_response._content = (
                    '<?xml version="1.0"?>'
                    '<TagQueueResponse>'
                    '<ResponseMetadata>'  # noqa: W291
                    '<RequestId>{}</RequestId>'  # noqa: W291
                    '</ResponseMetadata>'  # noqa: W291
                    '</TagQueueResponse>').format(uuid.uuid4())
                return new_response
            elif action == 'ListQueueTags':
                new_response = Response()
                new_response.status_code = 200
                new_response._content = (
                    '<?xml version="1.0"?>'
                    '<ListQueueTagsResponse xmlns="{}">'
                    '<ListQueueTagsResult/>'  # noqa: W291
                    '<ResponseMetadata>'  # noqa: W291
                    '<RequestId>{}</RequestId>'  # noqa: W291
                    '</ResponseMetadata>'  # noqa: W291
                    '</ListQueueTagsResponse>').format(XMLNS_SQS, uuid.uuid4())
                return new_response
Example #60
0
    def return_response(self, method, path, data, headers, response,
                        request_handler):
        """Post-process SQS API responses from the backend.

        Handles CORS preflight, fires analytics events, patches queue
        attributes / URLs / ARNs in the XML payload to reflect the external
        endpoint and region, and mocks the TagQueue / ListQueueTags actions
        that ElasticMQ does not implement. Returns a patched ``Response``
        when the body was modified, otherwise falls through.
        """
        # Allow CORS preflight requests to succeed.
        if method == 'OPTIONS' and path == '/':
            return 200

        if method != 'POST':
            return

        # Pass backend errors through untouched.
        if response.status_code >= 400:
            return response

        region_name = extract_region_from_auth_header(headers)
        req_data = urlparse.parse_qs(to_str(data))
        action = req_data.get('Action', [None])[0]
        original_body = to_str(response.content)
        body = original_body

        self._fire_event(req_data, response)

        # patch the response and add missing attributes
        if action == 'GetQueueAttributes':
            body = self._add_queue_attributes(path, req_data, body, headers)

        # patch the response and return the correct endpoint URLs / ARNs
        if action in ('CreateQueue', 'GetQueueUrl', 'ListQueues',
                      'GetQueueAttributes'):
            if config.USE_SSL and '<QueueUrl>http://' in body:
                # return https://... if we're supposed to use SSL
                body = re.sub(r'<QueueUrl>\s*http://',
                              r'<QueueUrl>https://', body)
            # expose external hostname:port
            external_port = SQS_PORT_EXTERNAL or get_external_port(
                headers, request_handler)
            body = re.sub(
                r'<QueueUrl>\s*([a-z]+)://[^<]*:([0-9]+)/([^<]*)\s*</QueueUrl>',
                r'<QueueUrl>\1://%s:%s/\3</QueueUrl>' %
                (HOSTNAME_EXTERNAL, external_port), body)
            # fix queue ARN
            body = re.sub(
                r'<([a-zA-Z0-9]+)>\s*arn:aws:sqs:elasticmq:([^<]+)</([a-zA-Z0-9]+)>',
                r'<\1>arn:aws:sqs:%s:\2</\3>' % (region_name), body)

        # if changes have been made, return patched response
        if body != original_body:
            patched = Response()
            patched.status_code = response.status_code
            patched.headers = response.headers
            patched._content = body
            patched.headers['content-length'] = len(patched._content)
            return patched

        # Since the following 2 API calls are not implemented in ElasticMQ, we're mocking them
        # and letting them to return an empty response
        if action == 'TagQueue':
            mocked = Response()
            mocked.status_code = 200
            mocked._content = ("""
                <?xml version="1.0"?>
                <TagQueueResponse>
                    <ResponseMetadata>
                        <RequestId>{}</RequestId>
                    </ResponseMetadata>
                </TagQueueResponse>
            """).strip().format(uuid.uuid4())
            return mocked
        elif action == 'ListQueueTags':
            mocked = Response()
            mocked.status_code = 200
            mocked._content = ("""
                <?xml version="1.0"?>
                <ListQueueTagsResponse xmlns="{}">
                    <ListQueueTagsResult/>
                    <ResponseMetadata>
                        <RequestId>{}</RequestId>
                    </ResponseMetadata>
                </ListQueueTagsResponse>
            """).strip().format(XMLNS_SQS, uuid.uuid4())
            return mocked