Code example #1
0
    def test_fetch(self):
        """Test whether a list of issues is returned.

        Mocks the JIRA search, comments and fields endpoints, runs a
        full fetch and checks both the issued requests and the three
        returned issues.
        """

        requests = []

        bodies_json = [
            read_file('data/jira/jira_issues_page_1.json'),
            read_file('data/jira/jira_issues_page_2.json')
        ]
        comment_json = read_file('data/jira/jira_comments_issue_page_2.json')
        empty_comment = read_file('data/jira/jira_comments_issue_empty.json')

        body = read_file('data/jira/jira_fields.json')

        def request_callback(method, uri, headers):
            # Serve the search pages once each, in order, recording every
            # request so the query strings can be asserted on later.
            # Use a name distinct from the outer `body` to avoid shadowing.
            page = bodies_json.pop(0)
            requests.append(httpretty.last_request())
            return 200, headers, page

        httpretty.register_uri(httpretty.GET,
                               JIRA_SEARCH_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])

        httpretty.register_uri(httpretty.GET,
                               JIRA_ISSUE_1_COMMENTS_URL,
                               body=empty_comment,
                               status=200)

        httpretty.register_uri(httpretty.GET,
                               JIRA_ISSUE_2_COMMENTS_URL,
                               body=comment_json,
                               status=200)

        httpretty.register_uri(httpretty.GET,
                               JIRA_ISSUE_3_COMMENTS_URL,
                               body=empty_comment,
                               status=200)

        httpretty.register_uri(httpretty.GET,
                               JIRA_FIELDS_URL,
                               body=body,
                               status=200)

        jira = Jira(JIRA_SERVER_URL)

        issues = list(jira.fetch())

        body_json = json.loads(body)

        custom_fields = filter_custom_fields(body_json)

        def assert_custom_fields(issue):
            # Every fetched issue must expose the id/name pairs declared
            # in the fields fixture for these custom fields.
            for field in ('customfield_10301', 'customfield_10400',
                          'customfield_10600', 'customfield_10603'):
                self.assertEqual(issue['data']['fields'][field]['id'],
                                 custom_fields[field]['id'])
                self.assertEqual(issue['data']['fields'][field]['name'],
                                 custom_fields[field]['name'])

        expected_req = [{
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': ['updated > 0 order by updated asc'],
            'startAt': ['0'],
            'maxResults': ['100']
        }, {
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': ['updated > 0 order by updated asc'],
            'startAt': ['2'],
            'maxResults': ['100']
        }]

        # Assert length explicitly; zip alone would silently truncate.
        self.assertEqual(len(requests), len(expected_req))

        for request, expected_qs in zip(requests, expected_req):
            self.assertEqual(request.method, 'GET')
            self.assertRegex(request.path, '/rest/api/2/search')
            self.assertDictEqual(request.querystring, expected_qs)

        self.assertEqual(len(issues), 3)

        issue = issues[0]
        self.assertEqual(issue['origin'], 'http://example.com')
        self.assertEqual(issue['uuid'],
                         '6a7ba2a01aee56603b9d8a5f6b40c843fc089b2f')
        self.assertEqual(issue['updated_on'], 1457015567)
        self.assertEqual(issue['category'], 'issue')
        self.assertEqual(issue['tag'], 'http://example.com')
        self.assertEqual(issue['data']['key'], 'HELP-6043')
        self.assertEqual(issue['data']['fields']['issuetype']['name'],
                         'extRequest')
        self.assertEqual(issue['data']['fields']['creator']['name'], 'user2')
        self.assertEqual(issue['data']['fields']['assignee']['name'], 'user1')
        assert_custom_fields(issue)
        self.assertEqual(issue['data']['comments_data'], [])

        issue = issues[1]
        self.assertEqual(issue['origin'], 'http://example.com')
        self.assertEqual(issue['uuid'],
                         '3c3d67925b108a37f88cc6663f7f7dd493fa818c')
        self.assertEqual(issue['updated_on'], 1457015417)
        self.assertEqual(issue['category'], 'issue')
        self.assertEqual(issue['tag'], 'http://example.com')
        self.assertEqual(issue['data']['key'], 'HELP-6042')
        self.assertEqual(issue['data']['fields']['issuetype']['name'],
                         'extRequest')
        self.assertEqual(issue['data']['fields']['creator']['name'], 'user2')
        self.assertEqual(issue['data']['fields']['assignee']['name'], 'user1')
        assert_custom_fields(issue)
        # Only the second issue has comments in the fixtures.
        self.assertEqual(len(issue['data']['comments_data']), 2)
        self.assertEqual(
            issue['data']['comments_data'][0]['author']['displayName'],
            'Tim Monks')
        self.assertEqual(
            issue['data']['comments_data'][1]['author']['displayName'],
            'Scott Monks')

        issue = issues[2]
        self.assertEqual(issue['origin'], 'http://example.com')
        self.assertEqual(issue['uuid'],
                         '1c7765e2a5d27495cf389f5f951c544693c4655f')
        self.assertEqual(issue['updated_on'], 1457006245)
        self.assertEqual(issue['category'], 'issue')
        self.assertEqual(issue['tag'], 'http://example.com')
        self.assertEqual(issue['data']['key'], 'HELP-6041')
        self.assertEqual(issue['data']['fields']['issuetype']['name'],
                         'extRequest')
        self.assertEqual(issue['data']['fields']['creator']['name'], 'user2')
        self.assertEqual(issue['data']['fields']['assignee']['name'], 'user3')
        assert_custom_fields(issue)
        self.assertEqual(issue['data']['comments_data'], [])
Code example #2
0
def setup_http_server():
    """Setup a mock HTTP server.

    Registers a callback for every Redmine URL under test and returns
    the list that will accumulate the HTTP requests received, so tests
    can assert on the exact requests sent.
    """

    http_requests = []

    issues_body = read_file('data/redmine/redmine_issues.json', 'rb')
    issues_next_body = read_file('data/redmine/redmine_issues_next.json', 'rb')
    issues_empty_body = read_file('data/redmine/redmine_issues_empty.json', 'rb')
    issue_2_body = read_file('data/redmine/redmine_issue_2.json', 'rb')
    issue_5_body = read_file('data/redmine/redmine_issue_5.json', 'rb')
    issue_9_body = read_file('data/redmine/redmine_issue_9.json', 'rb')
    issue_7311_body = read_file('data/redmine/redmine_issue_7311.json', 'rb')
    user_3_body = read_file('data/redmine/redmine_user_3.json', 'rb')
    user_4_body = read_file('data/redmine/redmine_user_4.json', 'rb')
    user_24_body = read_file('data/redmine/redmine_user_24.json', 'rb')
    user_25_body = read_file('data/redmine/redmine_user_25.json', 'rb')

    def request_callback(method, uri, headers):
        last_request = httpretty.last_request()
        params = last_request.querystring

        status = 200

        if uri.startswith(REDMINE_ISSUES_URL):
            # Paginated issue listing: the fixture served depends on the
            # 'updated_on' filter and the 'offset' pagination parameter.
            if (params['updated_on'][0] == '>=1970-01-01T00:00:00Z' and
                params['offset'][0] == '0'):
                body = issues_body
            elif (params['updated_on'][0] == '>=1970-01-01T00:00:00Z' and
                  params['offset'][0] == '3'):
                body = issues_next_body
            elif (params['updated_on'][0] == '>=2016-07-27T00:00:00Z' and
                  params['offset'][0] == '0'):
                body = issues_next_body
            else:
                body = issues_empty_body
        elif uri.startswith(REDMINE_ISSUE_2_URL):
            body = issue_2_body
        elif uri.startswith(REDMINE_ISSUE_5_URL):
            body = issue_5_body
        elif uri.startswith(REDMINE_ISSUE_9_URL):
            body = issue_9_body
        elif uri.startswith(REDMINE_ISSUE_7311_URL):
            body = issue_7311_body
        elif uri.startswith(REDMINE_USER_3_URL):
            body = user_3_body
        elif uri.startswith(REDMINE_USER_4_URL):
            body = user_4_body
        elif uri.startswith(REDMINE_USER_24_URL):
            body = user_24_body
        elif uri.startswith(REDMINE_USER_25_URL):
            body = user_25_body
        elif uri.startswith(REDMINE_NOT_FOUND_USER_URL):
            body = "Not Found"
            status = 404
        else:
            # A bare `raise` with no active exception fails with an
            # unrelated RuntimeError; raise an explicit error instead.
            raise Exception("no valid URL")

        http_requests.append(last_request)

        return (status, headers, body)

    for url in REDMINE_URL_LIST:
        httpretty.register_uri(httpretty.GET,
                               url,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
    return http_requests
Code example #3
0
def setup_http_server():
    """Setup a mock HTTP server.

    Registers a callback for the Confluence contents and historical
    content URLs and returns the list that will accumulate the HTTP
    requests received, so tests can assert on them.
    """

    http_requests = []

    body_contents = read_file('data/confluence/confluence_contents.json', 'rb')
    body_contents_next = read_file(
        'data/confluence/confluence_contents_next.json', 'rb')
    body_contents_empty = read_file(
        'data/confluence/confluence_contents_empty.json', 'rb')
    body_content_1_v1 = read_file(
        'data/confluence/confluence_content_1_v1.json', 'rb')
    body_content_1_v2 = read_file(
        'data/confluence/confluence_content_1_v2.json', 'rb')
    body_content_2 = read_file('data/confluence/confluence_content_2_v1.json',
                               'rb')
    body_content_att = read_file(
        'data/confluence/confluence_content_att_v1.json', 'rb')

    def request_callback(method, uri, headers):
        if uri.startswith(CONFLUENCE_CONTENTS_URL):
            params = urllib.parse.parse_qs(urllib.parse.urlparse(uri).query)

            # 'start' drives pagination; a CQL filter from 2016-07-08
            # onwards matches no contents in the fixtures.
            if 'start' in params and params['start'] == ['2']:
                body = body_contents_next
            elif params['cql'][0].startswith(
                    "lastModified>='2016-07-08 00:00'"):
                body = body_contents_empty
            else:
                body = body_contents
        elif uri.startswith(CONFLUENCE_HISTORICAL_CONTENT_1):
            params = urllib.parse.parse_qs(urllib.parse.urlparse(uri).query)

            # Content 1 has two historical versions in the fixtures.
            if params['version'] == ['1']:
                body = body_content_1_v1
            else:
                body = body_content_1_v2
        elif uri.startswith(CONFLUENCE_HISTORICAL_CONTENT_2):
            body = body_content_2
        elif uri.startswith(CONFLUENCE_HISTORICAL_CONTENT_ATT):
            body = body_content_att
        else:
            # A bare `raise` with no active exception fails with an
            # unrelated RuntimeError; raise an explicit error instead.
            raise Exception("no valid URL")

        http_requests.append(httpretty.last_request())

        return (200, headers, body)

    # All mocked endpoints share the same dispatching callback.
    for url in (CONFLUENCE_CONTENTS_URL,
                CONFLUENCE_HISTORICAL_CONTENT_1,
                CONFLUENCE_HISTORICAL_CONTENT_2,
                CONFLUENCE_HISTORICAL_CONTENT_ATT):
        httpretty.register_uri(
            httpretty.GET,
            url,
            responses=[httpretty.Response(body=request_callback)])

    return http_requests
Code example #4
0
    def test_fetch_pinned_from_archive(self):
        """Test whether the right list of topics is returned from the archive when some topics are pinned"""

        bodies_topics = [read_file('data/discourse/discourse_topics_pinned.json'),
                         read_file('data/discourse/discourse_topics_empty.json')]
        body_topic_1148 = read_file('data/discourse/discourse_topic_1148.json')
        body_topic_1149 = read_file('data/discourse/discourse_topic_1149.json')
        body_topic_1150 = read_file('data/discourse/discourse_topic_1150.json')
        body_post = read_file('data/discourse/discourse_post.json')

        def request_callback(method, uri, headers):
            # Map each mocked endpoint to its fixture; the topics listing
            # is consumed in order (pinned page first, then empty page).
            if uri.startswith(DISCOURSE_TOPICS_URL):
                body = bodies_topics.pop(0)
            elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
                body = body_topic_1148
            elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
                body = body_topic_1149
            elif uri.startswith(DISCOURSE_TOPIC_URL_1150):
                body = body_topic_1150
            elif uri.startswith(DISCOURSE_POST_URL_1) or \
                    uri.startswith(DISCOURSE_POST_URL_2):
                body = body_post
            else:
                # A bare `raise` with no active exception fails with an
                # unrelated RuntimeError; raise an explicit error instead.
                raise Exception("no valid URL")
            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPICS_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1148,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1149,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1150,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_POST_URL_1,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_POST_URL_2,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])

        # On this tests two topics will be retrieved.
        # One of them was pinned but the date is in range.
        from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)
        self._test_fetch_from_archive(from_date=from_date)
Code example #5
0
def setup_mock_redmine_server(max_failures=0):
    """Setup a mock Redmine HTTP server.

    Registers a callback for every Redmine URL under test and returns
    the list that will accumulate the HTTP requests received.

    :param max_failures: number of times the issue 7311 endpoint replies
        with a 500 error before serving the fixture (to test retries)
    """

    http_requests = []
    failures = max_failures

    issues_body = read_file('data/redmine/redmine_issues.json', 'rb')
    issues_next_body = read_file('data/redmine/redmine_issues_next.json', 'rb')
    issues_empty_body = read_file('data/redmine/redmine_issues_empty.json', 'rb')
    issue_2_body = read_file('data/redmine/redmine_issue_2.json', 'rb')
    issue_5_body = read_file('data/redmine/redmine_issue_5.json', 'rb')
    issue_9_body = read_file('data/redmine/redmine_issue_9.json', 'rb')
    issue_7311_body = read_file('data/redmine/redmine_issue_7311.json', 'rb')
    user_3_body = read_file('data/redmine/redmine_user_3.json', 'rb')
    user_4_body = read_file('data/redmine/redmine_user_4.json', 'rb')
    user_24_body = read_file('data/redmine/redmine_user_24.json', 'rb')
    user_25_body = read_file('data/redmine/redmine_user_25.json', 'rb')

    def request_callback(method, uri, headers):
        nonlocal failures

        status = 200
        last_request = httpretty.last_request()
        params = last_request.querystring

        if uri.startswith(REDMINE_ISSUES_URL):
            # Paginated issue listing: the fixture served depends on the
            # 'updated_on' filter and the 'offset' pagination parameter.
            updated_on = params['updated_on'][0]
            offset = params['offset'][0]

            if updated_on == '>=1970-01-01T00:00:00Z' and offset == '0':
                body = issues_body
            elif updated_on == '>=1970-01-01T00:00:00Z' and offset == '3':
                body = issues_next_body
            elif updated_on == '>=2016-07-27T00:00:00Z' and offset == '0':
                body = issues_next_body
            elif updated_on == '>=2011-12-08T17:58:37Z' and offset == '0':
                body = issues_next_body
            else:
                body = issues_empty_body
        elif uri.startswith(REDMINE_ISSUE_2_URL):
            body = issue_2_body
        elif uri.startswith(REDMINE_ISSUE_5_URL):
            body = issue_5_body
        elif uri.startswith(REDMINE_ISSUE_9_URL):
            body = issue_9_body
        elif uri.startswith(REDMINE_ISSUE_7311_URL):
            # Simulate transient server errors until `failures` runs out.
            if failures > 0:
                status = 500
                body = "Internal Server Error"
                failures -= 1
            else:
                body = issue_7311_body
        elif uri.startswith(REDMINE_USER_3_URL):
            body = user_3_body
        elif uri.startswith(REDMINE_USER_4_URL):
            body = user_4_body
        elif uri.startswith(REDMINE_USER_24_URL):
            body = user_24_body
        elif uri.startswith(REDMINE_USER_25_URL):
            body = user_25_body
        else:
            # A bare `raise` with no active exception fails with an
            # unrelated RuntimeError; raise an explicit error instead.
            raise Exception("no valid URL")

        http_requests.append(last_request)

        return (status, headers, body)

    for url in REDMINE_URL_LIST:
        httpretty.register_uri(httpretty.GET,
                               url,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])

    return http_requests
Code example #6
0
    def test_fetch(self):
        """Test whether a list of bugs is returned.

        Mocks the Bugzilla buglist (CSV), bug details (XML) and bug
        activity (HTML) endpoints, runs a full fetch and checks both
        the returned bugs and the requests that were issued.
        """

        requests = []
        bodies_csv = [
            read_file('data/bugzilla/bugzilla_buglist.csv'),
            read_file('data/bugzilla/bugzilla_buglist_next.csv'), ""
        ]
        bodies_xml = [
            read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
            read_file('data/bugzilla/bugzilla_bugs_details.xml', mode='rb'),
            read_file('data/bugzilla/bugzilla_bugs_details_next.xml',
                      mode='rb')
        ]
        bodies_html = [
            read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
            read_file('data/bugzilla/bugzilla_bug_activity_empty.html',
                      mode='rb')
        ]

        def request_callback(method, uri, headers):
            if uri.startswith(BUGZILLA_BUGLIST_URL):
                body = bodies_csv.pop(0)
            elif uri.startswith(BUGZILLA_BUG_URL):
                body = bodies_xml.pop(0)
            else:
                # Activity pages alternate between the populated and the
                # empty fixture, keyed off the number of prior requests.
                body = bodies_html[len(requests) % 2]

            requests.append(httpretty.last_request())

            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUGLIST_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(3)
                               ])
        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUG_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUG_ACTIVITY_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(7)
                               ])

        bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, max_bugs_csv=500)
        bugs = list(bg.fetch())

        self.assertEqual(len(bugs), 7)

        self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '15')
        self.assertEqual(len(bugs[0]['data']['activity']), 0)
        self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(bugs[0]['uuid'],
                         '5a8a1e25dfda86b961b4146050883cbfc928f8ec')
        self.assertEqual(bugs[0]['updated_on'], 1248276445.0)
        self.assertEqual(bugs[0]['category'], 'bug')
        self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

        self.assertEqual(bugs[6]['data']['bug_id'][0]['__text__'], '888')
        self.assertEqual(len(bugs[6]['data']['activity']), 14)
        self.assertEqual(bugs[6]['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(bugs[6]['uuid'],
                         'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
        self.assertEqual(bugs[6]['updated_on'], 1439404330.0)
        self.assertEqual(bugs[6]['category'], 'bug')
        self.assertEqual(bugs[6]['tag'], BUGZILLA_SERVER_URL)

        # Check requests
        expected = [{
            'ctype': ['xml']
        }, {
            'ctype': ['csv'],
            'limit': ['500'],
            'order': ['changeddate'],
            'chfieldfrom': ['1970-01-01 00:00:00']
        }, {
            'ctype': ['csv'],
            'limit': ['500'],
            'order': ['changeddate'],
            'chfieldfrom': ['2009-07-30 11:35:33']
        }, {
            'ctype': ['csv'],
            'limit': ['500'],
            'order': ['changeddate'],
            'chfieldfrom': ['2015-08-12 18:32:11']
        }, {
            'ctype': ['xml'],
            'id': ['15', '18', '17', '20', '19'],
            'excludefield': ['attachmentdata']
        }, {
            'id': ['15']
        }, {
            'id': ['18']
        }, {
            'id': ['17']
        }, {
            'id': ['20']
        }, {
            'id': ['19']
        }, {
            'ctype': ['xml'],
            'id': ['30', '888'],
            'excludefield': ['attachmentdata']
        }, {
            'id': ['30']
        }, {
            'id': ['888']
        }]

        self.assertEqual(len(requests), len(expected))

        for request, expected_qs in zip(requests, expected):
            self.assertDictEqual(request.querystring, expected_qs)
Code example #7
0
    def test_fetch_from_date(self):
        """Test whether a list of topics is returned from a given date"""

        requests_http = []

        bodies_topics = [read_file('data/discourse/discourse_topics.json'),
                         read_file('data/discourse/discourse_topics_empty.json')]
        body_topic_1148 = read_file('data/discourse/discourse_topic_1148.json')
        body_topic_1149 = read_file('data/discourse/discourse_topic_1149.json')
        body_post = read_file('data/discourse/discourse_post.json')

        def request_callback(method, uri, headers):
            # Map each mocked endpoint to its fixture; the topics listing
            # is consumed in order (first page, then empty page). Every
            # request is recorded so its query string can be checked.
            if uri.startswith(DISCOURSE_TOPICS_URL):
                body = bodies_topics.pop(0)
            elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
                body = body_topic_1148
            elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
                body = body_topic_1149
            elif uri.startswith(DISCOURSE_POST_URL_1) or \
                    uri.startswith(DISCOURSE_POST_URL_2):
                body = body_post
            else:
                # A bare `raise` with no active exception fails with an
                # unrelated RuntimeError; raise an explicit error instead.
                raise Exception("no valid URL")

            requests_http.append(httpretty.last_request())

            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPICS_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1148,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1149,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_POST_URL_1,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_POST_URL_2,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])

        # On this tests only one topic will be retrieved
        from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

        discourse = Discourse(DISCOURSE_SERVER_URL)
        topics = list(discourse.fetch(from_date=from_date))

        self.assertEqual(len(topics), 1)

        self.assertEqual(topics[0]['data']['id'], 1148)
        self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
        self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(topics[0]['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
        self.assertEqual(topics[0]['updated_on'], 1464144769.526)
        self.assertEqual(topics[0]['category'], 'topic')
        self.assertEqual(topics[0]['tag'], DISCOURSE_SERVER_URL)

        # Check requests
        expected = [
            {'page': ['0']},
            {},
            {},
            {}
        ]

        self.assertEqual(len(requests_http), len(expected))

        for request, expected_qs in zip(requests_http, expected):
            self.assertDictEqual(request.querystring, expected_qs)
Code example #8
0
def setup_http_server():
    """Setup a mock HTTP server for the Mattermost API.

    Registers handlers for the channel info endpoint, the paginated
    channel posts endpoint and two user endpoints. Every request served
    is recorded so tests can inspect what was sent.

    :returns: list that collects the `httpretty` request objects, in
        the order they were received
    """
    http_requests = []

    channel_info = read_file('data/mattermost/mattermost_channel.json', 'rb')
    channel_posts = read_file('data/mattermost/mattermost_posts.json', 'rb')
    channel_posts_next = read_file(
        'data/mattermost/mattermost_posts_next.json', 'rb')
    channel_posts_empty = read_file(
        'data/mattermost/mattermost_posts_empty.json', 'rb')
    user_sduenas = read_file('data/mattermost/mattermost_user_sduenas.json',
                             'rb')
    user_valcos = read_file('data/mattermost/mattermost_user_valcos.json',
                            'rb')

    # Pages of posts, indexed by the 'page' query parameter
    full_response = [channel_posts, channel_posts_next, channel_posts_empty]

    def request_callback(method, uri, headers):
        last_request = httpretty.last_request()
        params = last_request.querystring

        # Order matters: user URLs are checked before the posts URL
        # because they may share a common prefix with it
        if uri.startswith(MATTERMOST_USER_SDUENAS):
            body = user_sduenas
        elif uri.startswith(MATTERMOST_USER_VALCOS):
            body = user_valcos
        elif uri.startswith(MATTERMOST_CHANNEL_POSTS):
            # A missing 'page' parameter means the first page
            page = int(params['page'][0]) if 'page' in params else 0
            body = full_response[page]
        elif uri.startswith(MATTERMOST_CHANNEL_INFO):
            body = channel_info
        else:
            raise Exception("no valid URL")

        http_requests.append(last_request)

        return 200, headers, body

    # All endpoints share the same dispatching callback; the redundant
    # "for _ in range(1)" single-response list is gone
    for url in (MATTERMOST_CHANNEL_INFO, MATTERMOST_CHANNEL_POSTS,
                MATTERMOST_USER_SDUENAS, MATTERMOST_USER_VALCOS):
        httpretty.register_uri(
            httpretty.GET,
            url,
            responses=[httpretty.Response(body=request_callback)])

    return http_requests
コード例 #9
0
    def test_fetch(self):
        """Test whether a list of issues is returned"""

        requests = []

        bodies_json = [
            read_file('data/jira/jira_issues_page_1.json'),
            read_file('data/jira/jira_issues_page_2.json')
        ]

        body = read_file('data/jira/jira_fields.json')

        def request_callback(method, uri, headers):
            # Serve the pages of issues in order, recording each request
            body = bodies_json.pop(0)
            requests.append(httpretty.last_request())
            return 200, headers, body

        httpretty.register_uri(httpretty.GET,
                               JIRA_SEARCH_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])

        httpretty.register_uri(httpretty.GET,
                               JIRA_FIELDS_URL,
                               body=body,
                               status=200)

        jira = Jira(JIRA_SERVER_URL)

        issues = [issue for issue in jira.fetch()]

        body_json = json.loads(body)

        custom_fields = filter_custom_fields(body_json)

        # Two requests are expected: one per page of results
        expected_req = [{
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': [' updated > 0'],
            'startAt': ['0']
        }, {
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': [' updated > 0'],
            'startAt': ['2']
        }]

        for i in range(len(expected_req)):
            self.assertEqual(requests[i].method, 'GET')
            self.assertRegex(requests[i].path, '/rest/api/2/search')
            self.assertDictEqual(requests[i].querystring, expected_req[i])

        self.assertEqual(len(issues), 3)

        # Expected (uuid, updated_on, key, assignee) per fetched issue;
        # the previously copy-pasted per-issue assertion blocks (including
        # a 4x duplicated assignee check) are now a single data-driven loop
        expected_issues = [
            ('dfe008e19e2b720d1d377607680e90c250134164', 1457015567,
             'HELP-6043', 'user1'),
            ('830747ed8cc9af800fcd6284e9dccfdb11daf15b', 1457015417,
             'HELP-6042', 'user1'),
            ('2e988d555915991228d81144b018c8321d628265', 1457006245,
             'HELP-6041', 'user3')
        ]

        for issue, (uuid, updated_on, key, assignee) in zip(issues,
                                                            expected_issues):
            self.assertEqual(issue['origin'], 'http://example.com')
            self.assertEqual(issue['uuid'], uuid)
            self.assertEqual(issue['updated_on'], updated_on)
            self.assertEqual(issue['category'], 'issue')
            self.assertEqual(issue['tag'], 'http://example.com')
            self.assertEqual(issue['data']['key'], key)
            self.assertEqual(issue['data']['fields']['issuetype']['name'],
                             'extRequest')
            self.assertEqual(issue['data']['fields']['creator']['name'],
                             'user2')
            self.assertEqual(issue['data']['fields']['assignee']['name'],
                             assignee)

            # Custom fields must carry the id/name defined by the server
            for field in ('customfield_10301', 'customfield_10400',
                          'customfield_10600', 'customfield_10603'):
                self.assertEqual(issue['data']['fields'][field]['id'],
                                 custom_fields[field]['id'])
                self.assertEqual(issue['data']['fields'][field]['name'],
                                 custom_fields[field]['name'])
コード例 #10
0
def test_posting_job_data():
    """Check how job data POSTs behave across failing and successful
    server responses."""
    request_manager = RequestManager()
    httpretty.enable()

    _server = "%s" % config.server

    _data = {
        "textures": [
            {
                "name": "balcony_1k.hdr",
                "full_path": "/home/gaboss/blends/wall/textures/balcony_1k.hdr"
            },
            {
                "name": "branch_branch_BaseColor.png",
                "full_path": "/home/gaboss/blends/wall/textures/branch_branch_BaseColor.png"
            }
        ],
        "scene": {
            "name": "wall.blend",
            "full_path": "/home/gaboss/blends/wall/wall.blend"
        },
        "name": "test_job",
        "frames": {"start": 0, "end": 1},
        "anim_prepass": False,
        "output_format": "jpeg",
        "priority": 0,
        "sanity_check": False,
        "tile_job": True,
        "tiles": {"padding": 10, "y": 2, "x": 2},
        "tile_padding": 10
    }

    # Queue one response per scenario:
    # - connection failure
    # - server internal error
    # - response for bad request
    # - proper response
    queued_responses = [
        httpretty.Response(body=timeout_callback, status=500),
        httpretty.Response(body='', status=500),
        httpretty.Response(body='Bad Request', status=400),
        httpretty.Response(body='Created', status=200)
    ]
    httpretty.register_uri(httpretty.POST, _server,
                           responses=queued_responses)

    # Each of the three failing responses must surface as a
    # requests-level exception
    for _ in range(3):
        with pytest.raises(requests.exceptions.RequestException):
            request_manager.post_job_data(_data)

    # The final attempt succeeds and must carry the job data as JSON
    response = request_manager.post_job_data(_data)
    assert response.text == 'Created'

    last = httpretty.last_request()
    assert last.method == 'POST'
    assert last.headers['content-type'] == 'application/json'
    assert json.loads(last.body) == _data

    httpretty.disable()
コード例 #11
0
    def test_accept_request(self):
        """Check that a request is only accepted once its review is in
        state 'new'.

        The checker is run twice against the same mocked request: on the
        first pass the pending Factory request is still in review, so no
        state change is posted; on the second pass it is in state 'new'
        and the review is accepted.
        """

        # No lookup metadata is available for the target project
        httpretty.register_uri(httpretty.GET,
            APIURL + '/source/openSUSE:Factory/00Meta/lookup.yml',
            status = 404)

        # The request under review: a submit action with a pending
        # review by user 'factory-source'
        httpretty.register_uri(httpretty.GET,
            APIURL + "/request/770001",
            body = """
                <request id="770001" creator="chameleon">
                  <action type="submit">
                    <source project="Base:System" package="timezone" rev="481ecbe0dfc63ece3a1f1b5598f7d96c"/>
                    <target project="openSUSE:13.2" package="timezone"/>
                  </action>
                  <state name="new" who="factory-source" when="2014-10-08T12:06:07">
                    <comment>...</comment>
                  </state>
                  <review state="new" by_user="******"/>
                  <description>...</description>
                </request>
            """)

        # Source package info at the submitted revision
        httpretty.register_uri(httpretty.GET,
            APIURL + "/source/Base:System/timezone?view=info&rev=481ecbe0dfc63ece3a1f1b5598f7d96c",
            match_querystring = True,
            body = """
                <sourceinfo package="timezone"
                    rev="481ecbe0dfc63ece3a1f1b5598f7d96c"
                    srcmd5="481ecbe0dfc63ece3a1f1b5598f7d96c"
                    verifymd5="67bac34d29d70553239d33aaf92d2fdd">
                  <filename>timezone.spec</filename>
                </sourceinfo>
            """)

        # Package metadata in Factory and in the devel project
        httpretty.register_uri(httpretty.GET,
            APIURL + "/source/openSUSE:Factory/timezone/_meta",
            body = """
               <package name="timezone" project="openSUSE:Factory">
                 <title>timezone</title>
                 <description></description>
               </package>
            """)

        httpretty.register_uri(httpretty.GET,
            APIURL + "/source/Base:System/timezone/_meta",
            body = """
               <package name="timezone" project="Base:System">
                 <title>timezone</title>
                 <description></description>
               </package>
            """)

        # Current Factory source info (a different, newer revision)
        httpretty.register_uri(httpretty.GET,
            APIURL + "/source/openSUSE:Factory/timezone?view=info",
            match_querystring = True,
            body = """
                <sourceinfo package="timezone"
                    rev="89"
                    vrev="1"
                    srcmd5="a36605617cbeefa8168bf0ccf3058074"
                    verifymd5="a36605617cbeefa8168bf0ccf3058074">
                  <filename>timezone.spec</filename>
                </sourceinfo>
            """)

        httpretty.register_uri(httpretty.GET,
            APIURL + "/source/openSUSE:Factory/timezone/_history?limit=5",
            match_querystring = True,
            body = """
                <sourceinfo package="timezone"
                    rev="89"
                    vrev="1"
                    srcmd5="a36605617cbeefa8168bf0ccf3058074"
                    verifymd5="a36605617cbeefa8168bf0ccf3058074">
                  <filename>timezone.spec</filename>
                </sourceinfo>
            """)

        # Request search: first response shows the matching Factory
        # request still in review, second shows it in state 'new'
        httpretty.register_uri(httpretty.GET,
            APIURL + '/search/request',
            responses = [
                httpretty.Response( body = """
                    <collection matches="1">
                      <request id="254684" creator="chameleon">
                        <action type="submit">
                          <source project="Base:System" package="timezone" rev="481ecbe0dfc63ece3a1f1b5598f7d96c"/>
                          <target project="openSUSE:Factory" package="timezone"/>
                        </action>
                        <state name="review" who="factory-auto" when="2014-10-08T11:55:56">
                          <comment>...</comment>
                        </state>
                        <review state="new" by_group="opensuse-review-team">
                          <comment/>
                        </review>
                        <description> ... </description>
                      </request>
                    </collection>
                    """),
                httpretty.Response( body = """
                    <collection matches="1">
                      <request id="254684" creator="chameleon">
                        <action type="submit">
                          <source project="Base:System" package="timezone" rev="481ecbe0dfc63ece3a1f1b5598f7d96c"/>
                          <target project="openSUSE:Factory" package="timezone"/>
                        </action>
                        <state name="new" who="factory-auto" when="2014-10-08T11:55:56">
                          <comment>...</comment>
                        </state>
                        <description> ... </description>
                      </request>
                    </collection>
                    """)
                ])

        # Mutable holder so the POST callback can report what happened
        result = { 'status': None }

        def change_request(result, method, uri, headers):
            # Record whether the checker posted the expected
            # 'changereviewstate -> accepted' command
            query = parse_qs(urlparse(uri).query)
            if query == {'by_user': ['factory-source'], 'cmd': ['changereviewstate'], 'newstate': ['accepted']}:
                result['status'] = True
            else:
                result['status'] = 'ERROR'
            return (200, headers, '<status code="blah"/>')

        httpretty.register_uri(httpretty.POST,
            APIURL + "/request/770001",
            body = lambda method, uri, headers: change_request(result, method, uri, headers))

        # first time request is in in review
        self.checker.set_request_ids(['770001'])
        self.checker.check_requests()

        # No POST should have been made while the request is in review
        self.assertEqual(result['status'], None)

        # second time request is in state new so we can accept
        self.checker.set_request_ids(['770001'])
        self.checker.check_requests()

        self.assertTrue(result['status'])
コード例 #12
0
    def test_fetch_from_cache(self):
        """Test whether the cache works"""

        # Requests served so far; also drives which activity page is
        # returned (see request_callback below)
        requests = []
        bodies_csv = [read_file('data/bugzilla_buglist.csv'),
                      read_file('data/bugzilla_buglist_next.csv'),
                      ""]
        bodies_xml = [read_file('data/bugzilla_version.xml', mode='rb'),
                      read_file('data/bugzilla_bugs_details.xml', mode='rb'),
                      read_file('data/bugzilla_bugs_details_next.xml', mode='rb')]
        bodies_html = [read_file('data/bugzilla_bug_activity.html', mode='rb'),
                       read_file('data/bugzilla_bug_activity_empty.html', mode='rb')]

        def request_callback(method, uri, headers):
            # Buglist and bug-detail pages are consumed in order;
            # activity pages alternate full/empty based on how many
            # requests have been served so far (parity of the count)
            if uri.startswith(BUGZILLA_BUGLIST_URL):
                body = bodies_csv.pop(0)
            elif uri.startswith(BUGZILLA_BUG_URL):
                body = bodies_xml.pop(0)
            else:
                body = bodies_html[len(requests) % 2]

            requests.append(httpretty.last_request())

            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUGLIST_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(3)
                               ])
        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUG_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUG_ACTIVITY_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(7)
                               ])

        # First, we fetch the bugs from the server, storing them
        # in a cache
        cache = Cache(self.tmp_path)
        bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, cache=cache)

        bugs = [bug for bug in bg.fetch()]
        self.assertEqual(len(requests), 13)

        # Now, we get the bugs from the cache.
        # The contents should be the same and there won't be
        # any new request to the server
        cached_bugs = [bug for bug in bg.fetch_from_cache()]
        self.assertEqual(len(cached_bugs), len(bugs))

        self.assertEqual(len(cached_bugs), 7)

        # Spot-check the first and last cached bugs
        self.assertEqual(cached_bugs[0]['data']['bug_id'][0]['__text__'], '15')
        self.assertEqual(len(cached_bugs[0]['data']['activity']), 0)
        self.assertEqual(cached_bugs[0]['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(cached_bugs[0]['uuid'], '5a8a1e25dfda86b961b4146050883cbfc928f8ec')
        self.assertEqual(cached_bugs[0]['updated_on'], 1248276445.0)
        self.assertEqual(cached_bugs[0]['category'], 'bug')
        self.assertEqual(cached_bugs[0]['tag'], BUGZILLA_SERVER_URL)

        self.assertEqual(cached_bugs[6]['data']['bug_id'][0]['__text__'], '888')
        self.assertEqual(len(cached_bugs[6]['data']['activity']), 14)
        self.assertEqual(cached_bugs[6]['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(cached_bugs[6]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
        self.assertEqual(cached_bugs[6]['updated_on'], 1439404330.0)
        self.assertEqual(cached_bugs[6]['category'], 'bug')
        self.assertEqual(cached_bugs[6]['tag'], BUGZILLA_SERVER_URL)

        # Reading from the cache must not hit the server again
        self.assertEqual(len(requests), 13)
コード例 #13
0
def setup_http_server():
    """Setup a mock HTTP server for the Redmine API.

    Registers a dispatching callback for the issues listing endpoint
    (paginated by 'offset' and filtered by 'updated_on') and for four
    individual issue endpoints.

    :returns: list that collects the `httpretty` request objects, in
        the order they were received
    """
    http_requests = []

    issues_body = read_file('data/redmine/redmine_issues.json', 'rb')
    issues_next_body = read_file('data/redmine/redmine_issues_next.json', 'rb')
    issues_empty_body = read_file('data/redmine/redmine_issues_empty.json', 'rb')
    issue_2_body = read_file('data/redmine/redmine_issue_2.json', 'rb')
    issue_5_body = read_file('data/redmine/redmine_issue_5.json', 'rb')
    issue_9_body = read_file('data/redmine/redmine_issue_9.json', 'rb')
    issue_7311_body = read_file('data/redmine/redmine_issue_7311.json', 'rb')

    def request_callback(method, uri, headers):
        last_request = httpretty.last_request()
        params = last_request.querystring

        if uri.startswith(REDMINE_ISSUES_URL):
            # Pick the page of results from the date filter and offset
            updated_on = params['updated_on'][0]
            offset = params['offset'][0]

            if updated_on == '>=1970-01-01T00:00:00Z' and offset == '0':
                body = issues_body
            elif updated_on == '>=1970-01-01T00:00:00Z' and offset == '3':
                body = issues_next_body
            elif updated_on == '>=2016-07-27T00:00:00Z' and offset == '0':
                body = issues_next_body
            else:
                body = issues_empty_body
        elif uri.startswith(REDMINE_ISSUE_2_URL):
            body = issue_2_body
        elif uri.startswith(REDMINE_ISSUE_5_URL):
            body = issue_5_body
        elif uri.startswith(REDMINE_ISSUE_9_URL):
            body = issue_9_body
        elif uri.startswith(REDMINE_ISSUE_7311_URL):
            body = issue_7311_body
        else:
            # A bare `raise` here had no active exception and would fail
            # with "RuntimeError: No active exception to re-raise";
            # raise an explicit, informative error instead
            raise Exception("no valid URL")

        http_requests.append(last_request)

        return (200, headers, body)

    # All endpoints share the same dispatching callback
    urls = [REDMINE_ISSUES_URL, REDMINE_ISSUE_2_URL, REDMINE_ISSUE_5_URL,
            REDMINE_ISSUE_9_URL, REDMINE_ISSUE_7311_URL]

    for url in urls:
        httpretty.register_uri(
            httpretty.GET,
            url,
            responses=[httpretty.Response(body=request_callback)])

    return http_requests
コード例 #14
0
    def test_get_issues(self):
        """Test get issues API call"""

        from_date = str_to_datetime('2015-01-01')

        requests = []

        bodies_json = [
            read_file('data/jira/jira_issues_page_1.json'),
            read_file('data/jira/jira_issues_page_2.json')
        ]

        # Work on a copy so bodies_json keeps the pages for the final
        # comparison (the original code duplicated this copy twice)
        bodies = list(bodies_json)

        def request_callback(method, uri, headers):
            # Serve the pages in order, recording each request
            body = bodies.pop(0)
            requests.append(httpretty.last_request())
            return 200, headers, body

        httpretty.register_uri(httpretty.GET,
                               JIRA_SEARCH_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])

        client = JiraClient(url='http://example.com',
                            project='perceval',
                            user='******',
                            password='******',
                            verify=False,
                            cert=None,
                            max_results=2)

        pages = [page for page in client.get_issues(from_date)]

        # One request per page, paginated by 'startAt' with the
        # configured max_results of 2
        expected_req = [{
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': [
                'project = perceval AND updated > 1420070400000 order by updated asc'
            ],
            'maxResults': ['2'],
            'startAt': ['0']
        }, {
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': [
                'project = perceval AND updated > 1420070400000 order by updated asc'
            ],
            'maxResults': ['2'],
            'startAt': ['2']
        }]

        self.assertEqual(len(pages), 2)

        self.assertEqual(requests[0].method, 'GET')
        self.assertRegex(requests[0].path, '/rest/api/2/search')
        self.assertDictEqual(requests[0].querystring, expected_req[0])

        self.assertEqual(requests[1].method, 'GET')
        self.assertRegex(requests[1].path, '/rest/api/2/search')
        self.assertDictEqual(requests[1].querystring, expected_req[1])

        # Pages are returned verbatim, in order
        self.assertEqual(pages[0], bodies_json[0])
        self.assertEqual(pages[1], bodies_json[1])
コード例 #15
0
    def test_simple_scan(self):
        """Run a full scan life-cycle against a fully mocked scanner
        REST API: start a scan, poll its status, read the log via both
        pagination styles, and inspect the reported findings."""
        #
        # Mock all HTTP responses
        #
        httpretty.register_uri(httpretty.GET,
                               self.get_url('/'),
                               body=INDEX_RESPONSE,
                               content_type='application/json')

        httpretty.register_uri(httpretty.GET,
                               self.get_url('/version'),
                               body=VERSION_RESPONSE,
                               content_type='application/json')

        httpretty.register_uri(httpretty.POST,
                               self.get_url('/scans/'),
                               body=SCAN_START_RESPONSE,
                               content_type='application/json',
                               status=201)

        httpretty.register_uri(httpretty.GET,
                               self.get_url('/scans/0/status'),
                               body=SCAN_STATUS_RESPONSE,
                               content_type='application/json')

        # Unknown scan id: used to exercise the error handling below
        httpretty.register_uri(httpretty.GET,
                               self.get_url('/scans/1/status'),
                               body=NOT_FOUND,
                               content_type='application/json',
                               status=404)

        httpretty.register_uri(
            httpretty.GET,
            self.get_url('/scans/0/log'),
            responses=[
                #
                #    Responses for ?page pagination
                #
                httpretty.Response(body=LOG_RESPONSE,
                                   content_type='application/json',
                                   status=200),
                httpretty.Response(body=EMPTY_LOG_RESPONSE,
                                   content_type='application/json',
                                   status=200),
                #
                #    Responses for ?id=0 pagination
                #
                httpretty.Response(body=LOG_RESPONSE,
                                   content_type='application/json',
                                   status=200),
                httpretty.Response(body=EMPTY_LOG_RESPONSE,
                                   content_type='application/json',
                                   status=200),
            ])

        httpretty.register_uri(httpretty.GET,
                               self.get_url('/scans/0/kb/'),
                               body=FINDINGS_RESPONSE,
                               content_type='application/json')

        httpretty.register_uri(httpretty.GET,
                               self.get_url('/scans/0/kb/0'),
                               body=FINDINGS_DETAIL_RESPONSE,
                               content_type='application/json')

        httpretty.register_uri(httpretty.GET,
                               self.get_url('/scans/0/traffic/45'),
                               body=TRAFFIC_DETAIL_RESPONSE_45,
                               content_type='application/json')

        httpretty.register_uri(httpretty.GET,
                               self.get_url('/scans/0/traffic/46'),
                               body=TRAFFIC_DETAIL_RESPONSE_46,
                               content_type='application/json')

        conn = Connection(self.api_url)

        self.assertTrue(conn.can_access_api())

        #
        #   Start a scan and assert
        #
        scan = Scan(conn)
        self.assertIsNone(scan.scan_id)

        scan.start('mock_profile', [TARGET_URL])

        self.assertJSONEquals(httpretty.last_request(), SCAN_START_REQUEST)
        self.assertEqual(scan.scan_id, 0)

        #
        #   Get scan status
        #
        json_data = scan.get_status()

        self.assertEqual(json_data['is_running'], True)
        self.assertEqual(json_data['is_paused'], False)
        self.assertEqual(json_data['exception'], None)

        #
        #   Test the error handling: an unknown scan id must raise
        #
        scan.scan_id = 1
        self.assertRaises(APIException, scan.get_status)

        scan.scan_id = 0

        # Both log retrieval styles (?page and ?id=) must yield the
        # same two entries; defined once instead of the former
        # copy-pasted duplicate blocks
        expected_log_entries = [
            LogEntry('debug', 'one', '23-Jun-2015 16:21', None, 0),
            LogEntry('vulnerability', 'two', '23-Jun-2015 16:22', 'High', 1)
        ]

        #
        #   Get the log (?page pagination)
        #
        log = scan.get_log()
        self.assertIsInstance(log, Log)

        received_log_entries = []
        for log_entry in log:
            self.assertIsInstance(log_entry, LogEntry)
            received_log_entries.append(log_entry)

        self.assertEqual(received_log_entries, expected_log_entries)

        #
        #   Get the log using the ids (?id= pagination)
        #
        log = scan.get_log()
        self.assertIsInstance(log, Log)

        received_log_entries = []
        for log_entry in log.get_by_start_id(0):
            self.assertIsInstance(log_entry, LogEntry)
            received_log_entries.append(log_entry)

        self.assertEqual(received_log_entries, expected_log_entries)

        #
        #   Get the vulnerabilities
        #
        findings = scan.get_findings()
        self.assertIsInstance(findings, list)
        self.assertEqual(len(findings), 1)

        finding = findings[0]
        self.assertEqual(finding.name, 'SQL injection')
        self.assertIsInstance(finding, Finding)

        all_traffic = finding.get_traffic()
        self.assertIsInstance(all_traffic, list)
        self.assertEqual(len(all_traffic), 2)

        traffic = all_traffic[0]
        self.assertIn('GET ', traffic.get_request())
        self.assertIn('<html>', traffic.get_response())
コード例 #16
0
    def test_fetch_from_cache(self):
        """Test whether a list of issues is returned from cache.

        Fetches issues from a mocked JIRA server (populating the cache),
        then fetches again from the cache and checks that both result
        sets are equivalent and that only the original two search
        requests hit the server.
        """

        requests = []

        bodies_json = [
            read_file('data/jira/jira_issues_page_1.json'),
            read_file('data/jira/jira_issues_page_2.json')
        ]

        body = read_file('data/jira/jira_fields.json')

        def request_callback(method, uri, headers):
            # Serve the two result pages in order, recording each request
            body = bodies_json.pop(0)
            requests.append(httpretty.last_request())
            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               JIRA_SEARCH_URL,
                               responses=[httpretty.Response(body=request_callback) \
                                          for _ in range(2)])
        httpretty.register_uri(httpretty.GET,
                               JIRA_FIELDS_URL,
                               body=body,
                               status=200)

        # First, we fetch the issues from the server, storing them
        # in a cache
        cache = Cache(self.tmp_path)
        jira = Jira(JIRA_SERVER_URL, cache=cache)

        issues = [issue for issue in jira.fetch()]
        # 'timestamp' is not compared below; drop it from the first item
        del issues[0]['timestamp']

        # Now, we get the issues from the cache.
        # The contents should be the same and there won't be
        # any new request to the server
        cache_issues = [cache_issue for cache_issue in jira.fetch_from_cache()]

        expected_req = [{
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': [' updated > 0'],
            'startAt': ['0']
        }, {
            'expand': ['renderedFields,transitions,operations,changelog'],
            'jql': [' updated > 0'],
            'startAt': ['2']
        }]

        for i in range(len(expected_req)):
            self.assertEqual(requests[i].method, 'GET')
            self.assertRegex(requests[i].path, '/rest/api/2/search')
            self.assertDictEqual(requests[i].querystring, expected_req[i])

        self.assertEqual(len(issues), len(cache_issues))

        for i in range(len(cache_issues)):
            self.assertEqual(issues[i]['origin'], cache_issues[i]['origin'])
            self.assertEqual(issues[i]['uuid'], cache_issues[i]['uuid'])
            self.assertEqual(issues[i]['updated_on'],
                             cache_issues[i]['updated_on'])
            self.assertEqual(issues[i]['category'],
                             cache_issues[i]['category'])
            # Fix: compare the i-th issue's tag (was hard-coded issues[1])
            self.assertEqual(issues[i]['tag'], cache_issues[i]['tag'])
            self.assertEqual(issues[i]['data']['key'],
                             cache_issues[i]['data']['key'])
            self.assertEqual(
                issues[i]['data']['fields']['issuetype']['name'],
                cache_issues[i]['data']['fields']['issuetype']['name'])
            self.assertEqual(
                issues[i]['data']['fields']['creator']['name'],
                cache_issues[i]['data']['fields']['creator']['name'])
            self.assertEqual(
                issues[i]['data']['fields']['assignee']['name'],
                cache_issues[i]['data']['fields']['assignee']['name'])
コード例 #17
0
    def test_fetch_auth(self):
        """Test whether authentication works

        Logs into a mocked Bugzilla server, fetches two bugs updated
        since 2015-01-01 and verifies both the parsed items and the
        exact sequence of HTTP requests that were issued.
        """

        requests = []
        # CSV buglist pages: one page with results, then an empty page
        bodies_csv = [read_file('data/bugzilla/bugzilla_buglist_next.csv'), ""]
        # XML payloads: server version first, then the bug details
        bodies_xml = [
            read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
            read_file('data/bugzilla/bugzilla_bugs_details_next.xml',
                      mode='rb')
        ]
        # HTML activity pages: one populated, one empty
        bodies_html = [
            read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
            read_file('data/bugzilla/bugzilla_bug_activity_empty.html',
                      mode='rb')
        ]

        def request_callback(method, uri, headers):
            # Pick the canned body by endpoint prefix; activity pages
            # alternate based on how many requests were served so far
            # (parity of len(requests) + 1 selects which HTML body).
            if uri.startswith(BUGZILLA_LOGIN_URL):
                body = "index.cgi?logout=1"
            elif uri.startswith(BUGZILLA_BUGLIST_URL):
                body = bodies_csv.pop(0)
            elif uri.startswith(BUGZILLA_BUG_URL):
                body = bodies_xml.pop(0)
            else:
                body = bodies_html[(len(requests) + 1) % 2]

            # Record every request so the sequence can be checked below
            requests.append(httpretty.last_request())

            return (200, headers, body)

        httpretty.register_uri(
            httpretty.POST,
            BUGZILLA_LOGIN_URL,
            responses=[httpretty.Response(body=request_callback)])
        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUGLIST_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        httpretty.register_uri(
            httpretty.GET,
            BUGZILLA_BUG_URL,
            responses=[httpretty.Response(body=request_callback)])
        httpretty.register_uri(httpretty.GET,
                               BUGZILLA_BUG_ACTIVITY_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])

        from_date = datetime.datetime(2015, 1, 1)

        # Credentials are placeholders; the actual login POST body
        # is asserted against auth_expected below
        bg = Bugzilla(BUGZILLA_SERVER_URL,
                      user='******',
                      password='******')
        bugs = [bug for bug in bg.fetch(from_date=from_date)]

        # Two bugs are expected: one with 14 activity events, one with none
        self.assertEqual(len(bugs), 2)
        self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '30')
        self.assertEqual(len(bugs[0]['data']['activity']), 14)
        self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(bugs[0]['uuid'],
                         '4b166308f205121bc57704032acdc81b6c9bb8b1')
        self.assertEqual(bugs[0]['updated_on'], 1426868155.0)
        self.assertEqual(bugs[0]['category'], 'bug')
        self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

        self.assertEqual(bugs[1]['data']['bug_id'][0]['__text__'], '888')
        self.assertEqual(len(bugs[1]['data']['activity']), 0)
        self.assertEqual(bugs[1]['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(bugs[1]['uuid'],
                         'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
        self.assertEqual(bugs[1]['updated_on'], 1439404330.0)
        self.assertEqual(bugs[1]['category'], 'bug')
        self.assertEqual(bugs[1]['tag'], BUGZILLA_SERVER_URL)

        # Check requests
        auth_expected = {
            'Bugzilla_login': ['*****@*****.**'],
            'Bugzilla_password': ['1234'],
            'GoAheadAndLogIn': ['Log in']
        }
        # Expected querystrings, in order: version probe, two buglist
        # pages (second starts from the last change date), bug details,
        # then one activity page per bug
        expected = [{
            'ctype': ['xml']
        }, {
            'ctype': ['csv'],
            'limit': ['10000'],
            'order': ['changeddate'],
            'chfieldfrom': ['2015-01-01 00:00:00']
        }, {
            'ctype': ['csv'],
            'limit': ['10000'],
            'order': ['changeddate'],
            'chfieldfrom': ['2015-08-12 18:32:11']
        }, {
            'ctype': ['xml'],
            'id': ['30', '888'],
            'excludefield': ['attachmentdata']
        }, {
            'id': ['30']
        }, {
            'id': ['888']
        }]

        # Check authentication request
        auth_req = requests.pop(0)
        self.assertDictEqual(auth_req.parsed_body, auth_expected)

        # Check the rests of the headers
        self.assertEqual(len(requests), len(expected))

        for i in range(len(expected)):
            self.assertDictEqual(requests[i].querystring, expected[i])
コード例 #18
0
def setup_http_server():
    """Set up a mocked Bugzilla REST server and return the request log."""

    recorded_requests = []

    # Canned payloads: three pages of bugs, plus populated/empty
    # variants for comments, history and attachments.
    bug_pages = [
        read_file('data/bugzilla/bugzilla_rest_bugs.json', mode='rb'),
        read_file('data/bugzilla/bugzilla_rest_bugs_next.json', mode='rb'),
        read_file('data/bugzilla/bugzilla_rest_bugs_empty.json', mode='rb')
    ]
    comments = [
        read_file('data/bugzilla/bugzilla_rest_bugs_comments.json', mode='rb'),
        read_file('data/bugzilla/bugzilla_rest_bugs_comments_empty.json',
                  mode='rb')
    ]
    history = [
        read_file('data/bugzilla/bugzilla_rest_bugs_history.json', mode='rb'),
        read_file('data/bugzilla/bugzilla_rest_bugs_history_empty.json',
                  mode='rb')
    ]
    attachments = [
        read_file('data/bugzilla/bugzilla_rest_bugs_attachments.json',
                  mode='rb'),
        read_file('data/bugzilla/bugzilla_rest_bugs_attachments_empty.json',
                  mode='rb')
    ]

    def request_callback(method, uri, headers):
        # Select the payload by URI prefix; bug 947945 gets the empty
        # variants, any other URI falls through to the next bugs page.
        if uri.startswith(BUGZILLA_BUGS_COMMENTS_1273442_URL):
            payload = comments[0]
        elif uri.startswith(BUGZILLA_BUGS_HISTORY_1273442_URL):
            payload = history[0]
        elif uri.startswith(BUGZILLA_BUGS_ATTACHMENTS_1273442_URL):
            payload = attachments[0]
        elif uri.startswith(BUGZILLA_BUG_947945_URL):
            if uri.find('comment') > 0:
                payload = comments[1]
            elif uri.find('history') > 0:
                payload = history[1]
            else:
                payload = attachments[1]
        else:
            payload = bug_pages.pop(0)

        # Record every request so callers can assert on the sequence
        recorded_requests.append(httpretty.last_request())

        return (200, headers, payload)

    # The bugs endpoint is paginated: three responses
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])

    # Every remaining endpoint is hit once
    single_response_urls = [
        BUGZILLA_BUGS_COMMENTS_1273442_URL,
        BUGZILLA_BUGS_HISTORY_1273442_URL,
        BUGZILLA_BUGS_ATTACHMENTS_1273442_URL
    ]
    single_response_urls += [BUGZILLA_BUG_947945_URL + suffix
                             for suffix in ('comment', 'history', 'attachment')]

    for target_url in single_response_urls:
        httpretty.register_uri(
            httpretty.GET,
            target_url,
            responses=[httpretty.Response(body=request_callback)])

    return recorded_requests
コード例 #19
0
ファイル: WWWoman.py プロジェクト: kavod/WWWoman
    def register_uri(self,
        uri,
        body='WWWoman 8)',
        template=None,
        method=httpretty.GET,
        adding_headers=None,
        forcing_headers=None,
        status=200,
        responses=None,
        match_querystring=False,
        priority=0,
        baseuri=None,
        templatedir=None,
        **headers):
        """
        Register a new URI.
        Parameters are the same than ``register_uri`` method of httpretty class\
        except the extra parameters below

        .. seealso:: \
        `httpretty documentation <https://github.com/gabrielfalcao/HTTPretty>`_\
        for regular parameters of ``register_uri``

        :param template: ``template`` can be a path or a list of path to a \
        template file.\
        If template is not None, the ``body`` or the ``responses`` parameters\
        will be ignored in favor of the content of the template file(s).

        :type template: string or list of strings

        :type baseuri: string

        :param baseuri: if ``baseuri`` is not None, relative uri path could be\
        entered instead of complete URI.

        .. note:: if ``uri`` is an absolute uri, the ``baseuri`` is ignored

        :param templatedir: if ``templatedir`` is note None, ``template``\
        relative path will be used as base instead of working directory (cwd)
        :type templatedir: string

        :Example:

        >>> import httpretty
        >>> import requests
        >>> import wwwoman
        >>> httpretty.httpretty.enable()
        >>> # Normal use:
        >>> wwwoman.wwwoman.register_uri(uri="http://test.io",body="My test!")
        >>> requests.get('http://test.io').content
        'My test!'
        >>> # Use of ``baseuri`` parameter
        >>> wwwoman.wwwoman.register_uri(
                uri='test.html',
                baseuri='http://test.io',
                body="My second test"
            )
        >>> requests.get('http://test.io/test.html').content
        'My second test'
        >>> # Use of ``template`` parameter
        >>> ## test.io will return the content of ``tests/templates/index.html``
        >>> wwwoman.wwwoman.register_uri(
                uri='http://test.io',
                template='index.html',
                templatedir='tests/templates'
            )
        'The index.html content'
        """
        # NOTE(review): `basestring` and the `urlparse` module are
        # Python 2 only -- confirm the target interpreter before porting.
        if templatedir is None:
            templatedir = self.template_path
        if template is not None:
            if isinstance(template, list):
                # One canned response per entry: a path (file content
                # becomes the body) or a dict of Response kwargs.
                responses = []
                for item in template:
                    if isinstance(item, basestring):
                        item = _translatePath(templatedir, item)
                        with open(item, 'rb') as fd:
                            body = fd.read()
                        responses.append(httpretty.Response(body=body))
                    elif isinstance(item, dict):
                        responses.append(httpretty.Response(**item))
            else:
                # Single template file: its content replaces ``body``
                template = _translatePath(templatedir, template)
                with open(template, 'rb') as fd:
                    body = fd.read()

        # Fix: forward the extra keyword headers as **headers. httpretty's
        # register_uri collects them via **headers; passing headers=headers
        # registered a single bogus header literally named 'headers'.
        httpretty.register_uri(
            method=method,
            uri=urlparse.urljoin(baseuri, uri),
            body=body,
            adding_headers=adding_headers,
            forcing_headers=forcing_headers,
            status=status,
            responses=responses,
            match_querystring=match_querystring,
            priority=priority,
            **headers
        )
コード例 #20
0
    def routes(cls, version="1.28", empty=False, response_num=200):
        """Configure in http the routes to be served

        :param version: MediaWiki version to emulate; must be one of
            TESTED_VERSIONS ("1.28" or "1.23")
        :param empty: if True, serve empty page/revision listings
        :param response_num: HTTP status code returned by every route
        """

        assert (version in TESTED_VERSIONS)

        if version == "1.28":
            mediawiki_siteinfo = read_file(
                'data/mediawiki/mediawiki_siteinfo_1.28.json')
        elif version == "1.23":
            mediawiki_siteinfo = read_file(
                'data/mediawiki/mediawiki_siteinfo_1.23.json')
        mediawiki_namespaces = read_file(
            'data/mediawiki/mediawiki_namespaces.json')
        # For >= 1.27 in full and incremental mode, the same file
        mediawiki_pages_allrevisions = read_file(
            'data/mediawiki/mediawiki_pages_allrevisions.json')
        if empty:
            mediawiki_pages_allrevisions = read_file(
                'data/mediawiki/mediawiki_pages_allrevisions_empty.json')

        # For < 1.27 in full download
        mediawiki_pages_all = read_file(
            'data/mediawiki/mediawiki_pages_all.json')
        if empty:
            mediawiki_pages_all = read_file(
                'data/mediawiki/mediawiki_pages_all_empty.json')

        # For < 1.27 in incremental download
        mediawiki_pages_recent_changes = read_file(
            'data/mediawiki/mediawiki_pages_recent_changes.json')

        # Pages with revisions
        mediawiki_page_476583 = read_file(
            'data/mediawiki/mediawiki_page_476583_revisions.json')
        mediawiki_page_592384 = read_file(
            'data/mediawiki/mediawiki_page_592384_revisions.json')
        mediawiki_page_476589 = read_file(
            'data/mediawiki/mediawiki_page_476589_revisions.json')
        mediawiki_page_476590 = read_file(
            'data/mediawiki/mediawiki_page_476590_revisions.json')

        def request_callback(method, uri, headers):
            # Dispatch on the MediaWiki API query parameters to pick
            # the canned fixture for this request.
            # NOTE(review): 'body' is unbound if a 'list' or 'pageids'
            # query matches none of the known values -- presumably the
            # fixtures never trigger that; confirm before extending.
            params = urllib.parse.parse_qs(urllib.parse.urlparse(uri).query)
            if 'meta' in params and 'siteinfo' in params['meta']:
                body = mediawiki_siteinfo
                if 'siprop' in params:
                    body = mediawiki_namespaces
            elif 'list' in params:
                if 'allpages' in params['list']:
                    body = mediawiki_pages_all
                elif 'recentchanges' in params['list']:
                    body = mediawiki_pages_recent_changes
                elif 'allrevisions' in params['list']:
                    body = mediawiki_pages_allrevisions
            elif 'pageids' in params:
                if params['pageids'][0] == '476583':
                    body = mediawiki_page_476583
                elif params['pageids'][0] == '592384':
                    body = mediawiki_page_592384
                elif params['pageids'][0] == '476589':
                    body = mediawiki_page_476589
                elif params['pageids'][0] == '476590':
                    body = mediawiki_page_476590
            else:
                # Unexpected URI: bare raise outside an except block
                # deliberately aborts the test (raises RuntimeError)
                raise

            HTTPServer.requests_http.append(httpretty.last_request())

            return (response_num, headers, body)

        httpretty.register_uri(
            httpretty.GET,
            MEDIAWIKI_API,
            responses=[httpretty.Response(body=request_callback)])
コード例 #21
0
    def test_fetch_pinned(self):
        """Test whether the right list of topics is returned when some topics are pinned.

        Two topics must be retrieved: one of them was pinned but its
        date is within the requested range.
        """

        # Topics list pages (one with pinned entries, then an empty page)
        bodies_topics = [read_file('data/discourse/discourse_topics_pinned.json'),
                         read_file('data/discourse/discourse_topics_empty.json')]
        body_topic_1148 = read_file('data/discourse/discourse_topic_1148.json')
        body_topic_1149 = read_file('data/discourse/discourse_topic_1149.json')
        body_topic_1150 = read_file('data/discourse/discourse_topic_1150.json')
        body_post = read_file('data/discourse/discourse_post.json')

        def request_callback(method, uri, headers):
            # Serve the canned payload matching the requested endpoint
            if uri.startswith(DISCOURSE_TOPICS_URL):
                body = bodies_topics.pop(0)
            elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
                body = body_topic_1148
            elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
                body = body_topic_1149
            elif uri.startswith(DISCOURSE_TOPIC_URL_1150):
                body = body_topic_1150
            elif uri.startswith(DISCOURSE_POST_URL_1) or \
                    uri.startswith(DISCOURSE_POST_URL_2):
                body = body_post
            else:
                raise
            return (200, headers, body)

        # The topics endpoint is paginated (two pages); every other
        # endpoint serves a single response.
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPICS_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        for url in (DISCOURSE_TOPIC_URL_1148,
                    DISCOURSE_TOPIC_URL_1149,
                    DISCOURSE_TOPIC_URL_1150,
                    DISCOURSE_POST_URL_1,
                    DISCOURSE_POST_URL_2):
            httpretty.register_uri(httpretty.GET,
                                   url,
                                   responses=[
                                       httpretty.Response(body=request_callback)
                                   ])

        # On this tests two topics will be retrieved.
        # One of them was pinned but the date is in range.
        from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

        discourse = Discourse(DISCOURSE_SERVER_URL)
        topics = [topic for topic in discourse.fetch(from_date=from_date)]

        self.assertEqual(len(topics), 2)

        self.assertEqual(topics[0]['data']['id'], 1148)
        self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
        self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(topics[0]['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
        self.assertEqual(topics[0]['updated_on'], 1464144769.526)
        self.assertEqual(topics[0]['category'], 'topic')
        self.assertEqual(topics[0]['tag'], DISCOURSE_SERVER_URL)

        self.assertEqual(topics[1]['data']['id'], 1150)
        self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 2)
        self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(topics[1]['uuid'], '373b597a2a389112875c3e544f197610373a7283')
        self.assertEqual(topics[1]['updated_on'], 1464274870.809)
        self.assertEqual(topics[1]['category'], 'topic')
        # Fix: check the second topic's tag (was mistakenly topics[0])
        self.assertEqual(topics[1]['tag'], DISCOURSE_SERVER_URL)
コード例 #22
0
def test_scenario_2(d=None):
    """Scenario 2: a GET with an expired cookie gets 403, the downloader
    re-authenticates via the HTML form (POST), then retries the GET with
    the fresh cookie.

    :param d: working directory where the downloaded file is written
    """
    fpath = opj(d, 'crap.txt')

    credential = FakeCredential2(name='test', url=None)
    credentials = credential()
    authenticator = HTMLFormAuthenticator(
        dict(username="******", password="******", submit="CustomLogin"))

    def request_get_with_expired_cookie_callback(request, uri, headers):
        # First GET: the client must present a cookie whose expiration
        # date is already in the past; respond 403 to force re-auth
        assert_in('Cookie', request.headers)
        cookie_vals = request.headers['Cookie'].split('; ')
        for v in cookie_vals:
            if v.startswith('expires'):
                expiration_date = v.split('=')[1]
                expiration_epoch_time = timegm(
                    time.strptime(expiration_date,
                                  "%a, %d %b %Y %H:%M:%S GMT"))
                assert_greater(time.time(), expiration_epoch_time)
        return (403, headers, "cookie was expired")

    def request_post_callback(request, uri, headers):
        # Login POST: must carry the form credentials and no cookie
        post_params = request.parsed_body
        assert_equal(credentials['password'], post_params['password'][0])
        assert_equal(credentials['user'], post_params['username'][0])
        assert_not_in('Cookie', request.headers)
        return (200, headers,
                "Got {} response from {}".format(request.method, uri))

    def request_get_callback(request, uri, headers):
        # Retried GET: must carry the fresh cookie set by the POST
        assert_equal(request.body, b'')
        assert_in('Cookie', request.headers)
        assert_equal(request.headers['Cookie'], test_cookie)
        return (200, headers, "correct body")

    # SCENARIO 2
    # outdated cookie provided to GET, return 403 (access denied)
    # then like SCENARIO 1 again:
    # POST credentials and get a new cookie
    # which is then provided to a GET request
    httpretty.register_uri(
        httpretty.GET,
        url,
        responses=[
            httpretty.Response(body=request_get_with_expired_cookie_callback),
            httpretty.Response(body=request_get_callback),
        ])

    # callback to verify that correct credentials are provided
    # and then returns the cookie to test again for 'GET'ing
    httpretty.register_uri(httpretty.POST,
                           url,
                           body=request_post_callback,
                           set_cookie=test_cookie)
    # then in another GET is performed to verify that correct cookie was provided and
    # that no credentials are there

    downloader = HTTPDownloader(credential=credential,
                                authenticator=authenticator)
    downloader.download(url, path=d)

    content = read_file(fpath)
    assert_equal(content, "correct body")
コード例 #23
0
    def test_fetch(self):
        """Test whether a list of topics is returned

        Fetches topics from a mocked Discourse server and checks the
        parsed items, the post pagination handling and the sequence of
        HTTP requests issued.
        """

        requests_http = []

        # Topics list pages: one with results, then an empty page
        bodies_topics = [read_file('data/discourse/discourse_topics.json'),
                         read_file('data/discourse/discourse_topics_empty.json')]
        body_topic_1148 = read_file('data/discourse/discourse_topic_1148.json')
        body_topic_1149 = read_file('data/discourse/discourse_topic_1149.json')
        body_post = read_file('data/discourse/discourse_post.json')

        def request_callback(method, uri, headers):
            # Serve the canned payload matching the requested endpoint;
            # a bare raise (RuntimeError) aborts on unexpected URIs
            if uri.startswith(DISCOURSE_TOPICS_URL):
                body = bodies_topics.pop(0)
            elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
                body = body_topic_1148
            elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
                body = body_topic_1149
            elif uri.startswith(DISCOURSE_POST_URL_1) or \
                    uri.startswith(DISCOURSE_POST_URL_2):
                body = body_post
            else:
                raise

            # Record every request so the sequence can be checked below
            requests_http.append(httpretty.last_request())

            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPICS_URL,
                               responses=[
                                   httpretty.Response(body=request_callback)
                                   for _ in range(2)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1148,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPIC_URL_1149,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_POST_URL_1,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])
        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_POST_URL_2,
                               responses=[
                                   httpretty.Response(body=request_callback)
                               ])

        # Test fetch topics
        discourse = Discourse(DISCOURSE_SERVER_URL)
        topics = [topic for topic in discourse.fetch()]

        self.assertEqual(len(topics), 2)

        # Topics are returned in reverse order
        # from oldest to newest
        self.assertEqual(topics[0]['data']['id'], 1149)
        self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 2)
        self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(topics[0]['uuid'], '18068b95de1323a84c8e11dee8f46fd137f10c86')
        self.assertEqual(topics[0]['updated_on'], 1464134770.909)
        self.assertEqual(topics[0]['category'], "topic")
        self.assertEqual(topics[0]['tag'], DISCOURSE_SERVER_URL)

        self.assertEqual(topics[1]['data']['id'], 1148)
        self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(topics[1]['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
        self.assertEqual(topics[1]['updated_on'], 1464144769.526)
        self.assertEqual(topics[1]['category'], "topic")
        self.assertEqual(topics[1]['tag'], DISCOURSE_SERVER_URL)

        # The next assertions check the cases whether the chunk_size is
        # less than the number of posts of a topic
        self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 22)
        self.assertEqual(topics[1]['data']['post_stream']['posts'][0]['id'], 18952)
        self.assertEqual(topics[1]['data']['post_stream']['posts'][20]['id'], 2500)

        # Check requests
        # Two paginated topics-list requests, then four parameterless
        # requests for topic details and posts
        expected = [
            {'page': ['0']},
            {'page': ['1']},
            {},
            {},
            {},
            {}
        ]

        self.assertEqual(len(requests_http), len(expected))

        for i in range(len(expected)):
            self.assertDictEqual(requests_http[i].querystring, expected[i])
コード例 #24
0
    def test_fetch_from_cache(self):
        """Test whether the cache works

        Fetches builds from a mocked Jenkins server (populating the
        cache), then reads them back from the cache and checks that
        the results match.
        """

        jobs_payload = read_file('data/jenkins_jobs.json', mode='rb')
        builds_payload = read_file('data/jenkins_job_builds.json')

        def request_callback(method, uri, headers):
            # Map each mocked endpoint onto its canned body and status;
            # unknown URIs get malformed JSON on purpose.
            if uri.startswith(JENKINS_JOBS_URL):
                return (200, headers, jobs_payload)
            if uri.startswith(JENKINS_JOB_BUILDS_URL_1) or \
               uri.startswith(JENKINS_JOB_BUILDS_URL_2):
                return (200, headers, builds_payload)
            if uri.startswith(JENKINS_JOB_BUILDS_URL_500_ERROR):
                return (500, headers, '500 Internal Server Error')
            return (200, headers, '{')

        # Endpoints that may be hit several times
        for target_url, times in ((JENKINS_JOBS_URL, 3),
                                  (JENKINS_JOB_BUILDS_URL_1, 2),
                                  (JENKINS_JOB_BUILDS_URL_2, 2)):
            httpretty.register_uri(httpretty.GET,
                                   target_url,
                                   responses=[
                                       httpretty.Response(body=request_callback)
                                       for _ in range(times)
                                   ])

        # Endpoints that simulate failures, hit once each
        for target_url in (JENKINS_JOB_BUILDS_URL_500_ERROR,
                           JENKINS_JOB_BUILDS_URL_JSON_ERROR):
            httpretty.register_uri(
                httpretty.GET,
                target_url,
                responses=[httpretty.Response(body=request_callback)])

        # First, we fetch the builds from the server, storing them
        # in a cache
        cache = Cache(self.tmp_path)
        jenkins = Jenkins(JENKINS_SERVER_URL, cache=cache)

        builds = list(jenkins.fetch())

        # Now, we get the builds from the cache.
        # The contents should be the same and there won't be
        # any new request to the server
        cached_builds = list(jenkins.fetch_from_cache())
        self.assertEqual(len(cached_builds), len(builds))

        with open("data/jenkins_build.json") as build_json:
            first_build = json.load(build_json)
            self.assertDictEqual(cached_builds[0]['data'], first_build['data'])
コード例 #25
0
def setup_http_server():
    """Setup a mock HTTP server for the Phabricator Conduit API.

    Registers httpretty handlers for the task, transaction, user and
    PHID endpoints used by the tests, plus one endpoint that returns
    an API error payload.

    :returns: a list that accumulates every request received by the
        mock server, in the order they were handled
    """

    http_requests = []

    # Canned responses read from fixture files
    error_body = read_file('data/phabricator/phabricator_error.json', 'rb')
    tasks_body = read_file('data/phabricator/phabricator_tasks.json', 'rb')
    tasks_next_body = read_file('data/phabricator/phabricator_tasks_next.json', 'rb')
    # read in binary mode like the rest of the fixtures, for consistency
    tasks_empty_body = read_file('data/phabricator/phabricator_tasks_empty.json', 'rb')
    tasks_trans_body = read_file('data/phabricator/phabricator_transactions.json', 'rb')
    tasks_trans_next_body = read_file('data/phabricator/phabricator_transactions_next.json', 'rb')
    users_body = read_file('data/phabricator/phabricator_users.json', 'rb')
    jane_body = read_file('data/phabricator/phabricator_user_jane.json', 'rb')
    janes_body = read_file('data/phabricator/phabricator_user_janesmith.json', 'rb')
    jdoe_body = read_file('data/phabricator/phabricator_user_jdoe.json', 'rb')
    jrae_body = read_file('data/phabricator/phabricator_user_jrae.json', 'rb')
    jsmith_body = read_file('data/phabricator/phabricator_user_jsmith.json', 'rb')
    phids_body = read_file('data/phabricator/phabricator_phids.json', 'rb')
    herald_body = read_file('data/phabricator/phabricator_phid_herald.json', 'rb')
    bugreport_body = read_file('data/phabricator/phabricator_project_bugreport.json', 'rb')
    teamdevel_body = read_file('data/phabricator/phabricator_project_devel.json', 'rb')

    # Per-user and per-PHID fixture lookup tables
    phids_users = {
        'PHID-USER-ojtcpympsmwenszuef7p': jane_body,
        'PHID-USER-mjr7pnwpg6slsnjcqki7': janes_body,
        'PHID-USER-2uk52xorcqb6sjvp467y': jdoe_body,
        'PHID-USER-pr5fcxy4xk5ofqsfqcfc': jrae_body,
        'PHID-USER-bjxhrstz5fb5gkrojmev': jsmith_body
    }

    phids = {
        'PHID-APPS-PhabricatorHeraldApplication': herald_body,
        'PHID-PROJ-2qnt6thbrd7qnx5bitzy': bugreport_body,
        'PHID-PROJ-zi2ndtoy3fh5pnbqzfdo': teamdevel_body
    }

    def request_callback(method, uri, headers):
        # Conduit requests are POSTs carrying a JSON-encoded 'params' field
        last_request = httpretty.last_request()
        params = json.loads(last_request.parsed_body['params'][0])

        if uri == PHABRICATOR_TASKS_URL:
            if params['constraints']['modifiedStart'] == 1467158400:
                body = tasks_next_body
            elif params['constraints']['modifiedStart'] == 1483228800:
                body = tasks_empty_body
            elif 'after' not in params:
                # first page of results; 'after' is the pagination cursor
                body = tasks_body
            else:
                body = tasks_next_body
        elif uri == PHABRICATOR_TRANSACTIONS_URL:
            if 69 in params['ids']:
                body = tasks_trans_body
            else:
                body = tasks_trans_next_body
        elif uri == PHABRICATOR_USERS_URL:
            # bulk lookup returns all users; single lookups use the tables
            if len(params['phids']) == 4:
                body = users_body
            else:
                body = phids_users[params['phids'][0]]
        elif uri == PHABRICATOR_PHIDS_URL:
            if len(params['phids']) == 2:
                body = phids_body
            else:
                body = phids[params['phids'][0]]
        elif uri == PHABRICATOR_API_ERROR_URL:
            body = error_body
        else:
            # Fail loudly on any URI the tests did not anticipate.
            # (The original bare 'raise' outside an except clause would
            # have produced "RuntimeError: No active exception to re-raise".)
            raise Exception("unexpected URI: %s" % uri)

        http_requests.append(last_request)

        return (200, headers, body)

    httpretty.register_uri(httpretty.POST,
                           PHABRICATOR_TASKS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.POST,
                           PHABRICATOR_TRANSACTIONS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.POST,
                           PHABRICATOR_USERS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.POST,
                           PHABRICATOR_PHIDS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.POST,
                           PHABRICATOR_API_ERROR_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])

    return http_requests
コード例 #26
0
    def test_cache_utilization(self):
        """Verify that when enabled, the cache is used."""
        catalog_integration = self.create_catalog_integration(cache_ttl=5)
        api = create_catalog_api_client(self.user, catalog_integration)

        # Mock the collection endpoint with a single, unpaginated page
        expected_collection = ['some', 'test', 'data']
        self._mock_catalog_api([
            httpretty.Response(
                body=json.dumps({'next': None, 'results': expected_collection}),
                content_type='application/json')
        ])

        # Mock the detail endpoint for a single resource
        resource_id = 1
        api_root = CatalogIntegration.current().internal_api_url.strip('/')
        url = '{api_root}/programs/{resource_id}/'.format(
            api_root=api_root,
            resource_id=resource_id,
        )

        expected_resource = {'key': 'value'}
        self._mock_catalog_api(
            [httpretty.Response(body=json.dumps(expected_resource),
                                content_type='application/json')],
            url=url)

        cache_key = CatalogIntegration.current().CACHE_KEY

        # Warm up the cache: one call per endpoint hits the server.
        get_edx_api_data(catalog_integration, self.user, 'programs',
                         api=api, cache_key=cache_key)
        get_edx_api_data(catalog_integration, self.user, 'programs',
                         api=api, resource_id=resource_id,
                         cache_key=cache_key)

        # Hit the cache: repeated calls return the same data without
        # touching the server again.
        actual_collection = get_edx_api_data(catalog_integration, self.user,
                                             'programs', api=api,
                                             cache_key=cache_key)
        self.assertEqual(actual_collection, expected_collection)

        actual_resource = get_edx_api_data(catalog_integration, self.user,
                                           'programs', api=api,
                                           resource_id=resource_id,
                                           cache_key=cache_key)
        self.assertEqual(actual_resource, expected_resource)

        # Verify that only two requests were made, not four.
        self._assert_num_requests(2)
コード例 #27
0
def configure_http_server(depth=1):
    """Configure a mock Jenkins HTTP server.

    Registers httpretty handlers for the jobs listing and for the build
    endpoints of two jobs, plus endpoints that answer with a 500 error
    and with an invalid JSON payload. Handlers for both depth values are
    registered; `depth` only selects which set of build URLs the shared
    callback recognizes.

    :param depth: Jenkins API depth the callback answers for (1 or 2)
    """
    bodies_jobs = read_file('data/jenkins/jenkins_jobs.json', mode='rb')
    bodies_builds_job = read_file('data/jenkins/jenkins_job_builds.json')

    # Resolve the depth-specific URLs once, instead of duplicating the
    # whole dispatch inside the callback for each depth value.
    if depth == 2:
        builds_urls = (JENKINS_JOB_BUILDS_URL_1_DEPTH_2,
                       JENKINS_JOB_BUILDS_URL_2_DEPTH_2)
        error_url = JENKINS_JOB_BUILDS_URL_500_ERROR_DEPTH_2
    else:
        builds_urls = (JENKINS_JOB_BUILDS_URL_1_DEPTH_1,
                       JENKINS_JOB_BUILDS_URL_2_DEPTH_1)
        error_url = JENKINS_JOB_BUILDS_URL_500_ERROR_DEPTH_1

    def request_callback(method, uri, headers):
        status = 200

        if uri.startswith(JENKINS_JOBS_URL):
            body = bodies_jobs
        elif uri.startswith(builds_urls):
            # str.startswith accepts a tuple of prefixes
            body = bodies_builds_job
        elif uri.startswith(error_url):
            status = 500
            body = '500 Internal Server Error'
        else:
            # Anything else (e.g. the JSON-error endpoints) gets a
            # malformed JSON payload
            body = '{'

        # NOTE(review): 'requests_http' is not defined here; presumably
        # a module-level list shared with the tests -- confirm at module
        # scope.
        requests_http.append(httpretty.last_request())

        return status, headers, body

    httpretty.register_uri(httpretty.GET,
                           JENKINS_JOBS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           JENKINS_JOB_BUILDS_URL_1_DEPTH_1,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           JENKINS_JOB_BUILDS_URL_2_DEPTH_1,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(
        httpretty.GET,
        JENKINS_JOB_BUILDS_URL_500_ERROR_DEPTH_1,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(
        httpretty.GET,
        JENKINS_JOB_BUILDS_URL_JSON_ERROR_DEPTH_1,
        responses=[httpretty.Response(body=request_callback)])

    httpretty.register_uri(httpretty.GET,
                           JENKINS_JOBS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           JENKINS_JOB_BUILDS_URL_1_DEPTH_2,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           JENKINS_JOB_BUILDS_URL_2_DEPTH_2,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(
        httpretty.GET,
        JENKINS_JOB_BUILDS_URL_500_ERROR_DEPTH_2,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(
        httpretty.GET,
        JENKINS_JOB_BUILDS_URL_JSON_ERROR_DEPTH_2,
        responses=[httpretty.Response(body=request_callback)])
コード例 #28
0
def setup_http_server(archived_channel=False):
    """Setup a mock HTTP server for the Slack API.

    Registers httpretty handlers for the channel info, channel history,
    user info and conversation members endpoints.

    :param archived_channel: serve the archived-channel fixture from
        the channel info endpoint

    :returns: a list that accumulates every request received by the
        mock server, in the order they were handled
    """

    http_requests = []

    channel_error = read_file('data/slack/slack_error.json', 'rb')
    channel_empty = read_file('data/slack/slack_history_empty.json', 'rb')

    if archived_channel:
        channel_info = read_file('data/slack/slack_info_archived.json', 'rb')
    else:
        channel_info = read_file('data/slack/slack_info.json', 'rb')

    conversation_members_1 = read_file('data/slack/slack_members1.json', 'rb')
    conversation_members_2 = read_file('data/slack/slack_members2.json', 'rb')
    channel_history = read_file('data/slack/slack_history.json', 'rb')
    channel_history_next = read_file('data/slack/slack_history_next.json', 'rb')
    channel_history_date = read_file('data/slack/slack_history_20150323.json', 'rb')
    user_U0001 = read_file('data/slack/slack_user_U0001.json', 'rb')
    user_U0002 = read_file('data/slack/slack_user_U0002.json', 'rb')
    user_U0003 = read_file('data/slack/slack_user_U0003.json', 'rb')

    def request_callback(method, uri, headers):
        last_request = httpretty.last_request()
        params = last_request.querystring

        status = 200

        if uri.startswith(SLACK_CHANNEL_INFO_URL):
            body = channel_info
        elif uri.startswith(SLACK_CHANNEL_HISTORY_URL):
            # Map each (oldest, latest) window the tests request onto
            # the matching fixture page
            if params['channel'][0] != 'C011DUKE8':
                body = channel_error
            elif 'latest' not in params:
                body = channel_history
            elif (params['oldest'][0] == '0.999990' and
                  params['latest'][0] == '1427135733.000068'):
                body = channel_history_next
            elif (params['oldest'][0] == '0' and
                  params['latest'][0] == '1483228800.000000'):
                body = channel_history
            elif (params['oldest'][0] == '0' and
                  params['latest'][0] == '1427135733.000068'):
                body = channel_history_next
            elif (params['oldest'][0] == '1427135740.000059' and
                  params['latest'][0] == '1483228800.000000'):
                body = channel_history_date
            elif (params['oldest'][0] == '1451606399.999990' and
                  params['latest'][0] == '1483228800.000000'):
                body = channel_empty
            else:
                # Fail loudly with a diagnostic instead of a bare
                # 'raise Exception' with no message
                raise Exception("unexpected history window: %s" % str(params))
        elif uri.startswith(SLACK_USER_INFO_URL):
            if params['user'][0] == 'U0001':
                body = user_U0001
            elif params['user'][0] == 'U0002':
                body = user_U0002
            else:
                body = user_U0003
        elif uri.startswith(SLACK_CONVERSATION_MEMBERS):
            # 'cursor' marks the second page of members
            if 'cursor' not in params:
                body = conversation_members_1
            else:
                body = conversation_members_2
        else:
            # Fail loudly on any unexpected URI. (The original bare
            # 'raise' outside an except clause would have produced
            # "RuntimeError: No active exception to re-raise".)
            raise Exception("unexpected URI: %s" % uri)

        http_requests.append(last_request)

        return status, headers, body

    httpretty.register_uri(httpretty.GET,
                           SLACK_CHANNEL_INFO_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])

    httpretty.register_uri(httpretty.GET,
                           SLACK_CHANNEL_HISTORY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(1)
                           ])

    httpretty.register_uri(httpretty.GET,
                           SLACK_USER_INFO_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])

    httpretty.register_uri(httpretty.GET,
                           SLACK_CONVERSATION_MEMBERS,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])

    return http_requests
コード例 #29
0
def setup_http_server(rate_limit=-1, reset_rate_limit=-1):
    """Setup a mock HTTP server for the Meetup API.

    Registers httpretty handlers for the events, comments and RSVPs
    endpoints, optionally injecting rate-limit headers into the
    paginated event responses.

    :param rate_limit: value served in the 'X-RateLimit-Remaining'
        header of paginated event pages; -1 serves a very large default
        on every response
    :param reset_rate_limit: value served in the 'X-RateLimit-Reset'
        header of paginated event pages; -1 serves '0' on every response

    :returns: a list that accumulates every request received by the
        mock server, in the order they were handled
    """

    http_requests = []

    events_bodies = [
        read_file('data/meetup/meetup_events.json', 'rb'),
        read_file('data/meetup/meetup_events_next.json', 'rb')
    ]
    events_range_body = read_file('data/meetup/meetup_events_range.json', 'rb')
    events_empty_body = read_file('data/meetup/meetup_events_empty.json', 'rb')
    event_comments_body = read_file('data/meetup/meetup_comments.json', 'rb')
    event_rsvps_body = read_file('data/meetup/meetup_rsvps.json', 'rb')

    # Note: the original callback took an unused 'too_many_requests'
    # keyword; httpretty only ever passes (method, uri, headers).
    def request_callback(method, uri, headers):
        last_request = httpretty.last_request()

        # str.startswith accepts a tuple of prefixes
        if uri.startswith((MEETUP_EVENT_1_COMMENTS_URL,
                           MEETUP_EVENT_2_COMMENTS_URL,
                           MEETUP_EVENT_3_COMMENTS_URL)):
            body = event_comments_body
        elif uri.startswith((MEETUP_EVENT_1_RSVPS_URL,
                             MEETUP_EVENT_2_RSVPS_URL,
                             MEETUP_EVENT_3_RSVPS_URL)):
            body = event_rsvps_body
        elif uri.startswith(MEETUP_EVENTS_URL):
            params = last_request.querystring
            scroll = params.get('scroll', None)

            if scroll and scroll[0] == 'since:2016-09-25T00:00:00.000Z':
                # Last events and no pagination
                body = events_bodies[-1]
            elif scroll and scroll[0] == 'since:2016-04-08T00:00:00.000Z':
                body = events_range_body
            elif scroll and scroll[0] == 'since:2017-01-01T00:00:00.000Z':
                body = events_empty_body
            else:
                body = events_bodies.pop(0)

                if events_bodies:
                    # Mock the 'Link' header with a fake URL
                    headers['Link'] = '<' + MEETUP_EVENTS_URL + '>; rel="next"'

                if rate_limit != -1:
                    headers['X-RateLimit-Remaining'] = str(rate_limit)
                if reset_rate_limit != -1:
                    headers['X-RateLimit-Reset'] = str(reset_rate_limit)
        else:
            # Fail loudly on any unexpected URI. (The original bare
            # 'raise' outside an except clause would have produced
            # "RuntimeError: No active exception to re-raise".)
            raise Exception("unexpected URI: %s" % uri)

        # Defaults that effectively disable rate limiting in the tests
        if rate_limit == -1:
            headers['X-RateLimit-Remaining'] = '10000000'
        if reset_rate_limit == -1:
            headers['X-RateLimit-Reset'] = '0'

        http_requests.append(last_request)

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           MEETUP_EVENTS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    for url in MEETUP_COMMENTS_URL:
        httpretty.register_uri(
            httpretty.GET,
            url,
            responses=[httpretty.Response(body=request_callback)])
    for url in MEETUP_RSVPS_URL:
        httpretty.register_uri(
            httpretty.GET,
            url,
            responses=[httpretty.Response(body=request_callback)])

    return http_requests
コード例 #30
0
    def test_fetch_from_cache(self):
        """Test whether the cache works"""

        requests_http = []

        # Fixture bodies: a page of topics, an empty page that stops
        # pagination, two individual topics and one post
        bodies_topics = [
            read_file('data/discourse_topics.json'),
            read_file('data/discourse_topics_empty.json')
        ]
        body_topic_1148 = read_file('data/discourse_topic_1148.json')
        body_topic_1149 = read_file('data/discourse_topic_1149.json')
        body_post = read_file('data/discourse_post.json')

        def request_callback(method, uri, headers):
            # Serve the fixture matching the requested endpoint; topic
            # listing pages are consumed in order
            if uri.startswith(DISCOURSE_TOPICS_URL):
                body = bodies_topics.pop(0)
            elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
                body = body_topic_1148
            elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
                body = body_topic_1149
            elif uri.startswith(DISCOURSE_POST_URL_1) or \
                 uri.startswith(DISCOURSE_POST_URL_2):
                body = body_post
            else:
                raise

            requests_http.append(httpretty.last_request())

            return (200, headers, body)

        httpretty.register_uri(httpretty.GET,
                               DISCOURSE_TOPICS_URL,
                               responses=[
                                    httpretty.Response(body=request_callback) \
                                    for _ in range(2)
                               ])
        httpretty.register_uri(
            httpretty.GET,
            DISCOURSE_TOPIC_URL_1148,
            responses=[httpretty.Response(body=request_callback)])
        httpretty.register_uri(
            httpretty.GET,
            DISCOURSE_TOPIC_URL_1149,
            responses=[httpretty.Response(body=request_callback)])
        httpretty.register_uri(
            httpretty.GET,
            DISCOURSE_POST_URL_1,
            responses=[httpretty.Response(body=request_callback)])
        httpretty.register_uri(
            httpretty.GET,
            DISCOURSE_POST_URL_2,
            responses=[httpretty.Response(body=request_callback)])

        # First, we fetch the topics from the server, storing them
        # in a cache
        cache = Cache(self.tmp_path)
        discourse = Discourse(DISCOURSE_SERVER_URL, cache=cache)

        topics = [topic for topic in discourse.fetch()]
        self.assertEqual(len(requests_http), 6)

        # Now, we get the topics from the cache.
        # The contents should be the same and there won't be
        # any new request to the server
        cached_topics = [topic for topic in discourse.fetch_from_cache()]
        self.assertEqual(len(cached_topics), len(topics))

        self.assertEqual(len(cached_topics), 2)

        # Topics are returned in reverse order
        # from oldest to newest
        self.assertEqual(cached_topics[0]['data']['id'], 1149)
        self.assertEqual(len(cached_topics[0]['data']['post_stream']['posts']),
                         2)
        self.assertEqual(cached_topics[0]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(cached_topics[0]['uuid'],
                         '18068b95de1323a84c8e11dee8f46fd137f10c86')
        self.assertEqual(cached_topics[0]['updated_on'], 1464134770.909)
        self.assertEqual(cached_topics[0]['category'], 'topic')
        self.assertEqual(cached_topics[0]['origin'], DISCOURSE_SERVER_URL)

        self.assertEqual(cached_topics[1]['data']['id'], 1148)
        self.assertEqual(cached_topics[1]['origin'], DISCOURSE_SERVER_URL)
        self.assertEqual(cached_topics[1]['uuid'],
                         '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
        self.assertEqual(cached_topics[1]['updated_on'], 1464144769.526)
        self.assertEqual(cached_topics[1]['category'], 'topic')
        self.assertEqual(cached_topics[1]['origin'], DISCOURSE_SERVER_URL)

        # The next assertions check the cases whether the chunk_size is
        # less than the number of posts of a topic
        self.assertEqual(len(cached_topics[1]['data']['post_stream']['posts']),
                         22)
        self.assertEqual(
            cached_topics[1]['data']['post_stream']['posts'][0]['id'], 18952)
        self.assertEqual(
            cached_topics[1]['data']['post_stream']['posts'][20]['id'], 2500)

        # No more requests were sent
        self.assertEqual(len(requests_http), 6)