def test_fetch_empty(self):
    """Test whether it works when no bugs are fetched"""

    version_body = read_file('data/bugzilla/bugzilla_version.xml')
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_METADATA_URL,
                           body=version_body, status=200)
    # An empty CSV body means the server has no bugs to return
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           body="", status=200)

    backend = Bugzilla(BUGZILLA_SERVER_URL)
    fetched = list(backend.fetch(from_date=datetime.datetime(2100, 1, 1)))

    self.assertEqual(len(fetched), 0)

    # The buglist request must carry the given date in 'chfieldfrom'
    expected = {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2100-01-01 00:00:00']
    }
    last_req = httpretty.last_request()
    self.assertDictEqual(last_req.querystring, expected)
def test_fetch(self):
    """Test whether a list of bugs is returned"""

    # Requests are recorded by the callback in arrival order so the
    # exact sequence of queries can be verified at the end.
    requests = []
    # Mock payloads, consumed in order: two pages of bugs plus an
    # empty page that stops pagination.
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist.csv'),
                  read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')]
    bodies_html = [read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
                   read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')]

    def request_callback(method, uri, headers):
        """Serve the next canned body for the requested endpoint."""
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            # Activity pages alternate between a populated and an empty
            # body; parity is taken from the number of requests seen so
            # far, so statement order here matters.
            body = bodies_html[len(requests) % 2]
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(7)
                           ])

    # max_bugs=5 forces bug details to be fetched in two batches
    bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, max_bugs_csv=500)
    bugs = [bug for bug in bg.fetch()]

    self.assertEqual(len(bugs), 7)

    # Spot-check the first and last fetched bugs
    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '15')
    self.assertEqual(len(bugs[0]['data']['activity']), 0)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'], '5a8a1e25dfda86b961b4146050883cbfc928f8ec')
    self.assertEqual(bugs[0]['updated_on'], 1248276445.0)
    self.assertEqual(bugs[0]['category'], 'bug')
    self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

    self.assertEqual(bugs[6]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[6]['data']['activity']), 14)
    self.assertEqual(bugs[6]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[6]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[6]['updated_on'], 1439404330.0)
    self.assertEqual(bugs[6]['category'], 'bug')
    self.assertEqual(bugs[6]['tag'], BUGZILLA_SERVER_URL)

    # Check requests: version, three paginated buglists (each page's
    # 'chfieldfrom' advances to the last change date of the previous
    # page), then details + activity for each batch of bugs.
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'limit': ['500'],
        'order': ['changeddate'],
        'chfieldfrom': ['1970-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'limit': ['500'],
        'order': ['changeddate'],
        'chfieldfrom': ['2009-07-30 11:35:33']
    }, {
        'ctype': ['csv'],
        'limit': ['500'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['15', '18', '17', '20', '19'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['15']
    }, {
        'id': ['18']
    }, {
        'id': ['17']
    }, {
        'id': ['20']
    }, {
        'id': ['19']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
def test_fetch_auth(self):
    """Test whether authentication works"""

    # Requests are recorded in arrival order for later verification
    requests = []
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')]
    bodies_html = [read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
                   read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')]

    def request_callback(method, uri, headers):
        """Serve the next canned body for the requested endpoint."""
        if uri.startswith(BUGZILLA_LOGIN_URL):
            # Bugzilla answers a successful login with a logout link
            body = "index.cgi?logout=1"
        elif uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            # Activity pages alternate; the +1 offset accounts for the
            # extra login request already recorded in `requests`.
            body = bodies_html[(len(requests) + 1) % 2]
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(httpretty.POST,
                           BUGZILLA_LOGIN_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])

    from_date = datetime.datetime(2015, 1, 1)

    bg = Bugzilla(BUGZILLA_SERVER_URL,
                  user='******',
                  password='******')
    bugs = [bug for bug in bg.fetch(from_date=from_date)]

    self.assertEqual(len(bugs), 2)

    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '30')
    self.assertEqual(len(bugs[0]['data']['activity']), 14)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'], '4b166308f205121bc57704032acdc81b6c9bb8b1')
    self.assertEqual(bugs[0]['updated_on'], 1426868155.0)
    self.assertEqual(bugs[0]['category'], 'bug')
    self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

    self.assertEqual(bugs[1]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[1]['data']['activity']), 0)
    self.assertEqual(bugs[1]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[1]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[1]['updated_on'], 1439404330.0)
    self.assertEqual(bugs[1]['category'], 'bug')
    self.assertEqual(bugs[1]['tag'], BUGZILLA_SERVER_URL)

    # Check requests
    auth_expected = {
        'Bugzilla_login': ['*****@*****.**'],
        'Bugzilla_password': ['1234'],
        'GoAheadAndLogIn': ['Log in']
    }
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    # Check the authentication request: credentials travel in the
    # POST body, not in the query string
    auth_req = requests.pop(0)
    self.assertDictEqual(auth_req.parsed_body, auth_expected)

    # Check the rest of the requests
    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
def test_search_fields(self):
    """Test whether the search_fields is properly set"""

    # Requests are recorded in arrival order by the callback
    requests = []
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist.csv'),
                  read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')]
    bodies_html = [read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
                   read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')]

    def request_callback(method, uri, headers):
        """Serve the next canned body for the requested endpoint."""
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            # Activity pages alternate between populated and empty
            body = bodies_html[len(requests) % 2]
        requests.append(httpretty.last_request())
        return 200, headers, body

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(7)
                           ])

    bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, max_bugs_csv=500)
    bugs = [bug for bug in bg.fetch()]

    self.assertEqual(len(bugs), 7)

    # Expected (product, component) pair for each fetched bug; the
    # repetitive per-bug stanzas are folded into one data-driven loop.
    expected = [
        ('LibreGeoSocial (Android)', 'general'),
        ('LibreGeoSocial (Android)', 'general'),
        ('Bicho', 'General'),
        ('LibreGeoSocial (server)', 'general'),
        ('CVSAnalY', 'general'),
        ('Bicho', 'General'),
        ('CVSAnalY', 'general')
    ]

    for bug, (product, component) in zip(bugs, expected):
        # 'search_fields' must mirror the item id, product and
        # component taken from the bug data itself
        self.assertEqual(bg.metadata_id(bug['data']),
                         bug['search_fields']['item_id'])
        self.assertEqual(bug['data']['product'][0]['__text__'], product)
        self.assertEqual(bug['data']['product'][0]['__text__'],
                         bug['search_fields']['product'])
        self.assertEqual(bug['data']['component'][0]['__text__'], component)
        self.assertEqual(bug['data']['component'][0]['__text__'],
                         bug['search_fields']['component'])
def test_fetch_from_cache(self):
    """Test whether the cache works"""

    # Requests are recorded in arrival order by the callback
    requests = []
    # NOTE: fixture paths use the 'data/bugzilla/' directory to stay
    # consistent with the other tests in this file
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist.csv'),
                  read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')]
    bodies_html = [read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
                   read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')]

    def request_callback(method, uri, headers):
        """Serve the next canned body for the requested endpoint."""
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            # Activity pages alternate between populated and empty
            body = bodies_html[len(requests) % 2]
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(7)
                           ])

    # First, we fetch the bugs from the server, storing them
    # in a cache
    cache = Cache(self.tmp_path)
    bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, cache=cache)

    bugs = [bug for bug in bg.fetch()]
    self.assertEqual(len(requests), 13)

    # Now, we get the bugs from the cache.
    # The contents should be the same and there won't be
    # any new request to the server
    cached_bugs = [bug for bug in bg.fetch_from_cache()]
    self.assertEqual(len(cached_bugs), len(bugs))
    self.assertEqual(len(cached_bugs), 7)

    self.assertEqual(cached_bugs[0]['data']['bug_id'][0]['__text__'], '15')
    self.assertEqual(len(cached_bugs[0]['data']['activity']), 0)
    self.assertEqual(cached_bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(cached_bugs[0]['uuid'], '5a8a1e25dfda86b961b4146050883cbfc928f8ec')
    self.assertEqual(cached_bugs[0]['updated_on'], 1248276445.0)
    self.assertEqual(cached_bugs[0]['category'], 'bug')
    self.assertEqual(cached_bugs[0]['tag'], BUGZILLA_SERVER_URL)

    self.assertEqual(cached_bugs[6]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(cached_bugs[6]['data']['activity']), 14)
    self.assertEqual(cached_bugs[6]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(cached_bugs[6]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(cached_bugs[6]['updated_on'], 1439404330.0)
    self.assertEqual(cached_bugs[6]['category'], 'bug')
    self.assertEqual(cached_bugs[6]['tag'], BUGZILLA_SERVER_URL)

    # No new HTTP requests were issued by the cached fetch
    self.assertEqual(len(requests), 13)
def test_fetch_from_date(self):
    """Test whether a list of bugs is returned from a given date"""

    # Requests are recorded in arrival order by the callback
    requests = []
    # NOTE: fixture paths use the 'data/bugzilla/' directory to stay
    # consistent with the other tests in this file
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    bodies_xml = [read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
                  read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')]
    bodies_html = [read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
                   read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')]

    def request_callback(method, uri, headers):
        """Serve the next canned body for the requested endpoint."""
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            # Activity pages alternate between populated and empty
            body = bodies_html[len(requests) % 2]
        requests.append(httpretty.last_request())
        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])

    from_date = datetime.datetime(2015, 1, 1)

    bg = Bugzilla(BUGZILLA_SERVER_URL)
    bugs = [bug for bug in bg.fetch(from_date=from_date)]

    self.assertEqual(len(bugs), 2)

    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '30')
    self.assertEqual(len(bugs[0]['data']['activity']), 14)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'], '4b166308f205121bc57704032acdc81b6c9bb8b1')
    self.assertEqual(bugs[0]['updated_on'], 1426868155.0)
    self.assertEqual(bugs[0]['category'], 'bug')
    self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

    self.assertEqual(bugs[1]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[1]['data']['activity']), 0)
    self.assertEqual(bugs[1]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[1]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[1]['updated_on'], 1439404330.0)
    self.assertEqual(bugs[1]['category'], 'bug')
    self.assertEqual(bugs[1]['tag'], BUGZILLA_SERVER_URL)

    # Check requests: version, two buglist pages (the second page's
    # 'chfieldfrom' advances past the first page), then details and
    # per-bug activity
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
default=-1) args = parser.parse_args() # print(args) # print ((args.date_from, args.date_to)) bugzilla = Bugzilla(args.url, user=args.user, password=args.password, max_bugs=200, max_bugs_csv=10000, tag=None, archive=None) # print(bugzilla) oids = set() for bug in bugzilla.fetch(category=args.category, from_date=args.date_from): # print(bug) # print(bug.keys()) # print(bug['data'].keys()) product = bug['data']['product'][0]['__text__'] if args.product and args.product != product: continue # dtu = dateutil.parser.parse(bug['data']['delta_ts'][0]['__text__']) dtu = utc.localize( datetime.datetime.fromtimestamp( bugzilla.metadata_updated_on(bug['data']))) # print(dtu) if args.use_created_date: dtc = dateutil.parser.parse(bug['data']['creation_ts'][0]['__text__']) diff = (dtu - dtc) / datetime.timedelta(seconds=1) # print((product, dtc, dtu))