def test_parse_activity(self):
    """Test activity bug parsing"""
    raw_html = read_file('data/bugzilla/bugzilla_bug_activity.html')

    events = list(Bugzilla.parse_bug_activity(raw_html))
    self.assertEqual(len(events), 14)

    # First row of the activity table
    expected = {
        'Who': '*****@*****.**',
        'When': '2013-06-25 11:57:23 CEST',
        'What': 'Attachment #172 Attachment is obsolete',
        'Removed': '0',
        'Added': '1'
    }
    self.assertDictEqual(events[0], expected)

    # A later row where the 'Added' column is empty
    expected = {
        'Who': '*****@*****.**',
        'When': '2013-06-25 11:59:07 CEST',
        'What': 'Depends on',
        'Removed': '350',
        'Added': ''
    }
    self.assertDictEqual(events[6], expected)
def test_parse_activity_no_table(self):
    """Test if it raises an exception when the activity table is not found"""
    raw_html = read_file('data/bugzilla/bugzilla_bug_activity_not_valid.html')

    with self.assertRaises(ParseError):
        # The parser is a generator; consume it so parsing runs
        events = Bugzilla.parse_bug_activity(raw_html)
        _ = list(events)
def test_parse_invalid_bug_details(self):
    """Test whether it fails parsing an invalid XML with no bugs"""
    raw_xml = read_file('data/bugzilla/bugzilla_bugs_details_not_valid.xml')

    with self.assertRaises(ParseError):
        # The parser is a generator; consume it so parsing runs
        parsed = Bugzilla.parse_bugs_details(raw_xml)
        _ = list(parsed)
def test_parse_empty_activity(self):
    """Test the parser when the activity table is empty"""
    # Two flavours of empty tables exist: the first fixture uses the
    # term 'bug' while the second replaces it by 'issue'. Both must
    # yield no activity events.
    for fixture in ('data/bugzilla/bugzilla_bug_activity_empty.html',
                    'data/bugzilla/bugzilla_bug_activity_empty_alt.html'):
        raw_html = read_file(fixture)
        events = list(Bugzilla.parse_bug_activity(raw_html))
        self.assertEqual(len(events), 0)
def test_parse_bugs_details(self):
    """Test bugs details parsing"""
    raw_xml = read_file('data/bugzilla/bugzilla_bugs_details.xml')

    bugs = Bugzilla.parse_bugs_details(raw_xml)
    result = [bug for bug in bugs]
    self.assertEqual(len(result), 5)

    bug_ids = [bug['bug_id'][0]['__text__'] for bug in result]
    expected = ['15', '18', '17', '20', '19']
    self.assertListEqual(bug_ids, expected)

    raw_xml = read_file('data/bugzilla/bugzilla_bugs_details_next.xml')

    bugs = Bugzilla.parse_bugs_details(raw_xml)
    result = [bug for bug in bugs]
    # Fix: the second fixture was parsed but its result was never
    # asserted. It contains the two bugs ('30' and '888') that the
    # fetch tests retrieve from this same file.
    self.assertEqual(len(result), 2)

    bug_ids = [bug['bug_id'][0]['__text__'] for bug in result]
    self.assertListEqual(bug_ids, ['30', '888'])
def test_parse_buglist(self):
    """Test buglist parsing"""
    raw_csv = read_file('data/bugzilla/bugzilla_buglist.csv')

    rows = list(Bugzilla.parse_buglist(raw_csv))

    # Five rows; spot-check the first and last bug identifiers
    self.assertEqual(len(rows), 5)
    self.assertEqual(rows[0]['bug_id'], '15')
    self.assertEqual(rows[4]['bug_id'], '19')
def test_initialization(self):
    """Test whether attributes are initializated"""
    bg = Bugzilla(BUGZILLA_SERVER_URL, tag='test', max_bugs=5)

    self.assertEqual(bg.url, BUGZILLA_SERVER_URL)
    self.assertEqual(bg.origin, BUGZILLA_SERVER_URL)
    self.assertEqual(bg.tag, 'test')
    self.assertEqual(bg.max_bugs, 5)
    self.assertIsNone(bg.client)

    # When tag is empty or None it falls back to the value of
    # the origin (URL)
    for backend in (Bugzilla(BUGZILLA_SERVER_URL),
                    Bugzilla(BUGZILLA_SERVER_URL, tag='')):
        self.assertEqual(backend.url, BUGZILLA_SERVER_URL)
        self.assertEqual(backend.origin, BUGZILLA_SERVER_URL)
        self.assertEqual(backend.tag, BUGZILLA_SERVER_URL)
def test_fetch(self):
    """Test whether a list of bugs is returned"""
    # Every request served by the mock is recorded here so the exact
    # querystrings can be verified at the end of the test
    requests = []

    # Buglist CSV pages; the final empty string stops pagination
    bodies_csv = [
        read_file('data/bugzilla/bugzilla_buglist.csv'),
        read_file('data/bugzilla/bugzilla_buglist_next.csv'),
        ""
    ]
    # Server version probe followed by the two bug-details XML pages
    bodies_xml = [
        read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
        read_file('data/bugzilla/bugzilla_bugs_details.xml', mode='rb'),
        read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')
    ]
    # Two activity pages, alternated per request (see callback below)
    bodies_html = [
        read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
        read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')
    ]

    def request_callback(method, uri, headers):
        # Dispatch on URL prefix: buglist -> CSV queue, bug details
        # -> XML queue, anything else is an activity request and is
        # answered by alternating the two HTML fixtures based on how
        # many requests have been recorded so far
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            body = bodies_html[len(requests) % 2]

        requests.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(7)
                           ])

    bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, max_bugs_csv=500)
    bugs = [bug for bug in bg.fetch()]

    self.assertEqual(len(bugs), 7)

    # First fetched bug: id '15', served the empty activity page
    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '15')
    self.assertEqual(len(bugs[0]['data']['activity']), 0)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'], '5a8a1e25dfda86b961b4146050883cbfc928f8ec')
    self.assertEqual(bugs[0]['updated_on'], 1248276445.0)
    self.assertEqual(bugs[0]['category'], 'bug')
    self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

    # Last fetched bug: id '888', served the 14-event activity page
    self.assertEqual(bugs[6]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[6]['data']['activity']), 14)
    self.assertEqual(bugs[6]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[6]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[6]['updated_on'], 1439404330.0)
    self.assertEqual(bugs[6]['category'], 'bug')
    self.assertEqual(bugs[6]['tag'], BUGZILLA_SERVER_URL)

    # Check requests: version probe, three paginated buglist calls
    # (each resuming from the previous page's last change date), two
    # details calls, and one activity call per bug
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'limit': ['500'],
        'order': ['changeddate'],
        'chfieldfrom': ['1970-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'limit': ['500'],
        'order': ['changeddate'],
        'chfieldfrom': ['2009-07-30 11:35:33']
    }, {
        'ctype': ['csv'],
        'limit': ['500'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['15', '18', '17', '20', '19'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['15']
    }, {
        'id': ['18']
    }, {
        'id': ['17']
    }, {
        'id': ['20']
    }, {
        'id': ['19']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
def test_has_resuming(self):
    """Test if it returns True when has_resuming is called"""
    resumes = Bugzilla.has_resuming()
    self.assertEqual(resumes, True)
def test_fetch_auth(self):
    """Test whether authentication works"""
    # Every request served by the mock is recorded here so the exact
    # querystrings can be verified at the end of the test
    requests = []

    # Buglist CSV pages; the final empty string stops pagination
    bodies_csv = [read_file('data/bugzilla/bugzilla_buglist_next.csv'),
                  ""]
    # Server version probe followed by the bug-details XML page
    bodies_xml = [
        read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
        read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')
    ]
    # Two activity pages, alternated per request (see callback below)
    bodies_html = [
        read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
        read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')
    ]

    def request_callback(method, uri, headers):
        if uri.startswith(BUGZILLA_LOGIN_URL):
            # Successful login response
            body = "index.cgi?logout=1"
        elif uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            # Activity pages alternate by request count; the +1 offset
            # differs from test_fetch — presumably it compensates for
            # the extra login request in the count. NOTE(review):
            # confirm against the recorded request order.
            body = bodies_html[(len(requests) + 1) % 2]

        requests.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(
        httpretty.POST,
        BUGZILLA_LOGIN_URL,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(
        httpretty.GET,
        BUGZILLA_BUG_URL,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])

    from_date = datetime.datetime(2015, 1, 1)

    bg = Bugzilla(BUGZILLA_SERVER_URL, user='******', password='******')
    bugs = [bug for bug in bg.fetch(from_date=from_date)]

    self.assertEqual(len(bugs), 2)

    # First fetched bug: id '30', served the 14-event activity page
    self.assertEqual(bugs[0]['data']['bug_id'][0]['__text__'], '30')
    self.assertEqual(len(bugs[0]['data']['activity']), 14)
    self.assertEqual(bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[0]['uuid'], '4b166308f205121bc57704032acdc81b6c9bb8b1')
    self.assertEqual(bugs[0]['updated_on'], 1426868155.0)
    self.assertEqual(bugs[0]['category'], 'bug')
    self.assertEqual(bugs[0]['tag'], BUGZILLA_SERVER_URL)

    # Second fetched bug: id '888', served the empty activity page
    self.assertEqual(bugs[1]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(bugs[1]['data']['activity']), 0)
    self.assertEqual(bugs[1]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(bugs[1]['uuid'], 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(bugs[1]['updated_on'], 1439404330.0)
    self.assertEqual(bugs[1]['category'], 'bug')
    self.assertEqual(bugs[1]['tag'], BUGZILLA_SERVER_URL)

    # Check requests
    # Credentials expected in the body of the login POST
    auth_expected = {
        'Bugzilla_login': ['*****@*****.**'],
        'Bugzilla_password': ['1234'],
        'GoAheadAndLogIn': ['Log in']
    }
    expected = [{
        'ctype': ['xml']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-01-01 00:00:00']
    }, {
        'ctype': ['csv'],
        'limit': ['10000'],
        'order': ['changeddate'],
        'chfieldfrom': ['2015-08-12 18:32:11']
    }, {
        'ctype': ['xml'],
        'id': ['30', '888'],
        'excludefield': ['attachmentdata']
    }, {
        'id': ['30']
    }, {
        'id': ['888']
    }]

    # Check authentication request; it is the first one recorded
    auth_req = requests.pop(0)
    self.assertDictEqual(auth_req.parsed_body, auth_expected)

    # Check the rest of the requests
    self.assertEqual(len(requests), len(expected))

    for i in range(len(expected)):
        self.assertDictEqual(requests[i].querystring, expected[i])
def test_search_fields(self):
    """Test whether the search_fields is properly set"""
    # Every request served by the mock is recorded here; this test
    # only needs the counter for alternating activity pages
    requests = []

    # Buglist CSV pages; the final empty string stops pagination
    bodies_csv = [
        read_file('data/bugzilla/bugzilla_buglist.csv'),
        read_file('data/bugzilla/bugzilla_buglist_next.csv'),
        ""
    ]
    # Server version probe followed by the two bug-details XML pages
    bodies_xml = [
        read_file('data/bugzilla/bugzilla_version.xml', mode='rb'),
        read_file('data/bugzilla/bugzilla_bugs_details.xml', mode='rb'),
        read_file('data/bugzilla/bugzilla_bugs_details_next.xml', mode='rb')
    ]
    # Two activity pages, alternated per request
    bodies_html = [
        read_file('data/bugzilla/bugzilla_bug_activity.html', mode='rb'),
        read_file('data/bugzilla/bugzilla_bug_activity_empty.html', mode='rb')
    ]

    def request_callback(method, uri, headers):
        # Dispatch on URL prefix; anything that is neither the buglist
        # nor the bug-details endpoint gets an activity page
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            body = bodies_html[len(requests) % 2]

        requests.append(httpretty.last_request())

        return 200, headers, body

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(7)
                           ])

    bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, max_bugs_csv=500)
    bugs = [bug for bug in bg.fetch()]

    self.assertEqual(len(bugs), 7)

    # Fix: the original repeated the same five assertions verbatim for
    # each of the seven bugs; the expected (product, component) pair
    # per bug is now data-driven and checked in a subTest loop.
    expected = [
        ('LibreGeoSocial (Android)', 'general'),
        ('LibreGeoSocial (Android)', 'general'),
        ('Bicho', 'General'),
        ('LibreGeoSocial (server)', 'general'),
        ('CVSAnalY', 'general'),
        ('Bicho', 'General'),
        ('CVSAnalY', 'general')
    ]

    for i, (product, component) in enumerate(expected):
        with self.subTest(i=i):
            bug = bugs[i]
            # The search fields must mirror the values parsed from
            # the bug data
            self.assertEqual(bg.metadata_id(bug['data']),
                             bug['search_fields']['item_id'])
            self.assertEqual(bug['data']['product'][0]['__text__'],
                             product)
            self.assertEqual(bug['data']['product'][0]['__text__'],
                             bug['search_fields']['product'])
            self.assertEqual(bug['data']['component'][0]['__text__'],
                             component)
            self.assertEqual(bug['data']['component'][0]['__text__'],
                             bug['search_fields']['component'])
def test_fetch_from_cache(self):
    """Test whether the cache works"""
    # Every request served by the mock is recorded here; the cache
    # phase of the test must not add any new entries
    requests = []

    # Buglist CSV pages; the final empty string stops pagination
    bodies_csv = [
        read_file('data/bugzilla_buglist.csv'),
        read_file('data/bugzilla_buglist_next.csv'),
        ""
    ]
    # Server version probe followed by the two bug-details XML pages
    bodies_xml = [
        read_file('data/bugzilla_version.xml', mode='rb'),
        read_file('data/bugzilla_bugs_details.xml', mode='rb'),
        read_file('data/bugzilla_bugs_details_next.xml', mode='rb')
    ]
    # Two activity pages, alternated per request (see callback below)
    bodies_html = [
        read_file('data/bugzilla_bug_activity.html', mode='rb'),
        read_file('data/bugzilla_bug_activity_empty.html', mode='rb')
    ]

    def request_callback(method, uri, headers):
        # Dispatch on URL prefix; anything that is neither the buglist
        # nor the bug-details endpoint gets an activity page,
        # alternating on the number of requests recorded so far
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            body = bodies_csv.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            body = bodies_xml.pop(0)
        else:
            body = bodies_html[len(requests) % 2]

        requests.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(3)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(7)
                           ])

    # First, we fetch the bugs from the server, storing them
    # in a cache
    cache = Cache(self.tmp_path)
    bg = Bugzilla(BUGZILLA_SERVER_URL, max_bugs=5, cache=cache)

    bugs = [bug for bug in bg.fetch()]
    self.assertEqual(len(requests), 13)

    # Now, we get the bugs from the cache.
    # The contents should be the same and there won't be
    # any new request to the server
    cached_bugs = [bug for bug in bg.fetch_from_cache()]
    self.assertEqual(len(cached_bugs), len(bugs))

    self.assertEqual(len(cached_bugs), 7)

    # First cached bug: id '15' with no activity events
    self.assertEqual(cached_bugs[0]['data']['bug_id'][0]['__text__'], '15')
    self.assertEqual(len(cached_bugs[0]['data']['activity']), 0)
    self.assertEqual(cached_bugs[0]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(cached_bugs[0]['uuid'],
                     '5a8a1e25dfda86b961b4146050883cbfc928f8ec')
    self.assertEqual(cached_bugs[0]['updated_on'], 1248276445.0)
    self.assertEqual(cached_bugs[0]['category'], 'bug')
    self.assertEqual(cached_bugs[0]['tag'], BUGZILLA_SERVER_URL)

    # Last cached bug: id '888' with 14 activity events
    self.assertEqual(cached_bugs[6]['data']['bug_id'][0]['__text__'], '888')
    self.assertEqual(len(cached_bugs[6]['data']['activity']), 14)
    self.assertEqual(cached_bugs[6]['origin'], BUGZILLA_SERVER_URL)
    self.assertEqual(cached_bugs[6]['uuid'],
                     'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c')
    self.assertEqual(cached_bugs[6]['updated_on'], 1439404330.0)
    self.assertEqual(cached_bugs[6]['category'], 'bug')
    self.assertEqual(cached_bugs[6]['tag'], BUGZILLA_SERVER_URL)

    # Reading from the cache must not have hit the server again
    self.assertEqual(len(requests), 13)
def test_fetch_from_date(self):
    """Test whether a list of bugs is returned from a given date"""
    seen_requests = []

    # Canned responses per endpoint; the empty CSV page ends pagination
    csv_pages = [read_file('data/bugzilla_buglist_next.csv'), ""]
    xml_pages = [
        read_file('data/bugzilla_version.xml', mode='rb'),
        read_file('data/bugzilla_bugs_details_next.xml', mode='rb')
    ]
    html_pages = [
        read_file('data/bugzilla_bug_activity.html', mode='rb'),
        read_file('data/bugzilla_bug_activity_empty.html', mode='rb')
    ]

    def request_callback(method, uri, headers):
        # Pick the response queue by URL prefix; activity requests
        # alternate between the two HTML fixtures
        if uri.startswith(BUGZILLA_BUGLIST_URL):
            payload = csv_pages.pop(0)
        elif uri.startswith(BUGZILLA_BUG_URL):
            payload = xml_pages.pop(0)
        else:
            payload = html_pages[len(seen_requests) % 2]

        seen_requests.append(httpretty.last_request())

        return (200, headers, payload)

    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUGLIST_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(
        httpretty.GET,
        BUGZILLA_BUG_URL,
        responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           BUGZILLA_BUG_ACTIVITY_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])

    from_date = datetime.datetime(2015, 1, 1)

    bg = Bugzilla(BUGZILLA_SERVER_URL)
    bugs = list(bg.fetch(from_date=from_date))

    self.assertEqual(len(bugs), 2)

    # Expected metadata per bug: (bug_id, n_activity_events, uuid,
    # updated_on)
    cases = [
        ('30', 14, '4b166308f205121bc57704032acdc81b6c9bb8b1',
         1426868155.0),
        ('888', 0, 'b4009442d38f4241a4e22e3e61b7cd8ef5ced35c',
         1439404330.0)
    ]

    for bug, (bug_id, n_events, uuid, updated_on) in zip(bugs, cases):
        self.assertEqual(bug['data']['bug_id'][0]['__text__'], bug_id)
        self.assertEqual(len(bug['data']['activity']), n_events)
        self.assertEqual(bug['origin'], BUGZILLA_SERVER_URL)
        self.assertEqual(bug['uuid'], uuid)
        self.assertEqual(bug['updated_on'], updated_on)
        self.assertEqual(bug['category'], 'bug')
        self.assertEqual(bug['tag'], BUGZILLA_SERVER_URL)

    # Check requests
    expected = [
        {'ctype': ['xml']},
        {'ctype': ['csv'],
         'limit': ['10000'],
         'order': ['changeddate'],
         'chfieldfrom': ['2015-01-01 00:00:00']},
        {'ctype': ['csv'],
         'limit': ['10000'],
         'order': ['changeddate'],
         'chfieldfrom': ['2015-08-12 18:32:11']},
        {'ctype': ['xml'],
         'id': ['30', '888'],
         'excludefield': ['attachmentdata']},
        {'id': ['30']},
        {'id': ['888']}
    ]

    self.assertEqual(len(seen_requests), len(expected))

    for req, querystring in zip(seen_requests, expected):
        self.assertDictEqual(req.querystring, querystring)
def setUp(self):
    super().setUp()
    # Backend under test; max_bugs/max_bugs_csv mirror the values
    # used by the fetch tests so the canned fixtures line up
    self.backend = Bugzilla(BUGZILLA_SERVER_URL,
                            max_bugs=5,
                            max_bugs_csv=500,
                            archive=self.archive)
"--use-created-date", help="Use created date instead of update date", type=lambda s: s.lower() in ['true', 't', 'yes', 'y', '1']) parser.add_argument("-D", "--updated-diff", help="If >=0 skip objects where created + diff > updated", type=int, default=-1) args = parser.parse_args() # print(args) # print ((args.date_from, args.date_to)) bugzilla = Bugzilla(args.url, user=args.user, password=args.password, max_bugs=200, max_bugs_csv=10000, tag=None, archive=None) # print(bugzilla) oids = set() for bug in bugzilla.fetch(category=args.category, from_date=args.date_from): # print(bug) # print(bug.keys()) # print(bug['data'].keys()) product = bug['data']['product'][0]['__text__'] if args.product and args.product != product: continue # dtu = dateutil.parser.parse(bug['data']['delta_ts'][0]['__text__']) dtu = utc.localize(
def test_has_caching(self):
    """Test if it returns False when has_caching is called"""
    caches = Bugzilla.has_caching()
    self.assertEqual(caches, False)