def bugs(mock_bugzilla_api_request):
    """Fixture: ingest the mocked Bugzilla payload and return every cached bug.

    Depends on ``mock_bugzilla_api_request`` so the ETL run hits canned
    data instead of the real Bugzilla REST API.
    """
    from treeherder.etl.bugzilla import BzApiBugProcess
    from treeherder.model.models import Bugscache

    BzApiBugProcess().run()
    return Bugscache.objects.all()
def test_bz_reopen_bugs(request, mock_bugzilla_reopen_request, client, test_job, test_user, bugs):
    """
    Test expected bugs get reopened.

    Associates a mix of INCOMPLETE and non-INCOMPLETE bugs with a job, runs
    the Bugzilla ETL process, and verifies that exactly the expected reopen
    PUT payloads were recorded by the mocked request layer.
    """
    import json

    client.force_authenticate(user=test_user)

    # NOTE: the original dead assignment `bug = bugs[0]` was removed; its
    # value was never read.  Distinct comprehension variables avoid
    # shadowing the `bug` loop variable below.
    incomplete_bugs = [b for b in bugs if b.resolution == "INCOMPLETE"]
    not_incomplete_bugs = [b for b in bugs if b.resolution != "INCOMPLETE"]

    # Attach two bugs of each kind to the job; only INCOMPLETE ones are
    # expected to trigger a reopen attempt.
    for bug in [
        not_incomplete_bugs[0],
        not_incomplete_bugs[2],
        incomplete_bugs[0],
        incomplete_bugs[2],
    ]:
        submit_obj = {"job_id": test_job.id, "bug_id": bug.id, "type": "manual"}
        client.post(
            reverse("bug-job-map-list", kwargs={"project": test_job.repository.name}),
            data=submit_obj,
        )

    process = BzApiBugProcess()
    process.run()

    # The mocked reopen endpoint records each attempted request in the
    # pytest cache keyed by URL.
    reopened_bugs = request.config.cache.get('reopened_bugs', None)

    EXPECTED_REOPEN_ATTEMPTS = {
        'https://thisisnotbugzilla.org/rest/bug/202': json.dumps({
            "status": "REOPENED",
            "comment": {
                "body": "New failure instance: https://treeherder.mozilla.org/logviewer?job_id=1&repo=test_treeherder_jobs"
            },
            "comment_tags": "treeherder",
        }),
        'https://thisisnotbugzilla.org/rest/bug/404': json.dumps({
            "status": "REOPENED",
            "comment": {
                "body": "New failure instance: https://treeherder.mozilla.org/logviewer?job_id=1&repo=test_treeherder_jobs"
            },
            "comment_tags": "treeherder",
        }),
    }
    assert reopened_bugs == EXPECTED_REOPEN_ATTEMPTS
def test_bz_api_process(mock_extract, refdata):
    """Bug ingestion inserts one row per bug and re-ingestion is idempotent."""
    process = BzApiBugProcess()
    process.run()

    row_data = refdata.dhub.execute(
        proc='refdata_test.selects.test_bugscache', return_type='tuple'
    )
    # the number of rows inserted should equal to the number of bugs
    assert len(row_data) == 10

    # test that a second ingestion of the same bugs doesn't insert new rows.
    # BUG FIX: the original re-asserted against the stale `row_data` tuple
    # fetched before the second run, so the check was trivially true; the
    # query must be re-executed to observe the post-second-run state.
    process.run()
    row_data = refdata.dhub.execute(
        proc='refdata_test.selects.test_bugscache', return_type='tuple'
    )
    assert len(row_data) == 10
def test_bz_api_process(mock_bugzilla_api_request):
    """Ingesting the mocked bug payload twice leaves exactly one row per bug."""
    ingest = BzApiBugProcess().run
    # First pass inserts 17 rows (one per bug); the second pass re-ingests
    # the identical payload and must be a no-op.
    for _ in range(2):
        ingest()
        assert Bugscache.objects.count() == 17
def test_import(mock_bugscache_bugzilla_request):
    """
    Test importing bug data and building duplicate to open bug relationships.
    """
    from treeherder.etl.bugzilla import BzApiBugProcess

    BzApiBugProcess().run()

    # Spot-check one duplicate bug's imported fields.
    bug = Bugscache.objects.get(id=1652208)
    assert bug.status == "RESOLVED"
    assert bug.resolution == "DUPLICATE"
    assert bug.crash_signature == "[@ some::mock_signature]"
    assert (
        bug.summary
        == "Intermittent dom/canvas/test/webgl-conf/generated/test_2_conformance__ogles__GL__swizzlers__swizzlers_105_to_112.html | Test timed out."
    )
    assert bug.whiteboard == "[we have to do something about this][it's urgent]"
    assert bug.keywords == "intermittent-failure"
    assert bug.dupe_of == 1662628

    # key: open bug, values: duplicates
    EXPECTED_BUG_DUPE_OF_DATA = {
        1392106: [1442991, 1443801],
        1411358: [1204281],
        1662628: [1652208, 1660324, 1660719, 1660765, 1663081, 1663118, 1702255],
        1736534: [],
    }
    for open_bug, duplicates in EXPECTED_BUG_DUPE_OF_DATA.items():
        # An open bug must not itself be marked as a duplicate.
        assert Bugscache.objects.get(id=open_bug).dupe_of is None
        assert set(
            Bugscache.objects.filter(dupe_of=open_bug).values_list('id', flat=True)
        ) == set(duplicates)

    # Each open bug plus all of its duplicates should have been imported.
    # Use a generator (not a throwaway list) for sum(), and COUNT(*) via
    # .count() instead of materializing every row with len(...all()).
    EXPECTED_BUG_COUNT = sum(
        1 + len(duplicates) for duplicates in EXPECTED_BUG_DUPE_OF_DATA.values()
    )
    assert Bugscache.objects.count() == EXPECTED_BUG_COUNT
def test_bz_api_process(mock_extract, refdata):
    """Bug ingestion inserts one row per bug and re-ingestion is idempotent."""
    process = BzApiBugProcess()
    process.run()
    row_data = refdata.dhub.execute(
        proc='refdata_test.selects.test_bugscache', return_type='tuple'
    )
    # the number of rows inserted should equal to the number of bugs
    assert len(row_data) == 15

    # test that a second ingestion of the same bugs doesn't insert new rows.
    # BUG FIX: the original re-asserted against the stale `row_data` tuple
    # from before the second run (trivially true); re-execute the query.
    # disconnect() is consequently deferred until the connection is no
    # longer needed.
    process.run()
    row_data = refdata.dhub.execute(
        proc='refdata_test.selects.test_bugscache', return_type='tuple'
    )
    refdata.disconnect()
    assert len(row_data) == 15
def fetch_bugs():
    """Run a single BzApiBug ingestion pass."""
    BzApiBugProcess().run()
def handle(self, *args, **options):
    """Management-command entry point: perform one Bugzilla bug ingestion."""
    BzApiBugProcess().run()