Example no. 1
0
def test_tracker_is_deleted_when_task_is_complete_but_s3_url_not_present(app, client):
    """A SUCCESS tracker with no s3 url should be replaced by a fresh PENDING one."""
    frozen_now = datetime.datetime(2015, 7, 14, 23, 19, 42, tzinfo=pytz.UTC)  # freeze time
    stale_time = frozen_now - datetime.timedelta(seconds=TASK_TIME_OUT + 10)
    stale_task_id = "mozilla-central-9213957d166d.tar.gz_testing_mozharness"
    create_fake_tracker_row(app, stale_task_id, created_at=stale_time, state="SUCCESS")
    setup_buckets(app, cfg)
    with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as mocked_get, \
            mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as mocked_head, \
            mock.patch('relengapi.blueprints.archiver.now') as mocked_now:
        mocked_now.return_value = frozen_now
        # stub out hg.m.o entirely; the test only cares that a subprocess is
        # started and a 202 accepted comes back
        mocked_get.return_value = fake_200_response()
        mocked_head.return_value = fake_200_response()

        with app.app_context():
            stale_tracker = tables.ArchiverTask.query.filter(
                tables.ArchiverTask.task_id == stale_task_id
            ).first()
            eq_(stale_tracker.created_at, stale_time,
                "old_task tracker created_at column doesn't match expected")
            # hit the api with an id that matches a tracker still in the db.
            # The task shows as complete yet has no s3 url, so the stale tracker
            # must be deleted and a new one created along with a new celery task.
            client.get('/archiver/hgmo/mozilla-central/9213957d166d?'
                       'subdir=testing/mozharness&preferred_region=us-west-2')
            fresh_tracker = tables.ArchiverTask.query.filter(
                tables.ArchiverTask.task_id == stale_task_id
            ).first()
            eq_(fresh_tracker.created_at, frozen_now, "old completed tracker was never re-created")
            eq_(fresh_tracker.state, "PENDING", "old completed tracker was never re-created")
def test_tracker_added_when_celery_task_is_created(app, client):
    """Kicking off a celery task should leave a tracker row with matching columns."""
    setup_buckets(app, cfg)
    frozen_now = datetime.datetime(2015, 7, 14, 23, 19, 42, tzinfo=pytz.UTC)  # freeze time
    pending_expiry = frozen_now + datetime.timedelta(seconds=60)
    tracker_id = "mozilla-central-9213957d166d.tar.gz_testing_mozharness"
    tracker_s3_key = "mozilla-central-9213957d166d.tar.gz/testing/mozharness"
    src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(
        repo='mozilla-central', rev='9213957d166d', suffix='tar.gz',
        subdir='testing/mozharness')
    with mock.patch('relengapi.blueprints.archiver.now') as mocked_now, \
            mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as mocked_get, \
            mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as mocked_head:
        # stub out hg.m.o entirely; the test only cares that a subprocess is
        # started and a 202 accepted comes back
        mocked_get.return_value = fake_200_response()
        mocked_head.return_value = fake_200_response()
        mocked_now.return_value = frozen_now
        client.get('/archiver/hgmo/mozilla-central/9213957d166d?'
                   'subdir=testing/mozharness&preferred_region=us-west-2')
        with app.app_context():
            tracker = tables.ArchiverTask.query.filter(
                tables.ArchiverTask.task_id == tracker_id
            ).first()
            eq_(tracker.task_id, tracker_id, "tracker id doesn't match task")
            eq_(tracker.s3_key, tracker_s3_key, "tracker s3_key doesn't match task")
            eq_(tracker.created_at, frozen_now, "tracker created_at doesn't match task")
            eq_(tracker.pending_expires_at, pending_expiry,
                "tracker pending_expires_at doesn't match task")
            eq_(tracker.src_url, src_url, "tracker src_url doesn't match task")
Example no. 3
0
def test_successful_upload_archive_response(app):
    """create_and_upload_archive should succeed and report an s3 url for every region."""
    setup_buckets(app, cfg)
    repo, rev, subdir, suffix = 'mozilla-central', '203e1025a826', 'testing/mozharness', 'tar.gz'
    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
    if subdir:
        key = '{}/{}'.format(key, subdir)
    src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(
        repo=repo, rev=rev, suffix=suffix, subdir='testing/mozharness')
    with app.app_context():
        with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as mocked_get, \
                mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as mocked_head:
            # no real network traffic; both probes report success
            mocked_get.return_value = fake_200_response()
            mocked_head.return_value = fake_200_response()
            task = create_and_upload_archive.apply_async(
                args=[src_url, key], task_id=key.replace('/', '_'))
    regions = list(cfg['ARCHIVER_S3_BUCKETS'])
    # every configured region must have received an s3 url
    region_urls = [task.info.get("s3_urls", {}).get(region) for region in regions]
    assert all(region_urls), "s3 urls not uploaded for each region!"
    assert task.info.get('src_url') == src_url, "src url doesn't match upload response!"
    assert task.state == "SUCCESS", "completed task's state isn't SUCCESS!"
Example no. 4
0
def test_tracker_is_deleted_when_task_is_complete_but_s3_url_not_present(app, client):
    """A tracker marked SUCCESS but missing its s3 url is purged and recreated."""
    current_time = datetime.datetime(2015, 7, 14, 23, 19, 42, tzinfo=pytz.UTC)  # freeze time
    expired_time = current_time - datetime.timedelta(seconds=TASK_TIME_OUT + 10)
    task_id = "mozilla-central-9213957d166d.tar.gz_testing_mozharness"
    create_fake_tracker_row(app, task_id, created_at=expired_time, state="SUCCESS")
    setup_buckets(app, cfg)
    patch_get = mock.patch("relengapi.blueprints.archiver.tasks.requests.get")
    patch_head = mock.patch("relengapi.blueprints.archiver.tasks.requests.head")
    patch_now = mock.patch('relengapi.blueprints.archiver.now')
    with patch_get as get, patch_head as head, patch_now as time_traveller:
        time_traveller.return_value = current_time
        # fake out hg.m.o: we only need a subprocess to start and a 202 accepted
        get.return_value = fake_200_response()
        head.return_value = fake_200_response()

        with app.app_context():
            before = tables.ArchiverTask.query.filter(
                tables.ArchiverTask.task_id == task_id
            ).first()
            eq_(before.created_at, expired_time,
                "old_task tracker created_at column doesn't match expected")
            # request an archive matching the stale tracker. Because the task is
            # complete without an s3 url, the tracker should be deleted and a new
            # one created along with a new celery task.
            client.get('/archiver/hgmo/mozilla-central/9213957d166d?'
                       'subdir=testing/mozharness&preferred_region=us-west-2')
            after = tables.ArchiverTask.query.filter(
                tables.ArchiverTask.task_id == task_id
            ).first()
            eq_(after.created_at, current_time, "old completed tracker was never re-created")
            eq_(after.state, "PENDING", "old completed tracker was never re-created")
Example no. 5
0
def test_invalid_hg_url(app):
    """A 404 from the HEAD probe must surface as a 'Url not found.' task status."""
    setup_buckets(app, cfg)
    repo, rev, suffix = "mozilla-central", "fakeRev", "tar.gz"
    key = "{repo}-{rev}.{suffix}".format(repo=repo, rev=rev, suffix=suffix)
    src_url = cfg["ARCHIVER_HGMO_URL_TEMPLATE"].format(
        repo=repo, rev=rev, suffix=suffix, subdir="testing/mozharness")
    with app.app_context():
        with mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as mocked_head:
            # the hg.m.o probe reports the revision as missing
            mocked_head.return_value = fake_404_response()
            task = create_and_upload_archive.apply_async(
                args=[src_url, key], task_id=key.replace("/", "_"))
    assert "Url not found." in task.info.get("status", {}), "invalid hg url was not caught!"
def test_accepted_response_when_missing_s3_key(app, client):
    """Requesting an archive with no s3 key present should return 202 Accepted."""
    setup_buckets(app, cfg)
    with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as mocked_get, \
            mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as mocked_head:
        # stub out hg.m.o; only the subprocess start and the 202 matter here
        mocked_get.return_value = fake_200_response()
        mocked_head.return_value = fake_200_response()
        response = client.get('/archiver/hgmo/mozilla-central/9213957d166d?'
                              'subdir=testing/mozharness&preferred_region=us-west-2')
    eq_(response.status_code, 202, response.status)
Example no. 7
0
def test_invalid_hg_url(app):
    """A 404 from the GET must be reported as an invalid src_url in the task status."""
    setup_buckets(app, cfg)
    repo, rev, suffix = 'mozilla-central', 'fakeRev', 'tar.gz'
    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
    src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(
        repo=repo, rev=rev, suffix=suffix, subdir='testing/mozharness')
    with app.app_context():
        with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as mocked_get:
            # hg.m.o rejects the fake revision
            mocked_get.return_value = fake_404_response()
            task = create_and_upload_archive.apply_async(
                args=[src_url, key], task_id=key.replace('/', '_'))
    assert "Could not get a valid response from src_url" in task.info.get('status', {}), \
        "invalid hg url was not caught!"
def test_redirect_response_when_found_s3_key(app, client):
    """When the archive already exists in s3, the endpoint should 302-redirect."""
    setup_buckets(app, cfg)
    repo, rev, subdir, suffix = 'mozilla-central', '203e1025a826', 'testing/mozharness', 'tar.gz'
    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
    if subdir:
        key = '{}/{}'.format(key, subdir)
    # pre-populate s3 so the lookup succeeds
    create_s3_items(app, cfg, key=key)

    archive_url = '/archiver/hgmo/{repo}/{rev}?subdir={subdir}&suffix={suffix}'.format(
        rev=rev, repo=repo, subdir=subdir, suffix=suffix
    )
    response = client.get(archive_url)
    eq_(response.status_code, 302, response.status)
Example no. 9
0
def test_successful_upload_archive_response(app):
    """A successful archive upload reports SUCCESS, the src url, and per-region s3 urls."""
    setup_buckets(app, cfg)
    repo, rev, subdir, suffix = "mozilla-central", "203e1025a826", "testing/mozharness", "tar.gz"
    key = "{repo}-{rev}.{suffix}".format(repo=repo, rev=rev, suffix=suffix)
    if subdir:
        key = "{}/{}".format(key, subdir)
    src_url = cfg["ARCHIVER_HGMO_URL_TEMPLATE"].format(
        repo=repo, rev=rev, suffix=suffix, subdir="testing/mozharness")
    with app.app_context():
        with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as mocked_get, \
                mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as mocked_head:
            # both network probes succeed without touching hg.m.o
            mocked_get.return_value = fake_200_response()
            mocked_head.return_value = fake_200_response()
            task = create_and_upload_archive.apply_async(
                args=[src_url, key], task_id=key.replace("/", "_"))
    # each configured region must carry an uploaded s3 url
    uploaded = [task.info.get("s3_urls", {}).get(region)
                for region in cfg["ARCHIVER_S3_BUCKETS"]]
    assert all(uploaded), "s3 urls not uploaded for each region!"
    assert task.info.get("src_url") == src_url, "src url doesn't match upload response!"
    assert task.state == "SUCCESS", "completed task's state isn't SUCCESS!"