Example #1
def results_gs_url(self):
    # Method excerpt: builds the gs:// URL of a report's summary file from
    # the results bucket and the report's SHA-based summary path.
    return 'gs://{}/{}'.format(config.results_bucket(),
                               self.report.sha_summary_path)
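A minimal sketch of the URL this helper produces; the bucket name and summary path below are made up for illustration (stand-ins for config.results_bucket() and report.sha_summary_path):

results_bucket = 'wptd'  # stand-in for config.results_bucket()
sha_summary_path = '0123abcd/chrome-70-summary.json.gz'  # made-up path
print('gs://{}/{}'.format(results_bucket, sha_summary_path))
# gs://wptd/0123abcd/chrome-70-summary.json.gz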
Example #2
import os
import shutil
import sys
import tempfile
import traceback

# config, gsutil and wptreport, along with the _-prefixed helpers
# (_process_chunk, _find_run_by_raw_results, _get_uploader_password,
# _after_new_run, _log), come from the surrounding wpt.fyi
# results-processor module.
def process_report(params):
    # Mandatory fields:
    uploader = params['uploader']
    gcs_paths = params.getlist('gcs')
    result_type = params['type']
    # Optional fields:
    callback_url = params.get('callback_url')
    labels = params.get('labels', '')

    assert (
        (result_type == 'single' and len(gcs_paths) == 1) or
        (result_type == 'multiple' and len(gcs_paths) > 1)
    )

    report = wptreport.WPTReport()
    try:
        for gcs_path in gcs_paths:
            _process_chunk(report, gcs_path)
        # To be deprecated once all reports have all the required metadata.
        report.update_metadata(
            revision=params.get('revision'),
            browser_name=params.get('browser_name'),
            browser_version=params.get('browser_version'),
            os_name=params.get('os_name'),
            os_version=params.get('os_version'),
        )
        report.finalize()
    except wptreport.WPTReportError:
        etype, e, tb = sys.exc_info()
        e.path = str(gcs_paths)
        # This will register an error in Stackdriver.
        traceback.print_exception(etype, e, tb)
        # The input is invalid and there is no point in retrying, so we
        # return an empty (but successful) response to tell TaskQueue to
        # drop the task.
        return ''

    resp = "{} results loaded from: {}\n".format(
        len(report.results), ' '.join(gcs_paths))

    raw_results_gs_url = 'gs://{}/{}/report.json'.format(
        config.raw_results_bucket(), report.sha_product_path)
    raw_results_url = gsutil.gs_to_public_url(raw_results_gs_url)

    # Abort early if the result already exists in Datastore. This is safe
    # because raw_results_url contains both the full revision and the
    # checksum of the report content, which together are unique enough to
    # use as a UID.
    if _find_run_by_raw_results(raw_results_url):
        _log.warning(
            'Skipping the task because RawResultsURL already exists: %s',
            raw_results_url)
        return ''

    if result_type == 'single':
        # If the original report isn't chunked, we store it directly,
        # avoiding the round trip of serializing it back. gcs_paths start
        # with '/', so 'gs:/' + path yields a full gs:// URL.
        gsutil.copy('gs:/' + gcs_paths[0], raw_results_gs_url)
    else:
        with tempfile.NamedTemporaryFile(suffix='.json.gz') as temp:
            report.serialize_gzip(temp.name)
            gsutil.copy(temp.name, raw_results_gs_url, gzipped=True)

    tempdir = tempfile.mkdtemp()
    try:
        report.populate_upload_directory(output_dir=tempdir)
        # First copy [ID]-summary.json.gz to /wptd/[SHA]/[ID]-summary.json.gz.
        results_gs_url = 'gs://{}/{}'.format(
            config.results_bucket(), report.sha_summary_path)
        gsutil.copy(
            os.path.join(tempdir, report.sha_summary_path),
            results_gs_url,
            gzipped=True)
        # Now /wptd/[SHA] is guaranteed to exist. According to `gsutil cp
        # --help`, copying [ID] into /wptd/[SHA] creates /wptd/[SHA]/[ID].
        gsutil.copy(
            os.path.join(tempdir, report.sha_product_path),
            'gs://{}/{}'.format(config.results_bucket(),
                                report.run_info['revision']),
            gzipped=True, quiet=True)
        resp += "Uploaded to {}\n".format(results_gs_url)
    finally:
        shutil.rmtree(tempdir)

    # Check again because the upload takes a long time.
    # Datastore does not support a query-and-put transaction, so this is only a
    # best effort to avoid duplicate runs.
    if _find_run_by_raw_results(raw_results_url):
        _log.warning(
            'Skipping the task because RawResultsURL already exists: %s',
            raw_results_url)
        return ''

    # Authenticate as "_processor" for create-test-run API.
    secret = _get_uploader_password('_processor')
    test_run_id = wptreport.create_test_run(
        report,
        labels,
        uploader,
        secret,
        gsutil.gs_to_public_url(results_gs_url),
        raw_results_url,
        callback_url)
    assert test_run_id

    success = _after_new_run(report, test_run_id)
    if success:
        resp += "Successfully ran hooks: {}\n".format(', '.join(success))

    return resp
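A hypothetical invocation sketch: process_report reads params with both __getitem__ and getlist, so a werkzeug MultiDict (the type Flask uses for request.form) is a convenient stand-in; the uploader name, bucket and chunk paths below are made up:

from werkzeug.datastructures import MultiDict

params = MultiDict([
    ('uploader', 'blade-runner'),                  # made-up uploader name
    ('type', 'multiple'),
    ('gcs', '/wptd-results-buffer/chunk-1.json'),  # made-up GCS paths
    ('gcs', '/wptd-results-buffer/chunk-2.json'),
    ('labels', 'experimental'),
])
resp = process_report(params)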
Example #3
import os
import shutil
import sys
import tempfile
import time
import traceback
from http import HTTPStatus

import flask
import requests
from google.cloud import datastore

# config, gsutil, wptreport, app, TIMESTAMP_FILE and the _-prefixed helpers
# are defined elsewhere in the wpt.fyi results-processor module.
def task_handler():
    _atomic_write(TIMESTAMP_FILE, str(time.time()))

    params = flask.request.form
    # Mandatory fields:
    uploader = params['uploader']
    gcs_paths = params.getlist('gcs')
    result_type = params['type']
    # Optional fields:
    labels = params.get('labels', '')

    assert ((result_type == 'single' and len(gcs_paths) == 1)
            or (result_type == 'multiple' and len(gcs_paths) > 1))

    report = wptreport.WPTReport()
    try:
        for gcs_path in gcs_paths:
            _process_chunk(report, gcs_path)
        # To be deprecated once all reports have all the required metadata.
        report.update_metadata(
            revision=params.get('revision'),
            browser_name=params.get('browser_name'),
            browser_version=params.get('browser_version'),
            os_name=params.get('os_name'),
            os_version=params.get('os_version'),
        )
        report.finalize()
    except wptreport.WPTReportError:
        etype, e, tb = sys.exc_info()
        e.path = str(gcs_paths)
        # This will register an error in Stackdriver.
        traceback.print_exception(etype, e, tb)
        # The input is invalid and there is no point in retrying, so we
        # return a 2XX status to tell TaskQueue to drop the task.
        return ('', HTTPStatus.NO_CONTENT)

    resp = "{} results loaded from {}\n".format(len(report.results),
                                                str(gcs_paths))

    raw_results_gcs_path = '/{}/{}/report.json'.format(
        config.raw_results_bucket(), report.sha_product_path)
    if result_type == 'single':
        # If the original report isn't chunked, we store it directly,
        # avoiding the round trip of serializing it back. gcs_paths start
        # with '/', so 'gs:/' + path yields a full gs:// URL.
        gsutil.copy('gs:/' + gcs_paths[0], 'gs:/' + raw_results_gcs_path)
    else:
        with tempfile.NamedTemporaryFile(suffix='.json.gz') as temp:
            report.serialize_gzip(temp.name)
            gsutil.copy(temp.name, 'gs:/' + raw_results_gcs_path, gzipped=True)

    tempdir = tempfile.mkdtemp()
    try:
        report.populate_upload_directory(output_dir=tempdir)
        results_gcs_path = '/{}/{}'.format(config.results_bucket(),
                                           report.sha_summary_path)
        gsutil.copy(os.path.join(tempdir, report.sha_summary_path),
                    'gs:/' + results_gcs_path,
                    gzipped=True)
        # TODO(Hexcles): Consider switching to gsutil.copy.
        gsutil.rsync_gzip(
            os.path.join(tempdir, report.sha_product_path),
            # The trailing slash is crucial (wpt.fyi#275).
            'gs://{}/{}/'.format(config.results_bucket(),
                                 report.sha_product_path),
            quiet=True)
        resp += "Uploaded to gs:/{}\n".format(results_gcs_path)
    finally:
        shutil.rmtree(tempdir)

    # Authenticate as "_processor" for create-test-run API.
    ds = datastore.Client()
    secret = ds.get(ds.key('Uploader', '_processor'))['Password']
    test_run_id = wptreport.create_test_run(report, labels, uploader, secret,
                                            results_gcs_path,
                                            raw_results_gcs_path)
    assert test_run_id

    # Authenticate as "_spanner" for push-to-spanner API.
    secret = ds.get(ds.key('Uploader', '_spanner'))['Password']
    response = requests.put('%s/api/spanner_push_run?run_id=%d' %
                            (config.project_baseurl(), test_run_id),
                            auth=('_spanner', secret))
    if not response.ok:
        app.logger.error('Bad status code from push-to-spanner API: %d' %
                         response.status_code)

    return (resp, HTTPStatus.CREATED)
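task_handler begins by touching TIMESTAMP_FILE via _atomic_write, a helper not shown in this excerpt. A minimal sketch of such a helper, assuming the common write-to-temp-then-rename pattern (an assumption, not the actual wpt.fyi implementation):

import os
import tempfile

def _atomic_write(path, content):
    # Assumed implementation: write to a temp file in the same directory,
    # then atomically replace the target so readers never observe a
    # partially written file.
    fd, temp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(content)
        os.replace(temp_path, path)
    except BaseException:
        os.remove(temp_path)
        raise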
Example #4
import os
import re
import shutil
import tempfile
import time

import flask
from google.cloud import datastore, storage

# config, gsutil, wptreport, TIMESTAMP_FILE and _atomic_write are defined
# elsewhere in the wpt.fyi results-processor module.
def task_handler():
    _atomic_write(TIMESTAMP_FILE, str(time.time()))

    params = flask.request.form
    # Mandatory fields:
    uploader = params['uploader']
    gcs_path = params['gcs']
    result_type = params['type']
    # Optional fields:
    labels = params.get('labels', '')

    # TODO(Hexcles): Support multiple results.
    assert result_type == 'single'

    # gcs_path has the form '/<bucket>/<object path>'; split off the bucket
    # name from the blob path.
    match = re.match(r'/([^/]+)/(.*)', gcs_path)
    assert match
    bucket_name, blob_path = match.groups()

    gcs = storage.Client()
    bucket = gcs.get_bucket(bucket_name)
    blob = bucket.blob(blob_path)

    with tempfile.NamedTemporaryFile(suffix='.json') as temp:
        blob.download_to_file(temp)
        temp.seek(0)
        report = wptreport.WPTReport()
        report.load_json(temp)

    # To be deprecated once all reports have all the required metadata.
    report.update_metadata(
        revision=params.get('revision'),
        browser_name=params.get('browser_name'),
        browser_version=params.get('browser_version'),
        os_name=params.get('os_name'),
        os_version=params.get('os_version'),
    )

    resp = "{} results loaded from {}\n".format(len(report.results), gcs_path)

    raw_results_gcs_path = '/{}/{}/report.json'.format(
        config.raw_results_bucket(), report.sha_product_path)
    gsutil.copy('gs:/' + gcs_path, 'gs:/' + raw_results_gcs_path)

    tempdir = tempfile.mkdtemp()
    try:
        report.populate_upload_directory(output_dir=tempdir)
        results_gcs_path = '/{}/{}'.format(config.results_bucket(),
                                           report.sha_summary_path)
        gsutil.copy(os.path.join(tempdir, report.sha_summary_path),
                    'gs:/' + results_gcs_path,
                    gzipped=True)
        gsutil.rsync_gzip(
            os.path.join(tempdir, report.sha_product_path),
            # The trailing slash is crucial (wpt.fyi#275).
            'gs://{}/{}/'.format(config.results_bucket(),
                                 report.sha_product_path),
            quiet=True)
        resp += "Uploaded to gs:/{}\n".format(results_gcs_path)
    finally:
        shutil.rmtree(tempdir)

    ds = datastore.Client()
    secret = ds.get(ds.key('Uploader', '_processor'))['Password']
    wptreport.create_test_run(report, labels, uploader, secret,
                              results_gcs_path, raw_results_gcs_path)

    return resp
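For reference, a quick demonstration of how that regex splits a GCS path into bucket and object names (the path below is made up):

import re

match = re.match(r'/([^/]+)/(.*)', '/wptd-results/0123abcd/report.json')
bucket_name, blob_path = match.groups()
assert bucket_name == 'wptd-results'
assert blob_path == '0123abcd/report.json'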