Example #1
def latest_run(model, valid_time):
    with tracer.span(name="download_content"):
        download_content()

    # Groups: (1) run hour, (2) run datetime YYYYMMDDHH, (3) forecast timestep in hours
    pattern = re.compile(r"\./%s"
                         r"/grib"
                         r"/(\d{2})"
                         r"/t"
                         r"/icon-eu_europe_regular-lat-lon_model-level_(\d{10})_(\d{3})_1_T\.grib2\.bz2" % model)

    max_t: int = 0
    result = None

    with tracer.span(name="parse_content"):
        with open('tmp/content.log') as content_log:
            for line in content_log:
                for match in re.finditer(pattern, line):
                    matches = match.groups()

                    # Run datetime plus forecast step gives the time the file is valid for
                    match_valid_at = datetime.datetime.strptime(matches[1], "%Y%m%d%H")
                    match_valid_at = pytz.timezone('UTC').localize(match_valid_at)
                    match_valid_at = match_valid_at + datetime.timedelta(hours=int(matches[2]))

                    delta_t = abs((match_valid_at - valid_time).total_seconds())

                    # Accept files valid within 30 minutes and keep the newest run
                    if delta_t <= 30 * 60 and int(matches[1]) > max_t:
                        result = matches
                        max_t = int(matches[1])

    return result
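Note: this snippet assumes a module-level tracer and a download_content() helper that writes the remote directory listing to tmp/content.log. A minimal sketch of that surrounding setup, assuming a plain OpenCensus tracer (the exporter choice and the helper body are assumptions, not taken from the original module):

import datetime
import re

import pytz
from opencensus.trace.samplers import AlwaysOnSampler
from opencensus.trace.tracer import Tracer

# Assumption: in-process tracer with the default exporter; the original
# project more likely exports spans to Cloud Trace (Stackdriver).
tracer = Tracer(sampler=AlwaysOnSampler())


def download_content():
    # Hypothetical helper: fetch the model file listing and store it
    # where latest_run() expects it ('tmp/content.log').
    ...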
Example #2
def hello():
    if request.method == 'POST':
        n = int(request.get_json().get("n"))
    else:
        n = 10

    tracer = initialize_tracer(PROJECT_ID)

    with tracer.span(name="gae_gapic") as span:
        latencies = []

        temp = time.time()
        response = client.publish(topic_path, [{'data': b'first'}])
        assert response.message_ids
        logger.log_text(f"{int((time.time()- temp)*1000)}")

        for i in range(n):
            messages = [{'data': str(i).encode('utf-8')}]
            temp = time.time()
            response = client.publish(topic_path, messages)
            assert response.message_ids
            latencies.append(time.time() - temp)

    logger.log_text(f"{','.join([str(int(i*1000)) for i in latencies])}")
    return ("Published messages.\n")
Example #3
def hello_world(request):
    request_json = request.get_json(silent=True)

    if 'n' in request_json:
        n = int(request_json['n'])
    else:
        n = 10

    tracer = initialize_tracer(PROJECT_ID)

    with tracer.span(name="cf_gapic") as span:
        latencies = []

        temp = time.time()
        response = client.publish(topic_path, [{'data': b'first'}])
        assert response.message_ids
        logger.log_text(f"{int((time.time()- temp)*1000)}")

        for i in range(n):
            messages = [{'data': str(i).encode('utf-8')}]
            temp = time.time()
            response = client.publish(topic_path, messages)
            assert response.message_ids
            latencies.append(time.time() - temp)

    logger.log_text(f"{','.join([str(int(i*1000)) for i in latencies])}")
    return ("Published messages.")
Example #4
def hello():
    if request.method == 'POST':
        n = int(request.get_json().get("n"))
    else:
        n = 10

    tracer = initialize_tracer(PROJECT_ID)

    with tracer.span(name="gae_apiary") as span:
        latencies = []
        message = base64.b64encode(b'first')
        body = {'messages': [{'data': message.decode('utf-8')}]}

        temp = time.time()
        response = service.projects().topics().publish(
            topic=topic_path,
            body=body,
        ).execute()
        assert response['messageIds']
        logger.log_text(f"{int((time.time()- temp)*1000)}")

        for i in range(n):
            message = base64.b64encode(str(i).encode('utf-8'))
            body = {'messages': [{'data': message.decode('utf-8')}]}
            temp = time.time()
            response = service.projects().topics().publish(
                topic=topic_path,
                body=body,
            ).execute()
            assert response['messageIds']
            latencies.append(time.time() - temp)

    logger.log_text(f"{','.join([str(int(i*1000)) for i in latencies])}")
    return("Published messages.\n")
Example #5
def hello():
    if request.method == 'POST':
        n = int(request.get_json().get("n"))
    else:
        n = 10

    tracer = initialize_tracer(PROJECT_ID)

    def get_callback(f, data):
        def callback(f):
            try:
                f.result()
                logger.log_text(f"{int((time.time()- futures[data])*1000)}")
            except Exception:
                # Re-raise the original exception instead of a new bare Exception
                raise
        return callback

    with tracer.span(name="gae_cl_mixed") as span:
        futures = dict()
        latencies = []

        futures.update({b"first": time.time()})
        future = client.publish(topic_path, data=b"first")
        future.add_done_callback(get_callback(future, b"first"))

        for i in range(n):
            data = str(i)
            temp = time.time()
            future = client.publish(topic_path, data=data.encode('utf-8'))
            future.result()
            latencies.append(time.time() - temp)

    logger.log_text(f"{','.join([str(int(i*1000)) for i in latencies])}")
    return("Published messages.\n")
Example #6
def skewt(latitude, longitude, valid_at):
    with tracer.span(name="skewt") as span:
        span.add_attribute("latitude", str(latitude))
        span.add_attribute("longitude", str(longitude))
        span.add_attribute("valid_at", str(valid_at))

        valid_at_parsed = datetime.datetime.strptime(valid_at, "%Y%m%d%H")
        valid_at_parsed = pytz.timezone('UTC').localize(valid_at_parsed)

        with tracer.span(name="sounding"):
            sounding = load_weather_model_sounding(latitude, longitude, valid_at_parsed)

        with tracer.span(name="plotting"):
            model_time = str(np.datetime_as_string(sounding.metadata.model_time))
            valid_time = str(np.datetime_as_string(sounding.metadata.valid_time))

            model_time_for_file_name = str(np.datetime_as_string(sounding.metadata.model_time, unit='m')).replace(":", "_")
            valid_time_for_file_name = str(np.datetime_as_string(sounding.metadata.valid_time, unit='m')).replace(":", "_")

            full_plot = plot_skewt_icon(sounding=sounding, parcel="surface-based")
            full_plot_filename = f"plot_{sounding.latitude_pretty}_{sounding.longitude_pretty}_" \
                                 f"{model_time_for_file_name}_{valid_time_for_file_name}_full.png"
            full_plot.savefig(full_plot_filename)

            detail_plot = plot_skewt_icon(sounding=sounding, parcel="surface-based", base=1000, top=500, skew=15)
            detail_plot_filename = f"plot_{sounding.latitude_pretty}_{sounding.longitude_pretty}_" \
                                   f"{model_time_for_file_name}_{valid_time_for_file_name}_detail.png"
            detail_plot.savefig(detail_plot_filename)

        with tracer.span(name="cloud_upload"):
            # Google Cloud Upload

            storage_client = storage.Client()
            bucket = storage_client.bucket(config.bucket_name)
            blob_full = bucket.blob(full_plot_filename)
            blob_detail = bucket.blob(detail_plot_filename)

            with ThreadPoolExecutor(max_workers=2) as executor:
                executor.submit(blob_full.upload_from_filename, full_plot_filename)
                executor.submit(blob_detail.upload_from_filename, detail_plot_filename)

            result = json.dumps(SkewTResult(model_time, valid_time,
                                            config.bucket_public_url + full_plot_filename,
                                            config.bucket_public_url + detail_plot_filename).__dict__)
            response = make_response(result)
            response.mimetype = 'application/json'
            return response
Example #7
def parameter_all_levels(model, latitude, longitude, run_hour, run_datetime, timestep,
                         parameter, level_type="model_level", base_level=60, top_level=1):
    with tracer.span(name="download"):
        levels = list(range(base_level, top_level - 1, -1))
        paths = [level_path(model, run_hour, run_datetime, timestep, parameter, level, level_type) for level in levels]

        session = requests.sessions.Session()
        with concurrent.futures.ThreadPoolExecutor(max_workers=config.http_download_pool) as executor:
            # Submit the callable with its arguments; calling download_file() here would run the downloads serially
            futures = [executor.submit(download_file, path, session) for path in paths]
            concurrent.futures.wait(futures, timeout=None, return_when=concurrent.futures.ALL_COMPLETED)

    with tracer.span(name="parsing"):
        data_set = xr.open_mfdataset(paths, engine="cfgrib", concat_dim="generalVerticalLayer",
                                     combine='nested', parallel=config.cfgrib_parallel)
        interpolated = data_set.to_array()[0].interp(latitude=latitude, longitude=longitude)
        data = AllLevelDataResult(interpolated.values, interpolated.time.values, interpolated.valid_time.values)
        data_set.close()
    return data
Example #8
def parameter_all_levels(model, run, parameter, latitude, longitude, level_type="model_level", session=requests.Session()):
    with tracer.span(name="load_parameters") as span:
        run_hour = run[0]
        run_datetime = run[1]
        timestep = int(run[2])
        logging.info(f"Loading sounding for latitude={latitude} longitude={longitude} with "
                     f"run_hour={run_hour} run_datetime={run_datetime} timestep={timestep} "
                     f"level_type={level_type} and parameter={parameter}")
        span.add_attribute("parameter", parameter)
        span.add_attribute("run_hour", str(run_hour))
        span.add_attribute("run_datetime", str(run_datetime))
        span.add_attribute("timestep", str(timestep))

        levels = np.floor(np.linspace(60, 0, config.level_workers)).astype(int).tolist()
        urls = list()

        for i in range(0, len(levels) - 1):
            base = levels[i]
            top = levels[i + 1] + 1
            # example URL:
            # https://nwp-sounding-mw5zsrftba-ew.a.run.app/48.21/16.37/06/2020030406/4/p
            url = f"{config.sounding_api}" \
                   f"/{latitude}" \
                   f"/{longitude}" \
                   f"/{run_hour}" \
                   f"/{run_datetime}" \
                   f"/{timestep}" \
                   f"/{parameter}" \
                   f"?level_type={level_type}" \
                   f"&base={base}" \
                   f"&top={top}"
            urls.append(url)

        result = AllLevelData(data=np.empty(0), model_time=None, valid_time=None)

        with ThreadPoolExecutor(max_workers=config.level_workers) as executor:
            responses = list(executor.map(session.get, urls))

            for response in responses:
                response.raise_for_status()
                json_result = json.loads(response.content)
                result.data = np.append(result.data, np.array(json_result["data"]))

            # Model/valid time are identical across level chunks; take them from the first response
            json_first = json.loads(responses[0].content)
            result.model_time = np.datetime64(json_first["model_time"])
            result.valid_time = np.datetime64(json_first["valid_time"])

        return result
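The run tuple consumed here matches what latest_run in Example #1 returns: the regex groups (run_hour, run_datetime, timestep). A usage sketch under that assumption (coordinates and the requested time are placeholders):

import datetime

import pytz

valid_time = pytz.timezone('UTC').localize(datetime.datetime(2020, 3, 4, 10))
run = latest_run("icon-eu", valid_time)            # e.g. ('06', '2020030406', '004')
pressure = parameter_all_levels("icon-eu", run, "p", 48.21, 16.37)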
Example #9
def handleHttp(request: 'flask.Request') -> str:
    tracer = initialize_tracer(request)
    req = WebhookRequest()
    res = WebhookResponse()
    try:
        json_format.Parse(request.data, req, ignore_unknown_fields=True)
        if req.query_result.action == "roll":
            with tracer.span(name='roll'):
                handleRoll(req, res)
    except UnfulfillableRequestError as e:
        logging.exception(e)
        if STACKDRIVER_ERROR_REPORTING:
            try:
                client = error_reporting.Client()
                client.report_exception(
                    http_context=error_reporting.build_flask_context(request))
            except Exception:
                logging.exception("Failed to send error report to Google")
        add_fulfillment_messages(res, str(e))
    return json_format.MessageToJson(res)
Example #10
def sounding(latitude, longitude, run_hour, run_datetime, timestep, parameter):
    with tracer.span(name="sounding") as span:
        span.add_attribute("latitude", str(latitude))
        span.add_attribute("longitude", str(longitude))
        span.add_attribute("run_hour", str(run_hour))
        span.add_attribute("run_datetime", str(run_datetime))
        span.add_attribute("timestep", str(timestep))
        span.add_attribute("parameter", str(parameter))

        level_type = request.args.get("level_type", "model_level")
        base_level = int(request.args.get("base", "60"))
        top_level = int(request.args.get("top", "1"))

        sounding = parameter_all_levels(config.model, latitude, longitude,
                                        run_hour, run_datetime, int(timestep),
                                        parameter, level_type, base_level, top_level)

        response = make_response(json.dumps(sounding.__dict__))
        response.mimetype = 'application/json'
        return response
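Example #10 reads level_type, base and top from the query string and takes the remaining inputs as path segments. The example URL in Example #8 (.../48.21/16.37/06/2020030406/4/p) suggests the route shape; a hedged sketch of the assumed Flask wiring:

from flask import Flask

app = Flask(__name__)

# Route shape inferred from the example URL in Example #8 (an assumption)
app.add_url_rule(
    "/<latitude>/<longitude>/<run_hour>/<run_datetime>/<timestep>/<parameter>",
    view_func=sounding,
)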
Example #11
def trace_requests(request):
    """Responds to any HTTP request.
    Args:
        request (flask.Request): HTTP request object.
    Returns:
        The response text or any set of values that can be turned into a
        Response object using
        `make_response <http://flask.pocoo.org/docs/1.0/api/#flask.Flask.make_response>`.
    """
    tracer = initTracer()

    base_name = os.environ.get("FUNCTION_NAME") + "-trace-"

    with tracer.span(name=base_name + "metadata-call"):

        import requests

        r = requests.get(
            "http://metadata.google.internal/computeMetadata/v1/project/numeric-project-id",
            headers={'Metadata-Flavor': 'Google'})

    return r.content
Example #12
def load_weather_model_sounding(latitude, longitude, valid_time):
    with tracer.span(name="latest_run"):
        model = "icon-eu"
        run = latest_run(model, valid_time)

    http_session = session()

    with ThreadPoolExecutor(max_workers=config.parameter_all_levels_workers) as executor:
        p_future = executor.submit(parameter_all_levels, model, run, "p", latitude, longitude, session=http_session)
        T_future = executor.submit(parameter_all_levels, model, run, "T", latitude, longitude, session=http_session)
        QV_future = executor.submit(parameter_all_levels, model, run, "QV", latitude, longitude, session=http_session)
        U_future = executor.submit(parameter_all_levels, model, run, "U", latitude, longitude, session=http_session)
        V_future = executor.submit(parameter_all_levels, model, run, "V", latitude, longitude, session=http_session)
        HHL_future = executor.submit(parameter_all_levels, model, run, "HHL", latitude, longitude, "time_invariant", session=http_session)

    # Pressure Pa
    p_raw = p_future.result()
    p = p_raw.data

    # Temperature K
    T = T_future.result().data

    # Specific Humidity kg/kg
    QV = QV_future.result().data

    # Dewpoint K
    Td = mpcalc.dewpoint_from_specific_humidity(QV * units("kg/kg"), T * units.K, p * units.Pa)

    # Wind m/s
    U = U_future.result().data
    V = V_future.result().data

    # Height above MSL for model level
    HHL = HHL_future.result().data

    meta_data = WeatherModelSoundingMetaData(p_raw.model_time, p_raw.valid_time)

    return WeatherModelSounding(latitude, longitude, p, T, QV, Td, U, V, HHL, meta_data)
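Example #12 returns the per-parameter arrays wrapped in WeatherModelSounding and WeatherModelSoundingMetaData containers that are not shown. A minimal sketch consistent with how they are constructed above (field names inferred from this call and from Example #6; the real classes also expose derived attributes such as latitude_pretty):

from dataclasses import dataclass

import numpy as np


@dataclass
class WeatherModelSoundingMetaData:
    model_time: np.datetime64
    valid_time: np.datetime64


@dataclass
class WeatherModelSounding:
    latitude: float
    longitude: float
    p: np.ndarray
    T: np.ndarray
    QV: np.ndarray
    Td: np.ndarray   # note: in Example #12 this is a unit-carrying (pint) quantity
    U: np.ndarray
    V: np.ndarray
    HHL: np.ndarray
    metadata: WeatherModelSoundingMetaData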