Example #1
async def main(path_to_data,
               serve_bootstrap=True,
               host=None,
               port=None,
               id=None,
               name="DWD - historical - 1991-2019",
               description=None,
               use_async=False):

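    # default configuration; common.update_config below merges in overrides from the command line (sys.argv)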
    config = {
        "path_to_data": path_to_data,
        "port": port, 
        "host": host,
        "id": id,
        "name": name,
        "description": description,
        "serve_bootstrap": serve_bootstrap,
        "in_sr": None,
        "out_sr": None,
        "fbp": False,
        "no_fbp": False,
        "use_async": use_async,
        "to_attr": None, #"climate",
        "latlon_attr": "latlon",
        "start_date_attr": "startDate",
        "end_date_attr": "endDate",
        "mode": "sturdyref", # sturdyref | capability | data
    }
    common.update_config(config, sys.argv, print_config=True, allow_new_keys=False)

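    # set up the restorer (for sturdy refs), the lat/lon interpolator and the dataset-backed climate service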
    restorer = common.Restorer()
    interpolator, rowcol_to_latlon = ccdi.create_lat_lon_interpolator_from_json_coords_file(
        config["path_to_data"] + "/" + "latlon-to-rowcol.json")
    meta_plus_data = create_meta_plus_datasets(
        config["path_to_data"] + "/germany", interpolator, rowcol_to_latlon, restorer)
    service = ccdi.Service(
        meta_plus_data, id=config["id"], name=config["name"],
        description=config["description"], restorer=restorer)
    if config["fbp"]:
        fbp(config, climate_capnp.Service._new_client(service))
    else:
        if config["use_async"]:
            await serv.async_init_and_run_service({"service": service}, config["host"], config["port"], 
            serve_bootstrap=config["serve_bootstrap"], restorer=restorer)
        else:
            
            serv.init_and_run_service({"service": service}, config["host"], config["port"], 
                serve_bootstrap=config["serve_bootstrap"], restorer=restorer)
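Only the async main function is shown above. A minimal sketch of a matching entry point, assuming plain asyncio is enough to drive it (the original project may instead wrap this in pycapnp's own event-loop helpers), could look like this:

import asyncio

if __name__ == "__main__":
    # the path is a placeholder; real values are normally supplied on the command line
    # and merged into the defaults by common.update_config
    asyncio.run(main("path/to/dwd_data", use_async=True))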
Example #2
PATH_TO_CAPNP_SCHEMAS = PATH_TO_REPO / "capnproto_schemas"
abs_imports = [str(PATH_TO_CAPNP_SCHEMAS)]
common_capnp = capnp.load(str(PATH_TO_CAPNP_SCHEMAS / "common.capnp"), imports=abs_imports) 
geo_capnp = capnp.load(str(PATH_TO_CAPNP_SCHEMAS / "geo.capnp"), imports=abs_imports)

#------------------------------------------------------------------------------

config = {
    "from_name": "utm32n",
    "to_name": "latlon",
    "to_attr": None, #"latlon",
    "from_attr": None, 
    "in_sr": None, # geo.LatLonCoord | geo.UTMCoord | geo.GKCoord
    "out_sr": None # geo.LatLonCoord | geo.UTMCoord | geo.GKCoord
}
common.update_config(config, sys.argv, print_config=True, allow_new_keys=False)

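# connect the FBP in and out channel ports via the sturdy refs given in the config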
conman = common.ConnectionManager()
inp = conman.try_connect(config["in_sr"], cast_as=common_capnp.Channel.Reader, retry_secs=1)
outp = conman.try_connect(config["out_sr"], cast_as=common_capnp.Channel.Writer, retry_secs=1)

from_type = geo.name_to_struct_type(config["from_name"])

try:
    if inp and outp:
        while True:
            msg = inp.read().wait()
            # check for end of data from in port
            if msg.which() == "done":
                break
            # the rest of the loop (omitted in this excerpt) converts the received
            # coordinate from `from_type` into the system named by config["to_name"]
            # and writes the result to the out port
except Exception as e:
    # placeholder error handling; the original snippet is truncated at this point
    print("geo converter stopped:", e)

Example #3
async def main(path_to_config,
               serve_bootstrap=True,
               host=None,
               port=None,
               id=None,
               name="DWD Core Ensemble",
               description=None,
               reg_sturdy_ref=None,
               use_async=False):

    config = {
        "path_to_config": path_to_config,
        "config_toml_file": "metadata.toml",
        "port": port,
        "host": host,
        "id": id,
        "name": name,
        "description": description,
        "serve_bootstrap": serve_bootstrap,
        "reg_sturdy_ref": reg_sturdy_ref,
        "reg_category": "climate",
        "use_async": use_async,
    }
    common.update_config(config,
                         sys.argv,
                         print_config=True,
                         allow_new_keys=False)

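    # read the dataset descriptions from the TOML metadata file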
    path_to_config = Path(config["path_to_config"])
    with open(path_to_config / config["config_toml_file"], "rb") as f:
        datasets_config = tomli.load(f)

    if not datasets_config:
        print("Couldn't load datasets configuration from:",
              str(path_to_config / config["config_toml_file"]))
        exit(1)

    general = datasets_config["general"]

    conman = async_helpers.ConnectionManager()
    restorer = common.Restorer()
    interpolator, rowcol_to_latlon = ccdi.create_lat_lon_interpolator_from_json_coords_file(
        path_to_config / general["latlon_to_rowcol_mapping"])
    meta_plus_data = create_meta_plus_datasets(path_to_config, datasets_config,
                                               interpolator, rowcol_to_latlon,
                                               restorer)
    service = ccdi.Service(meta_plus_data,
                           id=config["id"],
                           name=config["name"],
                           description=config["description"],
                           restorer=restorer)

    if config["reg_sturdy_ref"]:
        registrator = await conman.try_connect(config["reg_sturdy_ref"],
                                               cast_as=reg_capnp.Registrator)
        if registrator:
            unreg = await registrator.register(
                ref=service, categoryId=config["reg_category"]).a_wait()
            print("Registered ", config["name"], "climate service.")
            #await unreg.unregister.unregister().a_wait()
        else:
            print("Couldn't connect to registrator at sturdy_ref:",
                  config["reg_sturdy_ref"])

    if config["use_async"]:
        await serv.async_init_and_run_service(
            {"service": service},
            config["host"],
            config["port"],
            serve_bootstrap=config["serve_bootstrap"],
            restorer=restorer,
            conman=conman)
    else:
        serv.init_and_run_service({"service": service},
                                  config["host"],
                                  config["port"],
                                  serve_bootstrap=config["serve_bootstrap"],
                                  restorer=restorer,
                                  conman=conman)
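The only key this excerpt reads from metadata.toml directly is general["latlon_to_rowcol_mapping"]; the rest of the parsed TOML is handed to create_meta_plus_datasets. A minimal stand-in, written as the dictionary tomli.load would return (the dataset-specific sections are an assumption and are not shown in the example), might look like:

# assumed minimal structure of metadata.toml, expressed as the parsed dict
datasets_config = {
    "general": {
        # JSON file mapping grid row/col indices to lat/lon coordinates (see Example #1)
        "latlon_to_rowcol_mapping": "latlon-to-rowcol.json",
    },
    # further, dataset-specific sections would be consumed by create_meta_plus_datasets
}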