Example No. 1
def run_consumer(leave_after_finished_run=True,
                 server={
                     "server": None,
                     "port": None
                 },
                 shared_id=None):
    "collect data from workers"

    config = {
        "user": "******" if LOCAL_RUN else "container",
        "port": server["port"] if server["port"] else PORT,
        "server": server["server"] if server["server"] else LOCAL_RUN_HOST,
        "start-row": "0",
        "end-row": "-1",
        "shared_id": shared_id,
        "no-of-setups": 2  #None
    }
    config["out"] = PATHS[config["user"]]["local-path-to-output-dir"]
    config["csv-out"] = PATHS[config["user"]]["local-path-to-csv-output-dir"]

    if len(sys.argv) > 1 and __name__ == "__main__":
        for arg in sys.argv[1:]:
            k, v = arg.split("=")
            if k in config:
                config[k] = v

    print("consumer config:", config)

    context = zmq.Context()
    if config["shared_id"]:
        socket = context.socket(zmq.DEALER)
        socket.setsockopt(zmq.IDENTITY, config["shared_id"])
    else:
        socket = context.socket(zmq.PULL)

    socket.connect("tcp://" + config["server"] + ":" + config["port"])

    leave = False
    write_normal_output_files = False

    path_to_soil_grid = TEMPLATE_SOIL_PATH.format(
        local_path_to_data_dir=PATHS[config["user"]]["local-path-to-data-dir"])
    soil_metadata, header = Mrunlib.read_header(path_to_soil_grid)
    soil_grid_template = np.loadtxt(path_to_soil_grid, dtype=int, skiprows=6)
    #set invalid soils / water to no-data
    soil_grid_template[soil_grid_template < 1] = -9999
    soil_grid_template[soil_grid_template > 71] = -9999
    #set all data values to one, to count them later
    soil_grid_template[soil_grid_template != -9999] = 1
    #set all no-data values to 0, to ignore them while counting
    soil_grid_template[soil_grid_template == -9999] = 0
    #count data cells per row
    datacells_per_row = np.sum(soil_grid_template, axis=1)

    start_row = int(config["start-row"])
    end_row = int(config["end-row"])
    ncols = int(soil_metadata["ncols"])
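    # Per-setup bookkeeping: each setup_id lazily gets its own write cursor
    # ("next-row"), its own copy of the per-row data-cell counts and a nested
    # row -> col -> [outputs] buffer, so results arriving interleaved from
    # several setups can be assembled into complete grid rows independently.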
    setup_id_to_data = defaultdict(lambda: {
        "start_row": start_row,
        "end_row": end_row,
        "nrows": end_row - start_row + 1 if start_row > 0 and end_row >= start_row
                 else int(soil_metadata["nrows"]),
        "ncols": ncols,
        "header": header,
        "out_dir_exists": False,
        "row-col-data": defaultdict(lambda: defaultdict(list)),
        "datacell-count": datacells_per_row.copy(),
        "next-row": start_row
    })

    def process_message(msg):

        if not hasattr(process_message, "wnof_count"):
            process_message.wnof_count = 0
            process_message.setup_count = 0

        leave = False

        if msg["type"] == "finish":
            print("c: received finish message")
            leave = True

        elif not write_normal_output_files:
            custom_id = msg["customId"]
            setup_id = custom_id["setup_id"]

            data = setup_id_to_data[setup_id]

            row = custom_id["srow"]
            col = custom_id["scol"]
            #crow = custom_id.get("crow", -1)
            #ccol = custom_id.get("ccol", -1)
            #soil_id = custom_id.get("soil_id", -1)

            debug_msg = "received work result " + str(process_message.received_env_count) + " customId: " + str(msg.get("customId", "")) \
            + " next row: " + str(data["next-row"]) \
            + " cols@row to go: " + str(data["datacell-count"][row]) + "@" + str(row) + " cells_per_row: " + str(datacells_per_row[row])#\
            #+ " rows unwritten: " + str(data["row-col-data"].keys())
            print(debug_msg)
            #debug_file.write(debug_msg + "\n")
            data["row-col-data"][row][col].append(create_output(msg))
            data["datacell-count"][row] -= 1

            process_message.received_env_count += 1

            # a row is complete once its remaining-datacell counter reaches zero;
            # flush every consecutive complete row starting at "next-row"
            while data["datacell-count"][data["next-row"]] == 0:

                path_to_out_dir = config["out"] + str(setup_id) + "/"
                path_to_csv_out_dir = config["csv-out"] + str(setup_id) + "/"
                if not data["out_dir_exists"]:
                    if os.path.isdir(path_to_out_dir) and os.path.exists(
                            path_to_out_dir):
                        data["out_dir_exists"] = True
                    else:
                        try:
                            os.makedirs(path_to_out_dir)
                            data["out_dir_exists"] = True
                        except OSError:
                            print("c: Couldn't create dir:", path_to_out_dir,
                                  "! Exiting.")
                            exit(1)
                    if os.path.isdir(path_to_csv_out_dir) and os.path.exists(
                            path_to_csv_out_dir):
                        data["out_dir_exists"] = True
                    else:
                        try:
                            os.makedirs(path_to_csv_out_dir)
                            data["out_dir_exists"] = True
                        except OSError:
                            print("c: Couldn't create dir:",
                                  path_to_csv_out_dir, "! Exiting.")
                            exit(1)

                write_row_to_grids(data["row-col-data"], data["next-row"],
                                   data["ncols"], data["header"],
                                   path_to_out_dir, path_to_csv_out_dir,
                                   setup_id)

                debug_msg = "wrote row: " + str(
                    data["next-row"]) + " next-row: " + str(
                        data["next-row"] + 1) + " rows unwritten: " + str(
                            data["row-col-data"].keys())
                print(debug_msg)
                #debug_file.write(debug_msg + "\n")

                data["next-row"] += 1  # move to next row (to be written)

                if leave_after_finished_run \
                and ((data["end_row"] < 0 and data["next-row"] > data["nrows"]-1) \
                    or (data["end_row"] >= 0 and data["next-row"] > data["end_row"])):

                    process_message.setup_count += 1
                    # once results for all configured setups have been written, we can leave
                    if process_message.setup_count >= int(
                            config["no-of-setups"]):
                        print("c: all results received, exiting")
                        leave = True
                        break

        elif write_normal_output_files:

            if msg.get("type",
                       "") in ["jobs-per-cell", "no-data", "setup_data"]:
                #print "ignoring", result.get("type", "")
                return

            print("received work result ", process_message.received_env_count,
                  " customId: ", str(msg.get("customId", "").values()))

            custom_id = msg["customId"]
            setup_id = custom_id["setup_id"]
            row = custom_id["srow"]
            col = custom_id["scol"]
            #crow = custom_id.get("crow", -1)
            #ccol = custom_id.get("ccol", -1)
            #soil_id = custom_id.get("soil_id", -1)

            process_message.wnof_count += 1

            #with open("out/out-" + str(i) + ".csv", 'wb') as _:
            with open(
                    "out-normal/out-" + str(process_message.wnof_count) +
                    ".csv", 'wb') as _:
                writer = csv.writer(_, delimiter=",")

                for data_ in msg.get("data", []):
                    results = data_.get("results", [])
                    orig_spec = data_.get("origSpec", "")
                    output_ids = data_.get("outputIds", [])

                    if len(results) > 0:
                        writer.writerow([orig_spec.replace("\"", "")])
                        for row in monica_io.write_output_header_rows(
                                output_ids,
                                include_header_row=True,
                                include_units_row=True,
                                include_time_agg=False):
                            writer.writerow(row)

                        for row in monica_io.write_output(output_ids, results):
                            writer.writerow(row)

                    writer.writerow([])

            process_message.received_env_count += 1

        return leave

    process_message.received_env_count = 1

    while not leave:
        try:
            start_time_recv = timeit.default_timer()
            msg = socket.recv_json(encoding="latin-1")
            elapsed = timeit.default_timer() - start_time_recv
            print("time to receive message" + str(elapsed))
            start_time_proc = timeit.default_timer()
            leave = process_message(msg)
            elapsed = timeit.default_timer() - start_time_proc
            print("time to process message" + str(elapsed))
        except Exception as e:
            print("Exception:", e)
            continue

    print("exiting run_consumer()")
Example No. 2
def run_consumer(leave_after_finished_run=True,
                 server={
                     "server": None,
                     "port": None
                 },
                 shared_id=None):
    "collect data from workers"

    config = {
        "mode": "remoteConsumer-remoteMonica",
        "port": server["port"] if server["port"] else DEFAULT_PORT,
        "server": server["server"] if server["server"] else DEFAULT_HOST,
        "start-row": "0",
        "end-row": "-1",
        "shared_id": shared_id,
        "no-of-setups": 2,
        "timeout": 600000  # 10 minutes
    }

    if len(sys.argv) > 1 and __name__ == "__main__":
        for arg in sys.argv[1:]:
            k, v = arg.split("=")
            if k in config:
                config[k] = v

    paths = PATHS[config["mode"]]

    if not "out" in config:
        config["out"] = paths["path-to-output-dir"]
    if not "csv-out" in config:
        config["csv-out"] = paths["path-to-csv-output-dir"]

    print("consumer config:", config)

    context = zmq.Context()
    if config["shared_id"]:
        socket = context.socket(zmq.DEALER)
        socket.setsockopt(zmq.IDENTITY, config["shared_id"])
    else:
        socket = context.socket(zmq.PULL)

    socket.connect("tcp://" + config["server"] + ":" + config["port"])
    socket.RCVTIMEO = int(config["timeout"])
    leave = False
    write_normal_output_files = False

    path_to_soil_grid = TEMPLATE_SOIL_PATH.format(
        local_path_to_data_dir=paths["path-to-data-dir"])
    soil_metadata, header = Mrunlib.read_header(path_to_soil_grid)
    soil_grid_template = np.loadtxt(path_to_soil_grid, dtype=int, skiprows=6)

    #set invalid soils / water to no-data
    #soil_grid_template[soil_grid_template < 1] = -9999
    #soil_grid_template[soil_grid_template > 71] = -9999

    #unknown_soil_ids = {}
    #soil_db_con = sqlite3.connect(paths["path-to-data-dir"] + DATA_SOIL_DB)
    #for row in range(soil_grid_template.shape[0]):
    #    print(row)
    #    for col in range(soil_grid_template.shape[1]):
    #        soil_id = int(soil_grid_template[row, col])
    #        if soil_id == -9999:
    #            continue
    #        if soil_id in unknown_soil_ids:
    #            if unknown_soil_ids[soil_id]:
    #                soil_grid_template[row, col] = -9999
    #            else:
    #                continue
    #        else:
    #            sp_json = soil_io3.soil_parameters(soil_db_con, soil_id)
    #            if len(sp_json) == 0:
    #                unknown_soil_ids[soil_id] = True
    #                soil_grid_template[row, col] = -9999
    #            else:
    #                unknown_soil_ids[soil_id] = False

    #if USE_CORINE:
    #path_to_corine_grid = TEMPLATE_CORINE_PATH.format(local_path_to_data_dir=paths["path-to-data-dir"])
    #corine_meta, _ = Mrunlib.read_header(path_to_corine_grid)
    #corine_grid = np.loadtxt(path_to_corine_grid, dtype=int, skiprows=6)
    #corine_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(corine_grid, corine_meta)

    #scols = int(soil_metadata["ncols"])
    #srows = int(soil_metadata["nrows"])
    #scellsize = int(soil_metadata["cellsize"])
    #xllcorner = int(soil_metadata["xllcorner"])
    #yllcorner = int(soil_metadata["yllcorner"])

    #for srow in range(0, srows):
    #print(srow)
    # for scol in range(0, scols):
    #   soil_id = soil_grid_template[srow, scol]
    # if soil_id == -9999:
    # continue

    #get coordinate of clostest climate element of real soil-cell
    #sh_gk5 = yllcorner + (scellsize / 2) + (srows - srow - 1) * scellsize
    #sr_gk5 = xllcorner + (scellsize / 2) + scol * scellsize

    # check if current grid cell is used for agriculture
    #corine_id = corine_gk5_interpolate(sr_gk5, sh_gk5)
    #if corine_id not in [200, 210, 211, 212, 240, 241, 242, 243, 244]:
    # soil_grid_template[srow, scol] = -9999

    #print("filtered through CORINE")

    #set all data values to one, to count them later
    soil_grid_template[soil_grid_template != -9999] = 1
    #set all no-data values to 0, to ignore them while counting
    soil_grid_template[soil_grid_template == -9999] = 0

    #count data cells per row
    datacells_per_row = np.sum(soil_grid_template, axis=1)

    start_row = int(config["start-row"])
    end_row = int(config["end-row"])
    ncols = int(soil_metadata["ncols"])
    setup_id_to_data = defaultdict(lambda: {
        "start_row": start_row,
        "end_row": end_row,
        "nrows": end_row - start_row + 1 if start_row > 0 and end_row >= start_row
                 else int(soil_metadata["nrows"]),
        "ncols": ncols,
        "header": header,
        "out_dir_exists": False,
        "row-col-data": defaultdict(lambda: defaultdict(list)),
        "datacell-count": datacells_per_row.copy(),
        "next-row": start_row
    })

    def process_message(msg):

        if not hasattr(process_message, "wnof_count"):
            process_message.wnof_count = 0
            process_message.setup_count = 0

        leave = False

        if msg["type"] == "finish":
            print("c: received finish message")
            leave = True

        elif not write_normal_output_files:
            custom_id = msg["customId"]
            setup_id = custom_id["setup_id"]

            data = setup_id_to_data[setup_id]

            row = custom_id["srow"]
            col = custom_id["scol"]
            #crow = custom_id.get("crow", -1)
            #ccol = custom_id.get("ccol", -1)
            #soil_id = custom_id.get("soil_id", -1)

            debug_msg = "received work result " + str(process_message.received_env_count) + " customId: " + str(msg.get("customId", "")) \
            + " next row: " + str(data["next-row"]) \
            + " cols@row to go: " + str(data["datacell-count"][row]) + "@" + str(row) + " cells_per_row: " + str(datacells_per_row[row])#\
            #+ " rows unwritten: " + str(data["row-col-data"].keys())
            print(debug_msg)
            #debug_file.write(debug_msg + "\n")
            data["row-col-data"][row][col].append(create_output(msg))
            data["datacell-count"][row] -= 1

            process_message.received_env_count += 1

            # a row is complete once its remaining-datacell counter reaches zero;
            # flush every consecutive complete row starting at "next-row"
            while data["datacell-count"][data["next-row"]] == 0:

                path_to_out_dir = config["out"] + str(setup_id) + "/"
                path_to_csv_out_dir = config["csv-out"] + str(setup_id) + "/"
                if not data["out_dir_exists"]:
                    if os.path.isdir(path_to_out_dir) and os.path.exists(
                            path_to_out_dir):
                        data["out_dir_exists"] = True
                    else:
                        try:
                            os.makedirs(path_to_out_dir)
                            data["out_dir_exists"] = True
                        except OSError:
                            print("c: Couldn't create dir:", path_to_out_dir,
                                  "! Exiting.")
                            exit(1)
                    if os.path.isdir(path_to_csv_out_dir) and os.path.exists(
                            path_to_csv_out_dir):
                        data["out_dir_exists"] = True
                    else:
                        try:
                            os.makedirs(path_to_csv_out_dir)
                            data["out_dir_exists"] = True
                        except OSError:
                            print("c: Couldn't create dir:",
                                  path_to_csv_out_dir, "! Exiting.")
                            exit(1)

                write_row_to_grids(data["row-col-data"], data["next-row"],
                                   data["ncols"], data["header"],
                                   path_to_out_dir, path_to_csv_out_dir,
                                   setup_id)

                debug_msg = "wrote row: " + str(
                    data["next-row"]) + " next-row: " + str(
                        data["next-row"] + 1) + " rows unwritten: " + str(
                            list(data["row-col-data"].keys()))
                print(debug_msg)
                #debug_file.write(debug_msg + "\n")

                data["next-row"] += 1  # move to next row (to be written)

                if leave_after_finished_run \
                and ((data["end_row"] < 0 and data["next-row"] > data["nrows"]-1) \
                    or (data["end_row"] >= 0 and data["next-row"] > data["end_row"])):

                    process_message.setup_count += 1
                    # once results for all configured setups have been written, we can leave
                    if process_message.setup_count >= int(
                            config["no-of-setups"]):
                        print("c: all results received, exiting")
                        leave = True
                        break

        elif write_normal_output_files:

            if msg.get("type", "") in ["jobs-per-cell", "no-data", "setup_data"]:
                #print("ignoring", msg.get("type", ""))
                return False

            print("received work result ", process_message.received_env_count,
                  " customId: ", str(list(msg.get("customId", "").values())))

            custom_id = msg["customId"]
            setup_id = custom_id["setup_id"]
            row = custom_id["srow"]
            col = custom_id["scol"]
            #crow = custom_id.get("crow", -1)
            #ccol = custom_id.get("ccol", -1)
            #soil_id = custom_id.get("soil_id", -1)

            process_message.wnof_count += 1

            #with open("out/out-" + str(i) + ".csv", 'wb') as _:
            with open("out-normal/out-" + str(process_message.wnof_count) +
                      ".csv",
                      "w",
                      newline='') as _:
                writer = csv.writer(_, delimiter=";")

                for data_ in msg.get("data", []):
                    results = data_.get("results", [])
                    orig_spec = data_.get("origSpec", "")
                    output_ids = data_.get("outputIds", [])

                    if len(results) > 0:
                        writer.writerow([orig_spec.replace("\"", "")])
                        for row in monica_io3.write_output_header_rows(
                                output_ids,
                                include_header_row=True,
                                include_units_row=True,
                                include_time_agg=False):
                            writer.writerow(row)

                        for row in monica_io3.write_output(
                                output_ids, results):
                            writer.writerow(row)

                    writer.writerow([])

            process_message.received_env_count += 1

        return leave

    process_message.received_env_count = 1

    while not leave:
        try:
            #start_time_recv = timeit.default_timer()
            msg = socket.recv_json(encoding="latin-1")
            #elapsed = timeit.default_timer() - start_time_recv
            #print("time to receive message" + str(elapsed))
            #start_time_proc = timeit.default_timer()
            leave = process_message(msg)
            #elapsed = timeit.default_timer() - start_time_proc
            #print("time to process message" + str(elapsed))
        except zmq.error.Again:
            print('no response from the server (with "timeout"=%d ms)' %
                  socket.RCVTIMEO)
            return
        except Exception as e:
            print("Exception:", e)

    print("exiting run_consumer()")
Example No. 3
def run_producer(server={"server": None, "port": None}, shared_id=None):
    "main"

    context = zmq.Context()
    socket = context.socket(zmq.PUSH)
    #config_and_no_data_socket = context.socket(zmq.PUSH)

    config = {
        "mode": "mbm-local-remote",
        "server-port": server["port"] if server["port"] else DEFAULT_PORT,
        "server": server["server"] if server["server"] else DEFAULT_HOST,
        "start-row": "0",
        "end-row": "-1",
        "sim.json": TEMPLATE_SIM_JSON,
        "crop.json": TEMPLATE_CROP_JSON,
        "site.json": TEMPLATE_SITE_JSON,
        "setups-file": SETUP_FILE,
        "run-setups": RUN_SETUP,
        "shared_id": shared_id
    }

    # read commandline args only if script is invoked directly from commandline
    if len(sys.argv) > 1 and __name__ == "__main__":
        for arg in sys.argv[1:]:
            k, v = arg.split("=")
            if k in config:
                config[k] = v

    print("config:", config)

    # select paths
    paths = PATHS[config["mode"]]
    # open soil db connection
    soil_db_con = sqlite3.connect(paths["path-to-data-dir"] + DATA_SOIL_DB)
    #soil_db_con = cas_sq3.connect(paths["path-to-data-dir"] + DATA_SOIL_DB) #CAS.
    # connect to monica proxy (if local, it will try to connect to a locally started monica)
    socket.connect("tcp://" + config["server"] + ":" +
                   str(config["server-port"]))

    # read setup from csv file
    setups = Mrunlib.read_sim_setups(config["setups-file"])
    run_setups = json.loads(config["run-setups"])
    print("read sim setups: ", config["setups-file"])

    #transforms geospatial coordinates from one coordinate reference system to another
    # transform wgs84 into gk5
    wgs84 = Proj(init="epsg:4326"
                 )  #proj4 -> (World Geodetic System 1984 https://epsg.io/4326)
    gk5 = Proj(init=GEO_TARGET_GRID)

    # Load grids
    ## note numpy is able to load from a compressed file, ending with .gz or .bz2

    # height data for germany
    path_to_dem_grid = paths["path-to-data-dir"] + DATA_GRID_HEIGHT
    dem_metadata, _ = Mrunlib.read_header(path_to_dem_grid)
    dem_grid = np.loadtxt(path_to_dem_grid, dtype=int, skiprows=6)
    dem_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        dem_grid, dem_metadata)
    print("read: ", path_to_dem_grid)

    # slope data
    path_to_slope_grid = paths["path-to-data-dir"] + DATA_GRID_SLOPE
    slope_metadata, _ = Mrunlib.read_header(path_to_slope_grid)
    slope_grid = np.loadtxt(path_to_slope_grid, dtype=float, skiprows=6)
    slope_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        slope_grid, slope_metadata)
    print("read: ", path_to_slope_grid)

    # land use data
    path_to_corine_grid = paths["path-to-data-dir"] + DATA_GRID_LAND_USE
    corine_meta, _ = Mrunlib.read_header(path_to_corine_grid)
    corine_grid = np.loadtxt(path_to_corine_grid, dtype=int, skiprows=6)
    corine_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        corine_grid, corine_meta)
    print("read: ", path_to_corine_grid)

    # soil data
    path_to_soil_grid = paths["path-to-data-dir"] + DATA_GRID_SOIL
    soil_metadata, _ = Mrunlib.read_header(path_to_soil_grid)
    soil_grid = np.loadtxt(path_to_soil_grid, dtype=int, skiprows=6)
    print("read: ", path_to_soil_grid)

    cdict = {}
    climate_data_to_gk5_interpolator = {}
    for run_id in run_setups:
        setup = setups[run_id]
        climate_data = setup["climate_data"]
        if climate_data not in climate_data_to_gk5_interpolator:
            # path to latlon-to-rowcol.json
            path = TEMPLATE_PATH_LATLON.format(
                path_to_climate_dir=paths["path-to-climate-dir"],
                climate_data=climate_data)
            climate_data_to_gk5_interpolator[
                climate_data] = Mrunlib.create_climate_geoGrid_interpolator_from_json_file(
                    path, wgs84, gk5, cdict)
            print("created climate_data to gk5 interpolator: ", path)

    sent_env_count = 1
    start_time = time.perf_counter()

    listOfClimateFiles = set()
    # run calculations for each setup
    for setup_id in run_setups:

        if setup_id not in setups:
            continue
        start_setup_time = time.perf_counter()

        setup = setups[setup_id]
        climate_data = setup["climate_data"]
        climate_model = setup["climate_model"]
        climate_scenario = setup["climate_scenario"]
        climate_region = setup["climate_region"]

        # read template sim.json
        with open(setup.get("sim.json", config["sim.json"])) as _:
            sim_json = json.load(_)
        # change start and end date according to setup
        if setup["start_year"]:
            sim_json["climate.csv-options"]["start-date"] = str(
                setup["start_year"]) + "-01-01"
        if setup["end_year"]:
            sim_json["climate.csv-options"]["end-date"] = str(
                setup["end_year"]) + "-12-31"
        sim_json["include-file-base-path"] = paths["include-file-base-path"]

        # read template site.json
        with open(setup.get("site.json", config["site.json"])) as _:
            site_json = json.load(_)
        # read template crop.json
        with open(setup.get("crop.json", config["crop.json"])) as _:
            crop_json = json.load(_)
        crop_json["cropRotation"][0]["worksteps"][0]["date"] = crop_json[
            "cropRotation"][0]["worksteps"][0]["date"].replace(
                "XXXX", str(setup["start_year"]))

        # create environment template from json templates
        env_template = monica_io3.create_env_json_from_json_config({
            "crop": crop_json,
            "site": site_json,
            "sim": sim_json,
            "climate": ""
        })
        # set shared id in template
        if config["shared_id"]:
            env_template["sharedId"] = config["shared_id"]

        scols = int(soil_metadata["ncols"])
        srows = int(soil_metadata["nrows"])
        scellsize = int(soil_metadata["cellsize"])
        xllcorner = int(soil_metadata["xllcorner"])
        yllcorner = int(soil_metadata["yllcorner"])

        #print("All Rows x Cols: " + str(srows) + "x" + str(scols), flush=True)
        for srow in range(0, srows):
            #print(srow,)

            if srow < int(config["start-row"]):
                continue
            elif int(config["end-row"]) > 0 and srow > int(config["end-row"]):
                break

            for scol in range(0, scols):
                soil_id = soil_grid[srow, scol]
                if soil_id == -9999:
                    continue
                if soil_id < 1 or soil_id > 71:
                    #print("row/col:", srow, "/", scol, "has unknown soil_id:", soil_id)
                    #unknown_soil_ids.add(soil_id)
                    continue

                #get coordinate of closest climate element of real soil-cell
                sh_gk5 = yllcorner + (scellsize / 2) + (srows - srow - 1) * scellsize
                sr_gk5 = xllcorner + (scellsize / 2) + scol * scellsize
                #inter = crow/ccol encoded into integer
                crow, ccol = climate_data_to_gk5_interpolator[climate_data](
                    sr_gk5, sh_gk5)

                # check if current grid cell is used for agriculture
                if setup["landcover"]:
                    corine_id = corine_gk5_interpolate(sr_gk5, sh_gk5)
                    if corine_id not in [
                            200, 210, 211, 212, 240, 241, 242, 243, 244
                    ]:
                        continue

                height_nn = dem_gk5_interpolate(sr_gk5, sh_gk5)
                slope = slope_gk5_interpolate(sr_gk5, sh_gk5)

                #print("scol:", scol, "crow/col:", (crow, ccol), "soil_id:", soil_id, "height_nn:", height_nn, "slope:", slope, "seed_harvest_cs:", seed_harvest_cs)

                #with open("dump-" + str(c) + ".json", "w") as jdf:
                #    json.dump({"id": (str(resolution) \
                #        + "|" + str(vrow) + "|" + str(vcol) \
                #        + "|" + str(crow) + "|" + str(ccol) \
                #        + "|" + str(soil_id) \
                #        + "|" + str(uj_id)), "sowing": worksteps[0], "harvest": worksteps[1]}, jdf, indent=2)
                #    c += 1

                env_template["params"]["userCropParameters"][
                    "__enable_T_response_leaf_expansion__"] = setup[
                        "LeafExtensionModifier"]

                # set soil-profile
                sp_json = soil_io3.soil_parameters(soil_db_con, int(soil_id))
                soil_profile = monica_io3.find_and_replace_references(
                    sp_json, sp_json)["result"]

                #print("soil:", soil_profile)

                env_template["params"]["siteParameters"][
                    "SoilProfileParameters"] = soil_profile

                # setting groundwater level
                if setup["groundwater-level"]:
                    groundwaterlevel = 20
                    layer_depth = 0
                    for layer in soil_profile:
                        if layer.get("is_in_groundwater", False):
                            groundwaterlevel = layer_depth
                            #print("setting groundwaterlevel of soil_id:", str(soil_id), "to", groundwaterlevel, "m")
                            break
                        layer_depth += Mrunlib.get_value(layer["Thickness"])
                    env_template["params"]["userEnvironmentParameters"][
                        "MinGroundwaterDepthMonth"] = 3
                    env_template["params"]["userEnvironmentParameters"][
                        "MinGroundwaterDepth"] = [
                            max(0, groundwaterlevel - 0.2), "m"
                        ]
                    env_template["params"]["userEnvironmentParameters"][
                        "MaxGroundwaterDepth"] = [groundwaterlevel + 0.2, "m"]

                # setting impenetrable layer
                if setup["impenetrable-layer"]:
                    impenetrable_layer_depth = Mrunlib.get_value(
                        env_template["params"]["userEnvironmentParameters"]
                        ["LeachingDepth"])
                    layer_depth = 0
                    for layer in soil_profile:
                        if layer.get("is_impenetrable", False):
                            impenetrable_layer_depth = layer_depth
                            #print("setting leaching depth of soil_id:", str(soil_id), "to", impenetrable_layer_depth, "m")
                            break
                        layer_depth += Mrunlib.get_value(layer["Thickness"])
                    env_template["params"]["userEnvironmentParameters"][
                        "LeachingDepth"] = [impenetrable_layer_depth, "m"]
                    env_template["params"]["siteParameters"][
                        "ImpenetrableLayerDepth"] = [
                            impenetrable_layer_depth, "m"
                        ]

                if setup["elevation"]:
                    env_template["params"]["siteParameters"][
                        "heightNN"] = float(height_nn)

                if setup["slope"]:
                    env_template["params"]["siteParameters"][
                        "slope"] = slope / 100.0

                if setup["latitude"]:
                    clat, _ = cdict[(crow, ccol)]
                    env_template["params"]["siteParameters"]["Latitude"] = clat

                if setup["CO2"]:
                    env_template["params"]["userEnvironmentParameters"][
                        "AtmosphericCO2"] = float(setup["CO2"])

                if setup["O3"]:
                    env_template["params"]["userEnvironmentParameters"][
                        "AtmosphericO3"] = float(setup["O3"])

                env_template["params"]["simulationParameters"][
                    "UseNMinMineralFertilisingMethod"] = setup["fertilization"]
                env_template["params"]["simulationParameters"][
                    "UseAutomaticIrrigation"] = setup["irrigation"]

                env_template["params"]["simulationParameters"][
                    "NitrogenResponseOn"] = setup["NitrogenResponseOn"]
                env_template["params"]["simulationParameters"][
                    "WaterDeficitResponseOn"] = setup["WaterDeficitResponseOn"]
                env_template["params"]["simulationParameters"][
                    "EmergenceMoistureControlOn"] = setup[
                        "EmergenceMoistureControlOn"]
                env_template["params"]["simulationParameters"][
                    "EmergenceFloodingControlOn"] = setup[
                        "EmergenceFloodingControlOn"]

                env_template["csvViaHeaderOptions"] = sim_json[
                    "climate.csv-options"]

                subpath_to_csv = TEMPLATE_PATH_CLIMATE_CSV.format(
                    climate_data=climate_data,
                    climate_model_folder=(climate_model +
                                          "/" if climate_model else ""),
                    climate_scenario_folder=(climate_scenario +
                                             "/" if climate_scenario else ""),
                    climate_region=climate_region,
                    crow=str(crow),
                    ccol=str(ccol))
                # subpath_to_csv = climate_data + "/csvs/" \
                # + (climate_model + "/" if climate_model else "") \
                # + (climate_scenario + "/" if climate_scenario else "") \
                # + climate_region + "/row-" + str(crow) + "/col-" + str(ccol) + ".csv"
                env_template["pathToClimateCSV"] = paths[
                    "monica-path-to-climate-dir"] + subpath_to_csv
                #print(env_template["pathToClimateCSV"])
                if DEBUG_WRITE_CLIMATE:
                    listOfClimateFiles.add(subpath_to_csv)

                env_template["customId"] = {
                    "setup_id": setup_id,
                    "srow": srow,
                    "scol": scol,
                    "crow": int(crow),
                    "ccol": int(ccol),
                    "soil_id": int(soil_id)
                }
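                # the consumer (see the examples above) keys its buffers on
                # these ids: setup_id selects the output bucket and srow/scol
                # locate the result cell in the soil grid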

                if not DEBUG_DONOT_SEND:
                    socket.send_json(env_template)
                    print("sent env ",
                          sent_env_count,
                          " customId: ",
                          env_template["customId"],
                          flush=True)

                sent_env_count += 1

                # write debug output, as json file
                if DEBUG_WRITE:
                    if not os.path.exists(DEBUG_WRITE_FOLDER):
                        os.makedirs(DEBUG_WRITE_FOLDER)
                    if sent_env_count < DEBUG_ROWS:

                        path_to_debug_file = DEBUG_WRITE_FOLDER + "/row_" + str(
                            sent_env_count - 1) + "_" + str(setup_id) + ".json"

                        if not os.path.isfile(path_to_debug_file):
                            with open(path_to_debug_file, "w") as _:
                                _.write(json.dumps(env_template))
                        else:
                            print("WARNING: Row ", (sent_env_count - 1),
                                  " already exists")
            #print("unknown_soil_ids:", unknown_soil_ids)

            #print("crows/cols:", crows_cols)
        stop_setup_time = time.perf_counter()
        print("Setup ", (sent_env_count - 1),
              " envs took ", (stop_setup_time - start_setup_time),
              " seconds",
              flush=True)

    stop_time = time.perf_counter()

    # write summary of used json files
    if DEBUG_WRITE_CLIMATE:
        if not os.path.exists(DEBUG_WRITE_FOLDER):
            os.makedirs(DEBUG_WRITE_FOLDER)

        path_to_climate_summary = DEBUG_WRITE_FOLDER + "/climate_file_list.csv"
        with open(path_to_climate_summary, "w") as _:
            _.write('\n'.join(listOfClimateFiles))

    print("sending ", (sent_env_count - 1),
          " envs took ", (stop_time - start_time),
          " seconds",
          flush=True)
    #print("ran from ", start, "/", row_cols[start], " to ", end, "/", row_cols[end])
    print("exiting run_producer()", flush=True)
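
Like the consumer, the producer can be launched directly; a minimal sketch, assuming the module-level constants it references (DEFAULT_HOST, DEFAULT_PORT, PATHS, SETUP_FILE, RUN_SETUP and the template paths) are defined, with illustrative literals:

if __name__ == "__main__":
    # PUSH one env per grid cell to the MONICA server/proxy; results are
    # routed onward to a consumer like run_consumer above
    run_producer(server={"server": "localhost", "port": "6666"})
    # config entries can also be overridden from a shell, e.g.:
    #   python producer.py run-setups=[3]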
Example No. 4
def run_producer(server={"server": None, "port": None}, shared_id=None):
    "main"

    context = zmq.Context()
    socket = context.socket(zmq.PUSH)  # pylint: disable=no-member
    #config_and_no_data_socket = context.socket(zmq.PUSH)

    config = {
        "mode": "remoteProducer-remoteMonica",
        "server-port": server["port"] if server["port"] else DEFAULT_PORT,
        "server": server["server"] if server["server"] else DEFAULT_HOST,
        "start-row": "0",
        "end-row": "-1",
        "sim.json": TEMPLATE_SIM_JSON,
        "crop.json": TEMPLATE_CROP_JSON,
        "site.json": TEMPLATE_SITE_JSON,
        "setups-file": SETUP_FILE,
        "run-setups": RUN_SETUP,
        "shared_id": shared_id
    }

    # read commandline args only if script is invoked directly from commandline
    if len(sys.argv) > 1 and __name__ == "__main__":
        for arg in sys.argv[1:]:
            k, v = arg.split("=")
            if k in config:
                config[k] = v

    print("config:", config)

    # select paths
    paths = PATHS[config["mode"]]
    # open soil db connection
    soil_db_con = sqlite3.connect(paths["path-to-data-dir"] + DATA_SOIL_DB)
    #soil_db_con = cas_sq3.connect(paths["path-to-data-dir"] + DATA_SOIL_DB) #CAS.
    # connect to monica proxy (if local, it will try to connect to a locally started monica)
    socket.connect("tcp://" + config["server"] + ":" +
                   str(config["server-port"]))

    # read setup from csv file
    setups = Mrunlib.read_sim_setups(paths["path-to-projects-dir"] +
                                     PROJECT_FOLDER + config["setups-file"])
    run_setups = json.loads(config["run-setups"])
    print(
        "read sim setups: ",
        paths["path-to-projects-dir"] + PROJECT_FOLDER + config["setups-file"])

    #transforms geospatial coordinates from one coordinate reference system to another
    # transform wgs84 into gk5
    wgs84 = Proj(init="epsg:4326"
                 )  #proj4 -> (World Geodetic System 1984 https://epsg.io/4326)
    gk5 = Proj(init=GEO_TARGET_GRID)

    # dictionary: key = cropId, value = {"interpolate": ..., "data": ..., "is-winter-crop": ...}
    ilr_seed_harvest_data = defaultdict(lambda: {
        "interpolate": None,
        "data": defaultdict(dict),
        "is-winter-crop": None
    })

    # add crop id from setup file
    crops_in_setups = set()
    for setup_id, setup in setups.items():
        for crop_id in setup["crop-ids"].split("_"):
            crops_in_setups.add(crop_id)

    for crop_id in crops_in_setups:
        try:
            #read seed/harvest dates for each crop_id
            path_harvest = TEMPLATE_PATH_HARVEST.format(
                path_to_projects_dir=paths["path-to-projects-dir"],
                project_folder=PROJECT_FOLDER,
                crop_id=crop_id)
            print("created seed harvest gk5 interpolator and read data: ",
                  path_harvest)
            Mrunlib.create_seed_harvest_geoGrid_interpolator_and_read_data(
                path_harvest, wgs84, gk5, ilr_seed_harvest_data)
        except IOError:
            print(
                "Couldn't read file:", paths["path-to-projects-dir"] +
                PROJECT_FOLDER + "ILR_SEED_HARVEST_doys_" + crop_id + ".csv")
            continue

    # Load grids
    ## note numpy is able to load from a compressed file, ending with .gz or .bz2

    # height data for germany
    path_to_dem_grid = paths["path-to-data-dir"] + DATA_GRID_HEIGHT
    dem_metadata, _ = Mrunlib.read_header(path_to_dem_grid)
    dem_grid = np.loadtxt(path_to_dem_grid, dtype=int, skiprows=6)
    dem_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        dem_grid, dem_metadata)
    print("read: ", path_to_dem_grid)

    # slope data
    path_to_slope_grid = paths["path-to-data-dir"] + DATA_GRID_SLOPE
    slope_metadata, _ = Mrunlib.read_header(path_to_slope_grid)
    slope_grid = np.loadtxt(path_to_slope_grid, dtype=float, skiprows=6)
    slope_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        slope_grid, slope_metadata)
    print("read: ", path_to_slope_grid)

    # land use data
    path_to_corine_grid = paths["path-to-data-dir"] + DATA_GRID_LAND_USE
    corine_meta, _ = Mrunlib.read_header(path_to_corine_grid)
    corine_grid = np.loadtxt(path_to_corine_grid, dtype=int, skiprows=6)
    corine_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        corine_grid, corine_meta)
    print("read: ", path_to_corine_grid)

    # soil data
    path_to_soil_grid = paths["path-to-data-dir"] + DATA_GRID_SOIL
    soil_metadata, _ = Mrunlib.read_header(path_to_soil_grid)
    soil_grid = np.loadtxt(path_to_soil_grid, dtype=int, skiprows=6)
    print("read: ", path_to_soil_grid)

    # rNfactor data
    path_to_rnf_grid = paths["path-to-data-dir"] + DATA_GRID_RNFACTOR
    rnf_meta, _ = Mrunlib.read_header(path_to_rnf_grid)
    rnf_grid = np.loadtxt(path_to_rnf_grid, dtype=float, skiprows=6)
    rnf_gk5_interpolate = Mrunlib.create_ascii_grid_interpolator(
        rnf_grid, rnf_meta)
    print("read: ", path_to_rnf_grid)

    cdict = {}
    climate_data_to_gk5_interpolator = {}
    for run_id in run_setups:
        setup = setups[run_id]
        climate_data = setup["climate_data"]
        if climate_data not in climate_data_to_gk5_interpolator:
            # path to latlon-to-rowcol.json
            path = TEMPLATE_PATH_LATLON.format(
                path_to_climate_dir=paths["path-to-climate-dir"],
                climate_data=climate_data)
            climate_data_to_gk5_interpolator[
                climate_data] = Mrunlib.create_climate_geoGrid_interpolator_from_json_file(
                    path, wgs84, gk5, cdict)
            print("created climate_data to gk5 interpolator: ", path)

    sent_env_count = 1
    start_time = time.perf_counter()

    listOfClimateFiles = set()
    # run calculations for each setup
    for setup_id in run_setups:

        if setup_id not in setups:
            continue
        start_setup_time = time.perf_counter()

        setup = setups[setup_id]
        climate_data = setup["climate_data"]
        climate_model = setup["climate_model"]
        climate_scenario = setup["climate_scenario"]
        climate_region = setup["climate_region"]
        crop_ids = setup["crop-ids"].split("_")

        # read template sim.json
        with open(setup.get("sim.json", config["sim.json"])) as _:
            sim_json = json.load(_)
        # change start and end date according to setup
        if setup["start_year"]:
            sim_json["climate.csv-options"]["start-date"] = str(
                setup["start_year"]) + "-01-01"
        if setup["end_year"]:
            sim_json["climate.csv-options"]["end-date"] = str(
                setup["end_year"]) + "-12-31"
        sim_json["include-file-base-path"] = paths["include-file-base-path"]

        # read template site.json
        with open(setup.get("site.json", config["site.json"])) as _:
            site_json = json.load(_)
        # read template crop.json
        with open(setup.get("crop.json", config["crop.json"])) as _:
            crop_json = json.load(_)

        # create environment template from json templates
        env_template = monica_io3.create_env_json_from_json_config({
            "crop": crop_json,
            "site": site_json,
            "sim": sim_json,
            "climate": ""
        })
        # set shared id in template
        if config["shared_id"]:
            env_template["sharedId"] = config["shared_id"]

        crop_rotation_copy = copy.deepcopy(env_template["cropRotation"])
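        # pristine copy of the rotation: the per-cell rNfactor scaling below
        # reads the original fertilizer amounts from this copy, so repeated
        # scaling does not compound from cell to cell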

        # create crop rotation according to setup
        # clear crop rotation and get its template
        #  crop_rotation_templates = env_template.pop("cropRotation")
        #  env_template["cropRotation"] = []
        # get correct template
        #  env_template["cropRotation"] = crop_rotation_templates[crop_id]

        # we just got one cultivation method in our rotation
        #  worksteps_templates_dict = env_template["cropRotation"][0].pop("worksteps")

        # clear the worksteps array and rebuild it out of the setup
        #  worksteps = env_template["cropRotation"][0]["worksteps"] = []
        #  worksteps.append(worksteps_templates_dict["sowing"][setup["sowing-date"]])
        #  worksteps.append(worksteps_templates_dict["harvest"][setup["harvest-date"]])

        scols = int(soil_metadata["ncols"])
        srows = int(soil_metadata["nrows"])
        scellsize = int(soil_metadata["cellsize"])
        xllcorner = int(soil_metadata["xllcorner"])
        yllcorner = int(soil_metadata["yllcorner"])

        #unknown_soil_ids = set()
        soil_id_cache = {}
        print("All Rows x Cols: " + str(srows) + "x" + str(scols))
        for srow in range(0, srows):
            print(srow)

            if srow != 238:  # debug leftover: restrict the run to a single row
                continue

            if srow < int(config["start-row"]):
                continue
            elif int(config["end-row"]) > 0 and srow > int(config["end-row"]):
                break

            for scol in range(0, scols):

                if scol != 107:  # debug leftover: restrict the run to a single column
                    continue

                soil_id = int(soil_grid[srow, scol])
                if soil_id == -9999:
                    continue

                if soil_id in soil_id_cache:
                    sp_json = soil_id_cache[soil_id]
                else:
                    sp_json = soil_io3.soil_parameters(soil_db_con, soil_id)
                    soil_id_cache[soil_id] = sp_json

                if len(sp_json) == 0:
                    print("row/col:", srow, "/", scol, "has unknown soil_id:",
                          soil_id)
                    #unknown_soil_ids.add(soil_id)
                    continue

                #get coordinate of closest climate element of real soil-cell
                sh_gk5 = yllcorner + (scellsize / 2) + (srows - srow - 1) * scellsize
                sr_gk5 = xllcorner + (scellsize / 2) + scol * scellsize
                #inter = crow/ccol encoded into integer
                crow, ccol = climate_data_to_gk5_interpolator[climate_data](
                    sr_gk5, sh_gk5)

                # check if current grid cell is used for agriculture
                if setup["landcover"]:
                    corine_id = corine_gk5_interpolate(sr_gk5, sh_gk5)
                    if corine_id not in [
                            200, 210, 211, 212, 240, 241, 242, 243, 244
                    ]:
                        continue

                rNfactor = rnf_gk5_interpolate(sr_gk5, sh_gk5)
                height_nn = dem_gk5_interpolate(sr_gk5, sh_gk5)
                slope = slope_gk5_interpolate(sr_gk5, sh_gk5)

                for i, crop_id in enumerate(crop_ids):

                    worksteps = env_template["cropRotation"][i]["worksteps"]
                    worksteps_copy = crop_rotation_copy[i]["worksteps"]

                    ilr_interpolate = ilr_seed_harvest_data[crop_id]["interpolate"]
                    seed_harvest_cs = ilr_interpolate(sr_gk5, sh_gk5) if ilr_interpolate else None

                    print("scol:", scol, "crow/col:", (crow, ccol), "crop_id:",
                          crop_id, "soil_id:", soil_id, "height_nn:",
                          height_nn, "slope:", slope, "seed_harvest_cs:",
                          seed_harvest_cs)

                    # scale mineral N fertilizer amounts by rNfactor, reading the
                    # original amounts from the pristine rotation copy
                    for k, workstep in enumerate(worksteps):
                        workstep_copy = worksteps_copy[k]
                        if workstep["type"] == "MineralFertilization":
                            if isinstance(workstep["amount"], list):
                                workstep["amount"][0] = workstep_copy["amount"][0] * rNfactor
                            elif isinstance(workstep["amount"], float):
                                workstep["amount"] = workstep_copy["amount"] * rNfactor

                    # set external seed/harvest dates
                    if seed_harvest_cs:
                        seed_harvest_data = ilr_seed_harvest_data[crop_id][
                            "data"][seed_harvest_cs]
                        if seed_harvest_data:
                            is_winter_crop = ilr_seed_harvest_data[crop_id][
                                "is-winter-crop"]

                            if setup["sowing-date"] == "fixed":
                                sowing_date = seed_harvest_data["sowing-date"]
                            elif setup["sowing-date"] == "auto":
                                sowing_date = seed_harvest_data[
                                    "latest-sowing-date"]

                            sds = [int(x) for x in sowing_date.split("-")]
                            sd = date(2001, sds[1], sds[2])
                            sdoy = sd.timetuple().tm_yday

                            if setup["harvest-date"] == "fixed":
                                harvest_date = seed_harvest_data[
                                    "harvest-date"]
                            elif setup["harvest-date"] == "auto":
                                harvest_date = seed_harvest_data[
                                    "latest-harvest-date"]

                            #print("sowing_date:", sowing_date, "harvest_date:", harvest_date)

                            hds = [int(x) for x in harvest_date.split("-")]
                            hd = date(2001, hds[1], hds[2])
                            hdoy = hd.timetuple().tm_yday

                            esds = [int(x) for x in
                                    seed_harvest_data["earliest-sowing-date"].split("-")]
                            esd = date(2001, esds[1], esds[2])

                            # sowing after harvest should probably never occur when both dates are fixed!
                            if setup["sowing-date"] == "fixed" and setup[
                                    "harvest-date"] == "fixed":
                                if is_winter_crop:
                                    calc_harvest_date = date(
                                        2000, 12, 31) + timedelta(
                                            days=min(hdoy, sdoy - 1))
                                else:
                                    calc_harvest_date = date(
                                        2000, 12, 31) + timedelta(days=hdoy)
                                worksteps[0]["date"] = seed_harvest_data[
                                    "sowing-date"]
                                worksteps[-1][
                                    "date"] = "{:04d}-{:02d}-{:02d}".format(
                                        hds[0], calc_harvest_date.month,
                                        calc_harvest_date.day)

                            elif setup["sowing-date"] == "fixed" and setup[
                                    "harvest-date"] == "auto":
                                if is_winter_crop:
                                    calc_harvest_date = date(
                                        2000, 12, 31) + timedelta(
                                            days=min(hdoy, sdoy - 1))
                                else:
                                    calc_harvest_date = date(
                                        2000, 12, 31) + timedelta(days=hdoy)
                                worksteps[0]["date"] = seed_harvest_data[
                                    "sowing-date"]
                                worksteps[1][
                                    "latest-date"] = "{:04d}-{:02d}-{:02d}".format(
                                        hds[0], calc_harvest_date.month,
                                        calc_harvest_date.day)

                            elif setup["sowing-date"] == "auto" and setup[
                                    "harvest-date"] == "fixed":
                                worksteps[0][
                                    "earliest-date"] = seed_harvest_data[
                                        "earliest-sowing-date"] if esd > date(
                                            esd.year, 6, 20
                                        ) else "{:04d}-{:02d}-{:02d}".format(
                                            sds[0], 6, 20)
                                calc_sowing_date = date(
                                    2000, 12,
                                    31) + timedelta(days=max(hdoy + 1, sdoy))
                                worksteps[0][
                                    "latest-date"] = "{:04d}-{:02d}-{:02d}".format(
                                        sds[0], calc_sowing_date.month,
                                        calc_sowing_date.day)
                                worksteps[1]["date"] = seed_harvest_data[
                                    "harvest-date"]

                            elif setup["sowing-date"] == "auto" and setup[
                                    "harvest-date"] == "auto":
                                worksteps[0][
                                    "earliest-date"] = seed_harvest_data[
                                        "earliest-sowing-date"] if esd > date(
                                            esd.year, 6, 20
                                        ) else "{:04d}-{:02d}-{:02d}".format(
                                            sds[0], 6, 20)
                                if is_winter_crop:
                                    calc_harvest_date = date(
                                        2000, 12, 31) + timedelta(
                                            days=min(hdoy, sdoy - 1))
                                else:
                                    calc_harvest_date = date(
                                        2000, 12, 31) + timedelta(days=hdoy)
                                worksteps[0][
                                    "latest-date"] = seed_harvest_data[
                                        "latest-sowing-date"]
                                worksteps[1][
                                    "latest-date"] = "{:04d}-{:02d}-{:02d}".format(
                                        hds[0], calc_harvest_date.month,
                                        calc_harvest_date.day)

                        #print("dates: ", int(seed_harvest_cs), ":", worksteps[0]["earliest-date"], "<", worksteps[0]["latest-date"] )
                        #print("dates: ", int(seed_harvest_cs), ":", worksteps[1]["latest-date"], "<", worksteps[0]["earliest-date"], "<", worksteps[0]["latest-date"] )

                        #print("dates: ", int(seed_harvest_cs), ":", worksteps[0]["date"])
                        #print("dates: ", int(seed_harvest_cs), ":", worksteps[-1]["date"])

                #print("sowing:", worksteps[0], "harvest:", worksteps[1])

                #with open("dump-" + str(c) + ".json", "w") as jdf:
                #   json.dump({"id": (str(resolution) \
                #      + "|" + str(vrow) + "|" + str(vcol) \
                #     + "|" + str(crow) + "|" + str(ccol) \
                #    + "|" + str(soil_id) \
                #    + "|" + crop_id \
                #    + "|" + str(uj_id)), "sowing": worksteps[0], "harvest": worksteps[1]}, jdf, indent=2)
                #c += 1

                env_template["params"]["userCropParameters"][
                    "__enable_T_response_leaf_expansion__"] = setup[
                        "LeafExtensionModifier"]

                # set soil-profile
                #sp_json = soil_io3.soil_parameters(soil_db_con, int(soil_id))
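                # resolve internal references in the soil JSON; "result" holds
                # the fully expanded soil profile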
                soil_profile = monica_io3.find_and_replace_references(
                    sp_json, sp_json)["result"]

                #print("soil:", soil_profile)

                env_template["params"]["siteParameters"][
                    "SoilProfileParameters"] = soil_profile

                # setting groundwater level
                if setup["groundwater-level"]:
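                    # walk the profile top-down: the first layer flagged as lying
                    # in groundwater sets the water-table depth (default 20 m if
                    # none is flagged); the level is then widened into a +/- 0.2 m
                    # min/max band, with the minimum clamped at the surface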
                    groundwaterlevel = 20
                    layer_depth = 0
                    for layer in soil_profile:
                        if layer.get("is_in_groundwater", False):
                            groundwaterlevel = layer_depth
                            #print("setting groundwaterlevel of soil_id:", str(soil_id), "to", groundwaterlevel, "m")
                            break
                        layer_depth += Mrunlib.get_value(layer["Thickness"])
                    env_template["params"]["userEnvironmentParameters"][
                        "MinGroundwaterDepthMonth"] = 3
                    env_template["params"]["userEnvironmentParameters"][
                        "MinGroundwaterDepth"] = [
                            max(0, groundwaterlevel - 0.2), "m"
                        ]
                    env_template["params"]["userEnvironmentParameters"][
                        "MaxGroundwaterDepth"] = [groundwaterlevel + 0.2, "m"]

                # setting impenetrable layer
                if setup["impenetrable-layer"]:
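                    # analogous walk: the first layer flagged as impenetrable caps
                    # both the leaching depth and the impenetrable-layer depth,
                    # with the current LeachingDepth as fallback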
                    impenetrable_layer_depth = Mrunlib.get_value(
                        env_template["params"]["userEnvironmentParameters"]
                        ["LeachingDepth"])
                    layer_depth = 0
                    for layer in soil_profile:
                        if layer.get("is_impenetrable", False):
                            impenetrable_layer_depth = layer_depth
                            #print("setting leaching depth of soil_id:", str(soil_id), "to", impenetrable_layer_depth, "m")
                            break
                        layer_depth += Mrunlib.get_value(layer["Thickness"])
                    env_template["params"]["userEnvironmentParameters"][
                        "LeachingDepth"] = [impenetrable_layer_depth, "m"]
                    env_template["params"]["siteParameters"][
                        "ImpenetrableLayerDepth"] = [
                            impenetrable_layer_depth, "m"
                        ]

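                # optional per-setup site overrides (elevation, slope, latitude)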
                if setup["elevation"]:
                    env_template["params"]["siteParameters"][
                        "heightNN"] = float(height_nn)

                if setup["slope"]:
                    env_template["params"]["siteParameters"][
                        "slope"] = slope / 100.0

                clat, _ = cdict[(crow, ccol)]
                if setup["latitude"]:
                    env_template["params"]["siteParameters"]["Latitude"] = clat

                if setup["CO2"]:
                    env_template["params"]["userEnvironmentParameters"][
                        "AtmosphericCO2"] = float(setup["CO2"])

                if setup["O3"]:
                    env_template["params"]["userEnvironmentParameters"][
                        "AtmosphericO3"] = float(setup["O3"])

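                # copy the per-setup simulation switches into MONICA's
                # simulation parameters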
                env_template["params"]["simulationParameters"][
                    "UseNMinMineralFertilisingMethod"] = setup["fertilization"]
                env_template["params"]["simulationParameters"][
                    "UseAutomaticIrrigation"] = setup["irrigation"]

                env_template["params"]["simulationParameters"][
                    "NitrogenResponseOn"] = setup["NitrogenResponseOn"]
                env_template["params"]["simulationParameters"][
                    "WaterDeficitResponseOn"] = setup["WaterDeficitResponseOn"]
                env_template["params"]["simulationParameters"][
                    "EmergenceMoistureControlOn"] = setup[
                        "EmergenceMoistureControlOn"]
                env_template["params"]["simulationParameters"][
                    "EmergenceFloodingControlOn"] = setup[
                        "EmergenceFloodingControlOn"]

                env_template["csvViaHeaderOptions"] = sim_json[
                    "climate.csv-options"]

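                # assemble the relative path to the per-climate-cell CSV from
                # dataset, model, scenario, region and the climate row/col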
                subpath_to_csv = TEMPLATE_PATH_CLIMATE_CSV.format(
                    climate_data=climate_data,
                    climate_model_folder=(climate_model +
                                          "/" if climate_model else ""),
                    climate_scenario_folder=(climate_scenario +
                                             "/" if climate_scenario else ""),
                    climate_region=climate_region,
                    crow=str(crow),
                    ccol=str(ccol))
                # subpath_to_csv = climate_data + "/csvs/" \
                # + (climate_model + "/" if climate_model else "") \
                # + (climate_scenario + "/" if climate_scenario else "") \
                # + climate_region + "/row-" + str(crow) + "/col-" + str(ccol) + ".csv"
                env_template["pathToClimateCSV"] = paths[
                    "monica-path-to-climate-dir"] + subpath_to_csv
                print(env_template["pathToClimateCSV"])
                if DEBUG_WRITE_CLIMATE:
                    listOfClimateFiles.add(subpath_to_csv)

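                # customId travels with the env and is echoed back by the workers,
                # so the consumer can map each result to its setup and grid cell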
                env_template["customId"] = {
                    "setup_id": setup_id,
                    "srow": srow,
                    "scol": scol,
                    "crow": int(crow),
                    "ccol": int(ccol),
                    "soil_id": soil_id
                }

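                # ship the fully assembled env to the workers over the ZMQ socket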
                if not DEBUG_DONOT_SEND:
                    socket.send_json(env_template)
                    print("sent env ", sent_env_count, " customId: ",
                          env_template["customId"])

                sent_env_count += 1

                # write debug output as json file
                if DEBUG_WRITE:
                    debug_write_folder = paths["path-debug-write-folder"]
                    if not os.path.exists(debug_write_folder):
                        os.makedirs(debug_write_folder)
                    if sent_env_count < DEBUG_ROWS:

                        path_to_debug_file = debug_write_folder + "/row_" + str(
                            sent_env_count - 1) + "_" + str(setup_id) + ".json"

                        if not os.path.isfile(path_to_debug_file):
                            with open(path_to_debug_file, "w") as f:
                                f.write(json.dumps(env_template))
                        else:
                            print("WARNING: Row ", (sent_env_count - 1),
                                  " already exists")
            #print("unknown_soil_ids:", unknown_soil_ids)

            #print("crows/cols:", crows_cols)
        # time.clock() was removed in Python 3.8; start_setup_time must be
        # taken with the same clock
        stop_setup_time = time.perf_counter()
        print("Setup ", (sent_env_count - 1), " envs took ",
              (stop_setup_time - start_setup_time), " seconds")

    stop_time = time.perf_counter()  # must match the clock used for start_time

    # write summary of used climate csv files
    if DEBUG_WRITE_CLIMATE:
        debug_write_folder = paths["path-debug-write-folder"]
        if not os.path.exists(debug_write_folder):
            os.makedirs(debug_write_folder)

        path_to_climate_summary = debug_write_folder + "/climate_file_list.csv"
        with open(path_to_climate_summary, "w") as f:
            f.write('\n'.join(listOfClimateFiles))

    print("sending ", (sent_env_count - 1), " envs took ",
          (stop_time - start_time), " seconds")
    #print("ran from ", start, "/", row_cols[start], " to ", end, "/", row_cols[end])
    print("exiting run_producer()")