def process_message(msg):
    """Handle one message received from a worker.

    A "finish" message requests shutdown; any other message is treated as a
    work result and its data sections are written to a numbered CSV file
    under config["out"].

    :param msg: dict decoded from the worker's JSON message; expected keys:
        "type", and for work results "customId" and "data" (a list of
        sections with "results", "origSpec", "outputIds").
    :return: True when the consumer loop should stop, else False.
    """
    # Bugfix: the guard previously tested hasattr(..., "wnof_count") but
    # initialized "received_env_count". Since "wnof_count" was never set, the
    # counter was reset to 0 on EVERY call, so each result overwrote the same
    # output file. Test the attribute that is actually used.
    if not hasattr(process_message, "received_env_count"):
        process_message.received_env_count = 0

    leave = False
    if msg["type"] == "finish":
        print("c: received finish message")
        leave = True
    else:
        print("c: received work result ", process_message.received_env_count,
              " customId: ", str(msg.get("customId", "")))
        process_message.received_env_count += 1

        #with open("out/out-" + str(i) + ".csv", 'wb') as _:
        with open(config["out"] + str(process_message.received_env_count) + ".csv",
                  'w', newline='') as _:
            writer = csv.writer(_, delimiter=",")
            for data_ in msg.get("data", []):
                results = data_.get("results", [])
                orig_spec = data_.get("origSpec", "")
                output_ids = data_.get("outputIds", [])
                if len(results) > 0:
                    writer.writerow([orig_spec.replace("\"", "")])
                    for row in monica_io3.write_output_header_rows(
                            output_ids,
                            include_header_row=True,
                            include_units_row=True,
                            include_time_agg=False):
                        writer.writerow(row)
                    for row in monica_io3.write_output(output_ids, results):
                        writer.writerow(row)
                writer.writerow([])

        if config["leave_after_finished_run"] == True:
            leave = True

    return leave
def process_message(msg):
    """Process a single worker message.

    "finish" messages signal shutdown; work-result messages are dumped to a
    per-result CSV file named config["out"] + <counter> + ".csv".

    :param msg: decoded JSON dict from the worker ("type", optional
        "customId" and "data").
    :return: True if the receive loop should terminate, False otherwise.
    """
    # Bugfix: the guard checked for attribute "wnof_count" but only ever set
    # "received_env_count", so hasattr() was always False and the counter was
    # zeroed on every call — every result was written to the same file.
    # Guard on the attribute that is really used.
    if not hasattr(process_message, "received_env_count"):
        process_message.received_env_count = 0

    leave = False
    if msg["type"] == "finish":
        print("c: received finish message")
        leave = True
    else:
        print("c: received work result ", process_message.received_env_count,
              " customId: ", str(msg.get("customId", "")))
        process_message.received_env_count += 1

        #with open("out/out-" + str(i) + ".csv", 'wb') as _:
        with open(config["out"] + str(process_message.received_env_count) + ".csv",
                  'w', newline='') as _:
            writer = csv.writer(_, delimiter=",")
            for data_ in msg.get("data", []):
                results = data_.get("results", [])
                orig_spec = data_.get("origSpec", "")
                output_ids = data_.get("outputIds", [])
                if len(results) > 0:
                    writer.writerow([orig_spec.replace("\"", "")])
                    for row in monica_io3.write_output_header_rows(
                            output_ids,
                            include_header_row=True,
                            include_units_row=True,
                            include_time_agg=False):
                        writer.writerow(row)
                    for row in monica_io3.write_output(output_ids, results):
                        writer.writerow(row)
                writer.writerow([])

        if config["leave_after_finished_run"] == True:
            leave = True

    return leave
def process_message(msg):
    """Handle one worker message in grid- or normal-output mode.

    Dispatch:
      * "finish"                         -> request shutdown (return True)
      * grid mode (not write_normal_output_files)
                                         -> accumulate the cell result and
                                            write completed rows to grids
      * normal mode (write_normal_output_files)
                                         -> dump the result to a numbered CSV

    Relies on module-level state: write_normal_output_files,
    setup_id_to_data, datacells_per_row, config, leave_after_finished_run,
    create_output, write_row_to_grids, monica_io3.

    :param msg: dict decoded from the worker's JSON message.
    :return: True when all expected results arrived (consumer should stop),
        otherwise False (a bare ``return`` / None for ignored metadata
        messages, which callers treat as falsy).
    """
    if not hasattr(process_message, "wnof_count"):
        process_message.wnof_count = 0
        process_message.setup_count = 0
        # Bugfix: received_env_count is read and incremented in both work
        # branches below but was never initialized, raising AttributeError on
        # the very first work result.
        process_message.received_env_count = 0

    leave = False
    if msg["type"] == "finish":
        print("c: received finish message")
        leave = True

    elif not write_normal_output_files:
        custom_id = msg["customId"]
        setup_id = custom_id["setup_id"]
        data = setup_id_to_data[setup_id]
        row = custom_id["srow"]
        col = custom_id["scol"]
        #crow = custom_id.get("crow", -1)
        #ccol = custom_id.get("ccol", -1)
        #soil_id = custom_id.get("soil_id", -1)

        debug_msg = "received work result " + str(process_message.received_env_count) + " customId: " + str(msg.get("customId", "")) \
            + " next row: " + str(data["next-row"]) \
            + " cols@row to go: " + str(data["datacell-count"][row]) + "@" + str(row) + " cells_per_row: " + str(datacells_per_row[row])#\
        #+ " rows unwritten: " + str(data["row-col-data"].keys())
        print(debug_msg)
        #debug_file.write(debug_msg + "\n")
        data["row-col-data"][row][col].append(create_output(msg))
        data["datacell-count"][row] -= 1
        process_message.received_env_count = process_message.received_env_count + 1

        # flush every row for which all cells have now arrived
        #while data["next-row"] in data["row-col-data"] and data["datacell-count"][data["next-row"]] == 0:
        while data["datacell-count"][data["next-row"]] == 0:
            path_to_out_dir = config["out"] + str(setup_id) + "/"
            path_to_csv_out_dir = config["csv-out"] + str(setup_id) + "/"
            if not data["out_dir_exists"]:
                if os.path.isdir(path_to_out_dir) and os.path.exists(path_to_out_dir):
                    data["out_dir_exists"] = True
                else:
                    try:
                        os.makedirs(path_to_out_dir)
                        data["out_dir_exists"] = True
                    except OSError:
                        print("c: Couldn't create dir:", path_to_out_dir, "! Exiting.")
                        exit(1)
                # NOTE(review): both the grid and the csv dir share the single
                # "out_dir_exists" flag — confirm that is intended.
                if os.path.isdir(path_to_csv_out_dir) and os.path.exists(path_to_csv_out_dir):
                    data["out_dir_exists"] = True
                else:
                    try:
                        os.makedirs(path_to_csv_out_dir)
                        data["out_dir_exists"] = True
                    except OSError:
                        print("c: Couldn't create dir:", path_to_csv_out_dir, "! Exiting.")
                        exit(1)

            write_row_to_grids(data["row-col-data"], data["next-row"], data["ncols"],
                               data["header"], path_to_out_dir, path_to_csv_out_dir,
                               setup_id)
            debug_msg = "wrote row: " + str(data["next-row"]) \
                + " next-row: " + str(data["next-row"] + 1) \
                + " rows unwritten: " + str(list(data["row-col-data"].keys()))
            print(debug_msg)
            #debug_file.write(debug_msg + "\n")
            data["next-row"] += 1  # move to next row (to be written)

            if leave_after_finished_run \
                and ((data["end_row"] < 0 and data["next-row"] > data["nrows"] - 1) \
                     or (data["end_row"] >= 0 and data["next-row"] > data["end_row"])):
                process_message.setup_count += 1
                # if all setups are done, the run_setups list should be empty
                # and we can return
                if process_message.setup_count >= int(config["no-of-setups"]):
                    print("c: all results received, exiting")
                    leave = True
                    break

    elif write_normal_output_files:
        if msg.get("type", "") in ["jobs-per-cell", "no-data", "setup_data"]:
            #print "ignoring", result.get("type", "")
            return

        print("received work result ", process_message.received_env_count,
              " customId: ", str(list(msg.get("customId", "").values())))

        custom_id = msg["customId"]
        setup_id = custom_id["setup_id"]
        row = custom_id["srow"]
        col = custom_id["scol"]
        #crow = custom_id.get("crow", -1)
        #ccol = custom_id.get("ccol", -1)
        #soil_id = custom_id.get("soil_id", -1)

        process_message.wnof_count += 1
        #with open("out/out-" + str(i) + ".csv", 'wb') as _:
        with open("out-normal/out-" + str(process_message.wnof_count) + ".csv",
                  "w", newline='') as _:
            writer = csv.writer(_, delimiter=";")
            for data_ in msg.get("data", []):
                results = data_.get("results", [])
                orig_spec = data_.get("origSpec", "")
                output_ids = data_.get("outputIds", [])
                if len(results) > 0:
                    writer.writerow([orig_spec.replace("\"", "")])
                    for row in monica_io3.write_output_header_rows(
                            output_ids,
                            include_header_row=True,
                            include_units_row=True,
                            include_time_agg=False):
                        writer.writerow(row)
                    for row in monica_io3.write_output(output_ids, results):
                        writer.writerow(row)
                writer.writerow([])

        process_message.received_env_count = process_message.received_env_count + 1

    return leave
def main():
    """Collect data from workers.

    Reads ``key=value`` overrides from sys.argv into CONFIGURATION, connects
    a ZMQ PULL socket to the configured server/port, and consumes result
    messages until a "finish" message arrives or the receive times out.
    Buffered per-soil_ref data is flushed via write_data() once it reaches
    start_writing_lines_threshold lines, and on timeout/error before leaving.
    """
    config = CONFIGURATION
    if len(sys.argv) > 1 and __name__ == "__main__":
        for arg in sys.argv[1:]:
            k, v = arg.split("=")
            if k in config:
                # these two are numeric; everything else stays a string
                if k == "timeout" or k == "start_writing_lines_threshold":
                    config[k] = int(v)
                else:
                    config[k] = v

    if not config["server"]:
        config["server"] = server[config["mode"]]

    print("consumer config:", config)

    data = defaultdict(list)

    i = 1
    context = zmq.Context()
    socket = context.socket(zmq.PULL)  # pylint: disable=no-member
    socket.connect("tcp://" + config["server"] + ":" + config["port"])
    socket.RCVTIMEO = config["timeout"]
    leave = False
    write_normal_output_files = config["write_normal_output_files"] == "true"
    start_writing_lines_threshold = config["start_writing_lines_threshold"]

    while not leave:
        try:
            result = socket.recv_json(encoding="latin-1")
        except zmq.error.Again as _e:
            print('no response from the server (with "timeout"=%d ms) ' % socket.RCVTIMEO)
            # flush everything buffered so far before giving up
            for soil_ref in data.keys():
                if len(data[soil_ref]) > 0:
                    write_data(soil_ref, data, config["mode"])
            return
        except Exception:
            # Bugfix: was a bare "except:", which also swallowed SystemExit
            # and KeyboardInterrupt; flush buffered data and keep consuming.
            for soil_ref in data.keys():
                if len(data[soil_ref]) > 0:
                    write_data(soil_ref, data, config["mode"])
            continue

        if result["type"] == "finish":
            print("received finish message")
            leave = True

        elif not write_normal_output_files:
            if len(result["errors"]) > 0:
                for err in result["errors"]:
                    print(err)

            custom_id = result["customId"]
            soil_ref = custom_id["soil_ref"]
            period = custom_id["period"]
            gcm = custom_id["gcm"]
            co2_id = custom_id["co2_id"]
            co2_value = custom_id["co2_value"]
            trt_no = custom_id["trt_no"]
            prod_case = custom_id["prod_case"]
            crop_id = custom_id["crop_id"]
            first_cp = custom_id["first_cp"]
            #print("recv env ", sendID, "customId: ", list(custom_id.values()))
            # print(custom_id)

            res = create_output(soil_ref, crop_id, first_cp, co2_id, co2_value,
                                period, gcm, trt_no, prod_case, result)
            data[soil_ref].extend(res)

            # write out in batches to bound memory use
            if len(data[soil_ref]) >= start_writing_lines_threshold:
                write_data(soil_ref, data, config["mode"])

            i += 1

        elif write_normal_output_files:
            print("received work result ", i, " customId: ",
                  result.get("customId", ""))

            if result.get("type", "") in ["jobs-per-cell", "no-data", "setup_data"]:
                # NOTE(review): this "return" terminates main() on the first
                # metadata message; a "continue" (skip it and keep consuming)
                # looks intended — confirm before changing.
                return

            with open("out/out-" + str(i) + ".csv", 'w') as _:
                writer = csv.writer(_, delimiter=",")
                for data_ in result.get("data", []):
                    results = data_.get("results", [])
                    orig_spec = data_.get("origSpec", "")
                    output_ids = data_.get("outputIds", [])
                    if len(results) > 0:
                        writer.writerow([orig_spec.replace("\"", "")])
                        for row in monica_io3.write_output_header_rows(
                                output_ids,
                                include_header_row=True,
                                include_units_row=True,
                                include_time_agg=False):
                            writer.writerow(row)
                        for row in monica_io3.write_output(output_ids, results):
                            writer.writerow(row)
                    writer.writerow([])

            i += 1