# Example No. 1
class KDPEvalServer:
    """XML-RPC server that accepts KDP evaluation requests and persists
    each one as a numbered job file under ``request_dir``.

    Job numbering is persisted in ``req_job_info.json`` through
    JsonTiedDict so ids survive server restarts.
    """

    def __init__(self):
        # Directory for incoming job payloads; must be present in the
        # environment (raises KeyError otherwise).
        self.request_dir = os.environ["request_dir"]
        info_path = os.path.join(self.request_dir, "req_job_info.json")
        self.json_tied_dict = JsonTiedDict(info_path)
        # NOTE(review): TFRecordMaker resumes at last_id() + 1 while this
        # class resumes at last_id() — confirm the off-by-one is intended.
        self.next_job_id = self.json_tied_dict.last_id()

    def start(self):
        """Start a threaded XML-RPC server (blocking) exposing eval_job."""
        class RequestHandler(SimpleXMLRPCRequestHandler):
            rpc_paths = ('/RPC2', )

        # ThreadingMixIn handles each RPC call on its own thread.
        class RPCThreading(socketserver.ThreadingMixIn, SimpleXMLRPCServer):
            pass

        print("")
        print("  [ KDPEvalServer ]")
        print()
        print("Preparing server")
        # `port` is expected to be defined at module level — TODO confirm.
        server = RPCThreading(("0.0.0.0", port),
                              requestHandler=RequestHandler,
                              allow_none=True)
        server.register_introspection_functions()
        server.register_function(self.eval_job, 'eval_job')
        print("Waiting")
        server.serve_forever()

    def save_request(self, job_id, kdp_list: List[KDP]):
        """Atomically persist the pickled kdp_list to <request_dir>/<job_id>.

        Writing to a ``.tmp`` file first and then renaming makes the final
        file appear atomically, so a watcher never sees a partial pickle.
        """
        save_path = os.path.join(self.request_dir, str(job_id))
        temp_save_path = save_path + ".tmp"
        # Fix: close (and flush) the file before the rename; the original
        # leaked the handle, so the rename could race an unflushed buffer.
        with open(temp_save_path, "wb") as out_f:
            pickle.dump(kdp_list, out_f)
        os.rename(temp_save_path, save_path)

    def eval_job(self, kdp_list_raw: List[Tuple]):
        """RPC entry point: deserialize KDP states, persist them as a new
        job file, advance the persistent id counter, and return the job id.
        """
        kdp_list: List[KDP] = lmap(KDP.from_state, kdp_list_raw)
        job_id = self.next_job_id
        self.save_request(job_id, kdp_list)
        self.next_job_id += 1
        self.json_tied_dict.set('last_task_id', self.next_job_id)
        return job_id
# Example No. 2
class TFRecordMaker:
    """Watches ``request_dir`` for pickled KDP job files and converts each
    into a TFRecord file plus a ``.info`` sidecar, then queues an
    estimator job for the new record.
    """

    def __init__(self):
        # Both directories must be present in the environment
        # (raises KeyError otherwise).
        self.request_dir = os.environ["request_dir"]
        self.tf_record_dir = os.environ["tf_record_dir"]
        info_path = os.path.join(self.request_dir, "info.json")
        self.json_tied_dict = JsonTiedDict(info_path)
        self.next_job_id = self.json_tied_dict.last_id() + 1
        self.qck_generator: QCKGenDynamicKDP = get_qck_gen_dynamic_kdp()
        self.save_dir = os.path.join(output_path, "cppnc_auto")

        # One file per job id appears in request_dir; the watcher calls
        # make_tfrecord with that id when the file shows up.
        score_save_path_format = os.path.join(self.request_dir, "{}")
        self.job_runner = FileWatchingJobRunner(score_save_path_format,
                                                info_path, self.make_tfrecord,
                                                "tfrecord maker")

        print("")
        print("  [ TFRecordMaker ]")
        print()

    def file_watch_daemon(self):
        """Run the file-watching job loop (blocking)."""
        self.job_runner.start()
        print("TFRecordMaker thread()")

    def make_tfrecord(self, job_id: int):
        """Convert the pickled request <request_dir>/<job_id> into a
        TFRecord at <tf_record_dir>/<job_id>, write the data-id info
        sidecar, and launch the downstream estimator job.
        """
        save_path = os.path.join(self.request_dir, str(job_id))
        # Fix: use a context manager so the handle is closed (the original
        # leaked it). NOTE(review): pickle.load assumes request_dir content
        # is trusted — presumably written locally by KDPEvalServer; verify.
        with open(save_path, "rb") as in_f:
            kdp_list = pickle.load(in_f)
        data_id_manager = DataIDManager(0, 1000 * 1000)
        print("{} kdp".format(len(kdp_list)))
        insts = self.qck_generator.generate(kdp_list, data_id_manager)
        record_save_path = os.path.join(self.tf_record_dir, str(job_id))
        write_records_w_encode_fn(record_save_path,
                                  self.qck_generator.encode_fn, insts)
        # Save for backup
        info_save_path = os.path.join(self.tf_record_dir,
                                      "{}.info".format(job_id))
        # Fix: close/flush the sidecar file instead of leaking the handle.
        with open(info_save_path, "wb") as out_f:
            pickle.dump(data_id_manager.id_to_info, out_f)
        # launch estimator
        add_estimator_job(job_id)