Code Example #1
File: model_chess.py Project: sky4star/Chess-Zero
 def load(self, config_path, weight_path):
     mc = self.config.model
     resources = self.config.resource
     # In distributed mode, refresh the local best-model files from the FTP server before loading.
     if mc.distributed and config_path == resources.model_best_config_path:
         try:
             logger.debug("loading model from server")
             ftp_connection = ftplib.FTP(
                 resources.model_best_distributed_ftp_server,
                 resources.model_best_distributed_ftp_user,
                 resources.model_best_distributed_ftp_password)
             ftp_connection.cwd(
                 resources.model_best_distributed_ftp_remote_path)
             ftp_connection.retrbinary("RETR model_best_config.json",
                                       open(config_path, 'wb').write)
             ftp_connection.retrbinary("RETR model_best_weight.h5",
                                       open(weight_path, 'wb').write)
             ftp_connection.quit()
         except:
             # Best-effort download: on any FTP error, fall back to whatever local files exist.
             pass
     from tensorflow import get_default_graph
     if os.path.exists(config_path) and os.path.exists(weight_path):
         logger.debug(f"loading model from {config_path}")
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.graph = get_default_graph()
         self.digest = self.fetch_digest(weight_path)
         logger.debug(f"loaded model digest = {self.digest}")
         #print(self.model.summary)
         return True
     else:
         logger.debug(
             f"model files does not exist at {config_path} and {weight_path}"
         )
         return False
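
Note on example #1: `retrbinary` is handed `open(config_path, 'wb').write` directly, so the downloaded files are never explicitly closed or flushed before being re-read a few lines later, and the bare `except: pass` hides any FTP failure. A minimal sketch of the same download using context managers; the resource attribute names are taken from the example above, while the helper name `download_best_model` is an illustrative assumption:

import ftplib

def download_best_model(resources, config_path, weight_path):
    # ftplib.FTP is a context manager since Python 3.3; it sends QUIT and
    # closes the connection when the block exits.
    with ftplib.FTP(resources.model_best_distributed_ftp_server,
                    resources.model_best_distributed_ftp_user,
                    resources.model_best_distributed_ftp_password) as ftp:
        ftp.cwd(resources.model_best_distributed_ftp_remote_path)
        with open(config_path, 'wb') as f:
            ftp.retrbinary("RETR model_best_config.json", f.write)
        with open(weight_path, 'wb') as f:
            ftp.retrbinary("RETR model_best_weight.h5", f.write)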
Code Example #2
 def load(self, config_path, weight_path):
     mc = self.config.model
     resources = self.config.resource
     if mc.distributed and config_path == resources.model_best_config_path:
         try:
             logger.debug("loading model from server")
             ftp_connection = ftplib.FTP(
                 resources.model_best_distributed_ftp_server,
                 resources.model_best_distributed_ftp_user,
                 resources.model_best_distributed_ftp_password)
             ftp_connection.cwd(
                 resources.model_best_distributed_ftp_remote_path)
             ftp_connection.retrbinary("RETR model_best_config.json",
                                       open(config_path, 'wb').write)
             ftp_connection.retrbinary("RETR model_best_weight.h5",
                                       open(weight_path, 'wb').write)
             ftp_connection.quit()
         except:
             pass
     if os.path.exists(config_path) and os.path.exists(weight_path):
         logger.debug("loading model from %s" % (config_path))
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.model._make_predict_function()
         self.digest = self.fetch_digest(weight_path)
         logger.debug("loaded model digest = %s" % (self.digest))
         return True
     else:
         logger.debug("model files does not exist at %s and %s" %
                      (config_path, weight_path))
         return False
Code Example #3
    def load(self, config_path, weight_path):
        mc = self.config.model
        resources = self.config.resource
        #if mc.distributed and config_path == resources.model_best_config_path:
        if (hasattr(mc, "distributed") and mc.distributed
                and config_path == resources.model_best_config_path):
            logger.debug("loading model from server")
            ftp_connection = ftplib.FTP(
                resources.model_best_distributed_ftp_server,
                resources.model_best_distributed_ftp_user,
                resources.model_best_distributed_ftp_password)
            ftp_connection.cwd(
                resources.model_best_distributed_ftp_remote_path)
            ftp_connection.retrbinary("RETR model_best_config.json",
                                      open(config_path, 'wb').write)
            ftp_connection.retrbinary("RETR model_best_weight.h5",
                                      open(weight_path, 'wb').write)
            ftp_connection.quit()

        if os.path.exists(config_path) and os.path.exists(weight_path):
            logger.debug(f"loading model from {config_path}")
            with open(config_path, "rt") as f:
                self.model = Model.from_config(json.load(f))
            self.model.load_weights(weight_path)
            self.digest = self.fetch_digest(weight_path)
            logger.debug(f"loaded model digest = {self.digest}")
            return True
        else:
            logger.debug(
                f"model files does not exist at {config_path} and {weight_path}"
            )
            return False
Code Example #4
File: model.py Project: a-k-r-a-k-r/PyChess
    def load(self, config_path, weight_path):
        """

        :param str config_path: path to the file containing the entire configuration
        :param str weight_path: path to the file containing the model weights
        :return: true iff successful in loading
        """
        mc = self.config.model
        resources = self.config.resource
        if mc.distributed and config_path == resources.model_best_config_path:
            try:
                ftp_connection = ftplib.FTP(
                    resources.model_best_distributed_ftp_server,
                    resources.model_best_distributed_ftp_user,
                    resources.model_best_distributed_ftp_password)
                ftp_connection.cwd(
                    resources.model_best_distributed_ftp_remote_path)
                ftp_connection.retrbinary("RETR model_best_config.json",
                                          open(config_path, 'wb').write)
                ftp_connection.retrbinary("RETR model_best_weight.h5",
                                          open(weight_path, 'wb').write)
                ftp_connection.quit()
            except:
                pass
        if os.path.exists(config_path) and os.path.exists(weight_path):
            with open(config_path, "rt") as f:
                self.model = Model.from_config(json.load(f))
            self.model.load_weights(weight_path)
            self.model._make_predict_function()
            self.digest = self.fetch_digest(weight_path)
            return True
        else:
            return False
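
Examples #1-#4 all record `self.fetch_digest(weight_path)` as a fingerprint of the loaded weights, but the helper itself does not appear in this listing. A plausible minimal implementation is sketched below; the choice of SHA-256 and the 256 KB chunk size are assumptions, not taken from the projects:

import hashlib
import os

def fetch_digest(weight_path):
    # Hash the weight file in chunks so large .h5 files are not read into memory at once.
    if not os.path.exists(weight_path):
        return None
    sha = hashlib.sha256()
    with open(weight_path, "rb") as f:
        for chunk in iter(lambda: f.read(256 * 1024), b""):
            sha.update(chunk)
    return sha.hexdigest()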
Code Example #5
    def load(self, config_path, weight_path):
        if os.path.exists(config_path) and os.path.exists(weight_path):
            logger.debug(f"loading model from {config_path}")
            with open(config_path, "rt") as f:
                config = json.load(f)

                if 'weight_digest' in config:
                    exp_digest = config['weight_digest']
                    act_digest = self.fetch_digest(weight_path)
                    if exp_digest != act_digest:
                        logger.debug(
                            f"exp weight digest {exp_digest}, act {act_digest}"
                        )
                        return None

                # 'steps' is training metadata stored alongside the Keras config, not part of it.
                try:
                    steps = int(config['steps'])
                except (KeyError, ValueError):
                    steps = None
                config.pop('steps', None)
                self.model = Model.from_config(config)
            self.model.load_weights(weight_path)
            self.digest = self.fetch_digest(weight_path)
            logger.debug(f"loaded model digest = {self.digest}")
            return steps
        else:
            logger.debug(
                f"model files does not exist at {config_path} and {weight_path}"
            )
            return None
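
Example #5 is the only variant that verifies a `weight_digest` and reads back a `steps` counter from the config JSON, which implies a matching save side that is not part of this listing. A hedged sketch of what that writer would need to produce (the function name, argument order, and default value are assumptions); `fetch_digest` is the digest routine sketched earlier:

import json

def save_model(model, config_path, weight_path, steps=0):
    # Write the weights first so the digest recorded in the config matches the file on disk.
    model.save_weights(weight_path)
    config = model.get_config()
    config['steps'] = steps
    config['weight_digest'] = fetch_digest(weight_path)
    with open(config_path, "wt") as f:
        json.dump(config, f)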
Code Example #6
File: resicnn.py Project: ikuinen/ImageRestore
 def load(self, config_path, model_path):  # load model
     print('restore model...')
     if os.path.exists(config_path) and os.path.exists(model_path):
         with open(config_path, 'r') as fp:
             self.model = Model.from_config(json.load(fp))
             self.model.load_weights(model_path)
         return True
     return False
Code Example #7
 def load(self, config_path, weight_path):
     if os.path.exists(config_path) and os.path.exists(weight_path):
         logger.debug(f"loading model from {config_path}")
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.digest = self.fetch_digest(weight_path)
         logger.debug(f"loaded model digest = {self.digest}")
         return True
     else:
         logger.debug(f"model files does not exist at {config_path} and {weight_path}")
         return False
Code Example #8
 def load(self, config_path, weight_path):
     if os.path.exists(config_path) and os.path.exists(weight_path):
         print("loading model from ", config_path)
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.digest = self.fetch_digest(weight_path)
         print("loaded model digest = ", self.digest)
         return True
     else:
         print("model files does not exist at ", config_path, " and ",
               weight_path)
         return False
Code Example #9
 def load(self, config_path, weight_path):
     if os.path.exists(config_path) and os.path.exists(weight_path):
         logger.debug(f"loading model from {config_path}")
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.digest = self.fetch_digest(weight_path)
         self.graph = tf.get_default_graph()
         logger.debug(f"loaded model digest = {self.digest}")
         return True
     else:
         logger.debug(f"model files does not exist at {config_path} and {weight_path}")
         return False
Code Example #10
 def load(self, config_path, weight_path):
     if os.path.exists(config_path) and os.path.exists(weight_path):
         print(f"loading model from {config_path}")
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.graph = tf.get_default_graph()
         print(f"loaded model digest = {self.fetch_digest(weight_path)}")
         return True
     else:
         print(
             f"model files does not exist at {config_path} and {weight_path}"
         )
         return False
Code Example #11
 def load(self, config_path, weight_path):
     if os.path.exists(config_path) and os.path.exists(weight_path):
         logger.debug(f"loading model from {config_path}")
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.graph = get_default_graph()
         # self.model._make_predict_function()
         self.digest = self.fetch_digest(weight_path)
         logger.debug(f"loaded model digest = {self.digest}")
         return True
     else:
         logger.debug(
             f"model files do not exist at {config_path} and {weight_path}")
         return False
Code Example #12
File: model.py Project: bababax11/slipe
 def load(self, config_path: str, weight_path: str) -> bool:
     if os.path.exists(weight_path):  # os.path.exists(config_path) and
         logger.debug(f"loading model from {config_path}")
         with open(config_path, "rt") as f:
             self.model = Model.from_config(json.load(f))
         self.model.load_weights(weight_path)
         self.model.compile(
             loss='mse', optimizer=Adam(lr=self.config.model.learning_rate))
         self.model.summary()
         self.digest = self.fetch_digest(weight_path)
         logger.debug(f"loaded model digest = {self.digest}")
         return True
     else:
         logger.debug(
             f"model files does not exist at {config_path} and {weight_path}"
         )
         return False
Code Example #13
    def Load(self, configPath, weightPath):

        # If the files already exist, poll until both are writable, i.e. no other
        # process is still writing them.
        if os.access(configPath, os.F_OK):
            while not (os.access(configPath, os.W_OK)
                       and os.access(weightPath, os.W_OK)):
                time.sleep(0.001)

        # Keep retrying: json.load() or load_weights() can fail while another
        # process is still writing the files.
        while True:
            try:
                with open(configPath, "rt") as f:
                    config = json.load(f)
                    self.OptimizeCount = config["OptimizeCount"]
                    self.TimeLimit = config["TimeLimit"]
                    self.Model = Model.from_config(config)
                    self.Model.load_weights(weightPath)
                break
            except:
                time.sleep(0.1)
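
Example #13 busy-waits with `time.sleep()` because another process may still be rewriting the config and weight files. A hedged sketch of the complementary writer-side fix is to publish the config atomically, so a reader can never observe a half-written file (the function and variable names below are illustrative):

import json
import os
import tempfile

def write_config_atomically(config, config_path):
    # Write to a temporary file in the same directory, then rename it over the
    # target; os.replace() is atomic on both POSIX and Windows.
    directory = os.path.dirname(config_path) or "."
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    with os.fdopen(fd, "w") as f:
        json.dump(config, f)
    os.replace(tmp_path, config_path)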
Code Example #14
    def load(self, config_path, weight_path):
        """

        :param str config_path: path to the file containing the entire configuration
        :param str weight_path: path to the file containing the model weights
        :return: true iff successful in loading
        """
        mc = self.config.model
        resources = self.config.resource
        if os.path.exists(config_path) and os.path.exists(weight_path):
            logger.debug(f"loading model from {config_path}")
            with open(config_path, "rt") as f:
                self.model = Model.from_config(json.load(f))
            self.model.load_weights(weight_path)
            self.model._make_predict_function()
            self.digest = self.fetch_digest(weight_path)
            logger.debug(f"loaded model digest = {self.digest}")
            return True
        else:
            logger.debug(f"model files does not exist at {config_path} and {weight_path}")
            return False
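
The remaining differences between these variants are about making the loaded model safe to call from other threads: examples #1 and #9-#11 capture the TensorFlow 1.x default graph with `get_default_graph()`, while examples #2, #4, and #14 call the private Keras method `_make_predict_function()`. A minimal sketch of getting the same effect without the private API is to run one dummy prediction on the loading thread; the 18×8×8 input shape below is an assumption about the board encoding, not something stated in this listing:

import numpy as np

# `model` stands for the Keras model stored in self.model by load() above.
# In standalone Keras 2.x, predict() builds its prediction function on first
# use, so this warm-up call has the same effect as _make_predict_function().
dummy_input = np.zeros((1, 18, 8, 8), dtype=np.float32)
model.predict(dummy_input)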