Example #1
def keras_train_and_save(estimator, model_params, save, FLAGS,
                         train_dataset_fn, val_dataset_fn, label_meta, epochs,
                         verbose, metric_names, validation_steps,
                         load_pretrained_model, model_meta):
    print("Start training using keras model...")
    classifier, has_none_optimizer = keras_compile(estimator, model_params,
                                                   save, metric_names)
    train_dataset = train_dataset_fn()
    if val_dataset_fn is not None:
        validate_dataset = val_dataset_fn()
    else:
        validate_dataset = None

    if load_pretrained_model:
        # FIXME(typhoonzero): copied from runtime.tensorflow.train_keras
        inputs, targets = next(iter(train_dataset.take(1)))
        classifier.evaluate(inputs, targets)
        classifier.load_weights(save)

    if len(FLAGS.worker_hosts.split(",")) > 1:
        keras_train_distributed(classifier, model_params, save, model_meta,
                                FLAGS, train_dataset_fn, val_dataset_fn)
    else:
        keras_train_compiled(classifier, save, train_dataset, validate_dataset,
                             label_meta, epochs, verbose, model_meta,
                             validation_steps, has_none_optimizer)

    print("saving keras model to: %s" % FLAGS.sqlflow_oss_modeldir)
    if len(FLAGS.worker_hosts.split(",")) > 1:
        oss.save_dir(FLAGS.sqlflow_oss_modeldir, save)
    else:
        oss.save_file(FLAGS.sqlflow_oss_modeldir, save)
    oss.save_file(FLAGS.sqlflow_oss_modeldir, "model_meta.json")
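In the load_pretrained_model branch the classifier is run on one batch via evaluate() before load_weights(), because a subclassed Keras model only creates its variables on its first forward pass, so the checkpoint needs existing weights to map onto. A minimal sketch of that build-then-load pattern with plain tf.keras; the tiny model and checkpoint prefix below are illustrative and not part of SQLFlow:

import tensorflow as tf

class TinyModel(tf.keras.Model):
    """Subclassed model: variables are created lazily on the first call."""

    def __init__(self):
        super().__init__()
        self.dense = tf.keras.layers.Dense(1)

    def call(self, x):
        return self.dense(x)

model = TinyModel()
model.compile(optimizer="sgd", loss="mse")

x = tf.random.normal([8, 4])
y = tf.random.normal([8, 1])

# Run one batch first, as the SQLFlow snippet does with evaluate(): this
# builds the model's variables so a checkpoint has weights to map onto.
model.evaluate(x, y, verbose=0)
model.save_weights("tiny_ckpt")   # hypothetical checkpoint prefix (TF format)
model.load_weights("tiny_ckpt")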
Example #2
    def save_to_oss(self, oss_model_dir, local_dir=None):
        """
        This function archives all the files under local_dir into a
        tarball and saves it into the OSS model directory.

        Args:
            oss_model_dir (str): the OSS model directory to save to.
                It is in the format of oss://bucket/path/to/dir/.
            local_dir (str): the local directory to archive. Defaults to
                the current working directory.

        Returns:
            None.
        """
        if local_dir is None:
            local_dir = os.getcwd()

        with temp_file.TemporaryDirectory() as tmp_dir:
            tarball = os.path.join(tmp_dir, TARBALL_NAME)
            self._zip(local_dir, tarball)
            oss.save_file(oss_model_dir, tarball, TARBALL_NAME)

        with temp_file.TemporaryDirectory() as tmp_dir:
            model_obj_file = os.path.join(tmp_dir, MODEL_OBJ_FILE_NAME)
            with open(model_obj_file, "w") as f:
                f.write(
                    json.dumps(self._to_dict(),
                               cls=JSONEncoderWithFeatureColumn))
            oss.save_file(oss_model_dir, model_obj_file, MODEL_OBJ_FILE_NAME)
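The method above stages everything through temporary directories: it packs local_dir into a tarball, uploads it, then serializes the model metadata to JSON and uploads that as a second object. A rough, self-contained sketch of the same pack-and-upload flow using only the standard library; the upload callback stands in for oss.save_file, and the file names are illustrative:

import json
import os
import tarfile
import tempfile

def pack_and_upload(local_dir, upload, meta=None,
                    tarball_name="model.tar.gz",
                    meta_name="model_meta.json"):
    # upload(path, name) is a placeholder for whatever writes the file to
    # object storage; in the SQLFlow snippet that role is played by
    # oss.save_file(oss_model_dir, path, name).
    with tempfile.TemporaryDirectory() as tmp_dir:
        tarball = os.path.join(tmp_dir, tarball_name)
        with tarfile.open(tarball, "w:gz") as tar:
            tar.add(local_dir, arcname=".")   # archive the whole directory
        upload(tarball, tarball_name)

        if meta is not None:
            meta_file = os.path.join(tmp_dir, meta_name)
            with open(meta_file, "w") as f:
                json.dump(meta, f)
            upload(meta_file, meta_name)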
Example #3
def save_model(model_dir, filename, model_params, train_params, feature_metas,
               feature_column_names, label_meta, fc_map_ir):
    oss.save_file(model_dir, filename)
    oss.save_file(model_dir, "{}.pmml".format(filename))
    oss.save_metas(
        model_dir,
        1,
        "xgboost_model_desc",
        "",  # estimator = ""
        model_params,
        train_params,
        feature_metas,
        feature_column_names,
        label_meta,
        fc_map_ir)
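A hypothetical call site for the helper above; the argument values are placeholders, since the exact shape of the metadata dictionaries is defined elsewhere in the SQLFlow runtime:

save_model(
    "oss://my-bucket/models/xgb_demo/",       # hypothetical OSS model dir
    "my_model",                                # local booster dump filename
    model_params={"booster": "gbtree"},        # illustrative XGBoost params
    train_params={"num_boost_round": 30},
    feature_metas={},                          # filled by the SQLFlow runtime
    feature_column_names=["f1", "f2"],
    label_meta={"feature_name": "label"},
    fc_map_ir=None)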
Example #4
def keras_train_and_save_legacy(estimator, model_params, save, FLAGS,
                                train_dataset_fn, val_dataset_fn, label_meta,
                                epochs, verbose, metric_names,
                                validation_steps, load_pretrained_model,
                                model_meta, is_pai):
    print("Start training using keras model...")
    try:
        classifier, has_none_optimizer = keras_compile(estimator, model_params,
                                                       metric_names)
    except Exception as e:
        if hasattr(estimator, "sqlflow_train_loop"):
            sys.stderr.write(
                "compile keras model failed, ignoring this error "
                "since the model seems to defined sqlflow_train_loop.")
            classifier = init_model_with_feature_column(
                estimator, model_params, has_none_optimizer=True)
            has_none_optimizer = True
        else:
            raise e

    train_dataset = train_dataset_fn()
    if val_dataset_fn is not None:
        validate_dataset = val_dataset_fn()
    else:
        validate_dataset = None

    if load_pretrained_model:
        # Must run one batch to initialize parameters before load_weights
        inputs, targets = next(iter(train_dataset.take(1)))
        classifier.evaluate(inputs, targets)

        # NOTE(sneaxiy): should we save/load optimizer info for incremental
        # training, or let users to write the same WITH statements in SQL?
        load_keras_model_weights(classifier, save)

    if len(FLAGS.worker_hosts.split(",")) > 1:
        keras_train_distributed(classifier, model_params, save, model_meta,
                                FLAGS, train_dataset_fn, val_dataset_fn,
                                is_pai)
    else:
        keras_train_compiled(classifier, save, train_dataset, validate_dataset,
                             label_meta, epochs, verbose, model_meta,
                             validation_steps, has_none_optimizer)
    if is_pai:
        print("saving keras model to: %s" % FLAGS.sqlflow_oss_modeldir)
        oss.save_dir(FLAGS.sqlflow_oss_modeldir, save)
        oss.save_file(FLAGS.sqlflow_oss_modeldir, "model_meta.json")
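The except branch above falls back to models that bypass Keras compile/fit entirely and instead expose a sqlflow_train_loop hook that the runtime calls with the training dataset. A minimal sketch of what such a model could look like; the hook's exact signature is an assumption here, not taken from the SQLFlow sources:

import tensorflow as tf

class CustomLoopModel(tf.keras.Model):
    """Model that trains itself; compile() may legitimately fail for it."""

    def __init__(self):
        super().__init__()
        self.dense = tf.keras.layers.Dense(1)

    def call(self, x):
        return self.dense(x)

    def sqlflow_train_loop(self, dataset, epochs=1, verbose=0):
        # Assumed hook: the runtime hands over the tf.data.Dataset and lets
        # the model run its own optimization loop.
        optimizer = tf.keras.optimizers.SGD(0.01)
        for _ in range(epochs):
            for inputs, targets in dataset:
                with tf.GradientTape() as tape:
                    loss = tf.reduce_mean(tf.square(self(inputs) - targets))
                grads = tape.gradient(loss, self.trainable_variables)
                optimizer.apply_gradients(zip(grads, self.trainable_variables))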
Example #5
def keras_train_and_save(estimator, model_params, save, FLAGS,
                         train_dataset_fn, val_dataset_fn, label_meta, epochs,
                         verbose, metric_names, validation_steps, load,
                         model_meta, is_pai):
    print("Start training using keras model...")
    try:
        classifier, has_none_optimizer = keras_compile(estimator, model_params,
                                                       metric_names)
    except Exception:
        if hasattr(estimator, "sqlflow_train_loop"):
            sys.stderr.write(
                "compile keras model failed, ignoring this error "
                "since the model seems to defined sqlflow_train_loop.")
            classifier = init_model_with_feature_column(
                estimator, model_params, has_none_optimizer=True)
            has_none_optimizer = True
        else:
            six.reraise(*sys.exc_info())

    train_dataset = train_dataset_fn()
    if val_dataset_fn is not None:
        validate_dataset = val_dataset_fn()
    else:
        validate_dataset = None

    if load:
        # FIXME(typhoonzero): copied from runtime.tensorflow.train_keras
        inputs, targets = next(iter(train_dataset.take(1)))
        classifier.evaluate(inputs, targets)
        load_keras_model_weights(classifier, load)

    if len(FLAGS.worker_hosts.split(",")) > 1:
        keras_train_distributed(classifier, model_params, save, model_meta,
                                FLAGS, train_dataset_fn, val_dataset_fn,
                                is_pai)
    else:
        keras_train_compiled(classifier, save, train_dataset, validate_dataset,
                             label_meta, epochs, verbose, model_meta,
                             validation_steps, has_none_optimizer)

    if is_pai:
        print("saving keras model to: %s" % FLAGS.sqlflow_oss_modeldir)
        oss.save_dir(FLAGS.sqlflow_oss_modeldir, save)
        oss.save_file(FLAGS.sqlflow_oss_modeldir, "model_meta.json")
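Compared with the previous example, the only change in the error handling is the re-raise: six.reraise(*sys.exc_info()) propagates the active exception with its original traceback on both Python 2 and Python 3, instead of raise e. The pattern in isolation; might_fail and the fallback are stand-ins:

import sys
import six

def might_fail():
    raise ValueError("boom")

def call_with_fallback(fallback=None):
    try:
        return might_fail()
    except Exception:
        if fallback is not None:
            return fallback()          # recover if a fallback is available
        # Otherwise re-raise the active exception, keeping its traceback.
        six.reraise(*sys.exc_info())

print(call_with_fallback(fallback=lambda: "recovered"))  # -> recovered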
Example #6
    def save_to_oss(self, oss_model_dir, local_dir=None):
        """
        This function archives all the files under local_dir into a
        tarball and saves it into the OSS model directory.

        Args:
            oss_model_dir (str): the OSS model directory to save to.
                It is in the format of oss://bucket/path/to/dir/.
            local_dir (str): the local directory to archive. Defaults to
                the current working directory.

        Returns:
            None.
        """
        if local_dir is None:
            local_dir = os.getcwd()

        with temp_file.TemporaryDirectory() as tmp_dir:
            tarball = os.path.join(tmp_dir, TARBALL_NAME)
            self._zip(local_dir, tarball)
            oss.save_file(oss_model_dir, tarball, TARBALL_NAME)
Example #7
def save_model(model_dir, filename, model_params, train_params, feature_metas,
               feature_column_names, label_meta, feature_column_code):
    pai_model_store.save_file(model_dir, filename)
    pai_model_store.save_file(model_dir, "{}.pmml".format(filename))
    pai_model_store.save_file(model_dir, "model_meta.json")
    # (TODO:lhw) remove this function call, use the new metadata in load_metas
    pai_model_store.save_metas(
        model_dir,
        1,
        "xgboost_model_desc",
        "",  # estimator = ""
        model_params,
        train_params,
        feature_metas,
        feature_column_names,
        label_meta,
        feature_column_code)