Example No. 1
def _export_hdfs_model(hdfs_model_path, model_dir_hdfs, overwrite):
    """
    Exports a hdfs directory of model files to Hopsworks "Models" dataset

     Args:
        :hdfs_model_path: the path to the model files in hdfs
        :model_dir_hdfs: path to the directory in HDFS to put the model files
        :overwrite: boolean flag whether to overwrite in case a model already exists in the exported directory

    Returns:
           the path to the exported model files in HDFS
    """
    if hdfs.isdir(hdfs_model_path):
        for file_source_path in hdfs.ls(hdfs_model_path):
            model_name = file_source_path
            if constants.DELIMITERS.SLASH_DELIMITER in file_source_path:
                last_index = model_name.rfind(constants.DELIMITERS.SLASH_DELIMITER)
                model_name = model_name[last_index + 1:]
            dest_path = model_dir_hdfs + constants.DELIMITERS.SLASH_DELIMITER + model_name
            hdfs.cp(file_source_path, dest_path, overwrite=overwrite)
    elif hdfs.isfile(hdfs_model_path):
        model_name = hdfs_model_path
        if constants.DELIMITERS.SLASH_DELIMITER in hdfs_model_path:
            last_index = model_name.rfind(constants.DELIMITERS.SLASH_DELIMITER)
            model_name = model_name[last_index + 1:]
        dest_path = model_dir_hdfs + constants.DELIMITERS.SLASH_DELIMITER + model_name
        hdfs.cp(hdfs_model_path, dest_path, overwrite=overwrite)

    return model_dir_hdfs
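
A minimal usage sketch, assuming a Hopsworks environment where the `hops` library is available; the paths are illustrative, and in practice this private helper is invoked by `model.export()` rather than called directly:

from hops import hdfs

# Hypothetical paths: model files already staged in HDFS, copied into a
# version directory under the Models dataset.
src_dir = hdfs.project_path() + "Resources/my_model"
version_dir = hdfs.project_path() + "Models/my_model/1"
exported = _export_hdfs_model(src_dir, version_dir, overwrite=True)
print("model files exported to", exported)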
Example No. 2
def _clean_dir(clean_dir, keep=[]):
    """Deletes all files in a directory but keeps a few.
    """
    if not hopshdfs.isdir(clean_dir):
        raise ValueError(
            "{} is not a directory. Use `hops.hdfs.delete()` to delete single "
            "files.".format(clean_dir))
    for path in hopshdfs.ls(clean_dir):
        if path not in keep:
            hopshdfs.delete(path, recursive=True)
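
A minimal usage sketch, assuming the same `hops.hdfs` module imported as `hopshdfs`; note that the entries in `keep` must match the exact path strings returned by `hopshdfs.ls()`, which is why the example filters the listing rather than building paths by hand (the directory and file name are illustrative):

from hops import hdfs as hopshdfs

# Hypothetical experiment log directory: delete everything except the summary file.
logdir = hopshdfs.project_path() + "Experiments/my_run"
_clean_dir(logdir, keep=[path for path in hopshdfs.ls(logdir)
                         if path.endswith("/.summary.json")])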
Example No. 3
def _get_best(root_logdir, direction):

    min_val = sys.float_info.max
    min_logdir = None

    max_val = -sys.float_info.max  # sys.float_info.min is the smallest positive float, not the most negative
    max_logdir = None

    generation_folders = hdfs.ls(root_logdir)
    generation_folders.sort()

    for generation in generation_folders:
        for individual in hdfs.ls(generation):
            individual_files = hdfs.ls(individual, recursive=True)
            for file in individual_files:
                if file.endswith("/.metric"):
                    val = hdfs.load(file)
                    val = float(val)

                    if val > max_val:
                        max_val = val
                        max_logdir = file[:-8]

                    if val < min_val:
                        min_val = val
                        min_logdir = file[:-8]

    if direction.upper() == Direction.MAX:
        with hdfs.open_file(max_logdir + '/.outputs.json', flags="r") as fi:
            return_dict = json.loads(fi.read())
        return max_logdir, return_dict
    else:
        with hdfs.open_file(min_logdir + '/.outputs.json', flags="r") as fi:
            return_dict = json.loads(fi.read())
        return min_logdir, return_dict
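
A minimal usage sketch, assuming a log directory laid out as generation/individual folders (as the loops above expect) and that `Direction.MAX` compares equal to the upper-cased string "MAX"; the root path is illustrative:

# Hypothetical root log directory of an evolutionary search.
root_logdir = hdfs.project_path() + "Experiments/diff_evo_run/logs"
best_dir, best_outputs = _get_best(root_logdir, direction="max")
print("best trial logdir:", best_dir)
print("its recorded outputs:", best_outputs)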
Example No. 4
def _build_summary_json(logdir):

    combinations = []
    return_files = []
    hp_arr = None
    output_arr = None

    for experiment_dir in hdfs.ls(logdir):
        runs = hdfs.ls(experiment_dir, recursive=True)
        for run in runs:
            if run.endswith('.outputs.json'):
                return_files.append(run)

    for return_file in return_files:
        output_arr = _convert_return_file_to_arr(return_file)
        param_file = return_file.replace('outputs.json', 'hparams.json')
        # reset per run so a missing hparams file does not reuse the previous run's parameters
        hp_arr = None
        if hdfs.exists(param_file):
            hp_arr = _convert_param_to_arr(param_file)
        combinations.append({'parameters': hp_arr, 'outputs': output_arr})

    return dumps({'combinations': combinations})
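
A minimal usage sketch; it assumes `hdfs.dump` writes a string to an HDFS path, and the log directory and summary file name are illustrative:

# Hypothetical: build the summary for an experiment log directory and
# persist it next to the runs so the experiments service can read it.
logdir = hdfs.project_path() + "Experiments/grid_search_run"
summary_json = _build_summary_json(logdir)
hdfs.dump(summary_json, logdir + "/.summary.json")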
Example No. 5
def _build_summary_json(logdir):
    """Builds the summary json to be read by the experiments service.
    """
    combinations = []

    for trial in hopshdfs.ls(logdir):
        if hopshdfs.isdir(trial):
            return_file = trial + "/.outputs.json"
            hparams_file = trial + "/.hparams.json"
            if hopshdfs.exists(return_file) and hopshdfs.exists(hparams_file):
                metric_arr = experiment_utils._convert_return_file_to_arr(
                    return_file)
                hparams_dict = _load_hparams(hparams_file)
                combinations.append({
                    "parameters": hparams_dict,
                    "outputs": metric_arr
                })

    return json.dumps({"combinations": combinations},
                      default=json_default_numpy)
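
A minimal sketch of consuming the result; it relies only on the "combinations" key built above, and the log directory path is illustrative:

import json
from hops import hdfs as hopshdfs

# Hypothetical experiment log directory: count trials that contributed a
# complete (.hparams.json + .outputs.json) pair.
logdir = hopshdfs.project_path() + "Experiments/my_run"
summary = json.loads(_build_summary_json(logdir))
print(len(summary["combinations"]), "complete trials summarized")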
Example No. 6
def ls(self, dir_path, recursive=False, exclude_nn_addr=None):
    return hopshdfs.ls(
        dir_path, recursive=recursive, exclude_nn_addr=exclude_nn_addr
    )
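
A small sketch of why a thin wrapper like this is useful: calling code can depend on any object exposing the same `ls()` signature instead of importing `hops.hdfs` directly, which makes it easy to substitute another backend in tests. The helper below is hypothetical:

def count_entries(fs, dir_path):
    # `fs` is any object exposing ls(dir_path, recursive=...), such as the class above.
    return len(fs.ls(dir_path, recursive=True))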
Example No. 7
def export(model_path, model_name, model_version=None, overwrite=False, metrics=None, description=None, synchronous=True, synchronous_timeout=120):
    """
    Copies a trained model to the Models directory in the project and creates the directory structure of:

    >>> Models
    >>>      |
    >>>      - model_name
    >>>                 |
    >>>                 - version_x
    >>>                 |
    >>>                 - version_y

    For example if you run this:

    >>> from hops import model
    >>> model.export("iris_knn.pkl", "irisFlowerClassifier", metrics={'accuracy': accuracy})

    It will copy the local model file "iris_knn.pkl" to /Projects/projectname/Models/irisFlowerClassifier/1/iris_knn.pkl
    on HDFS, and overwrite any file with the same name that already exists in that directory.

    If "/model" is a local directory exported by TensorFlow, and you run:

    >>> model.export("/model", "mnist", metrics={'accuracy': accuracy, 'loss': loss})

    It will copy the contents of the model directory to /Projects/projectname/Models/mnist/1/, e.g. the "model.pb" file and
    the "variables" directory.

    Args:
        :model_path: path to the trained model (HDFS or local)
        :model_name: name of the model
        :model_version: version of the model
        :overwrite: boolean flag whether to overwrite in case a model already exists in the exported directory
        :metrics: dict of evaluation metrics to attach to model
        :description: description about the model
        :synchronous: whether to synchronously wait for the model to be indexed in the models REST endpoint
        :synchronous_timeout: max timeout in seconds for waiting for the model to be indexed

    Returns:
        The path to where the model was exported

    Raises:
        :ValueError: if there was an error with the export of the model due to invalid user input
        :ModelNotFound: if the model was not found
    """

    # Make sure model name is a string, users could supply numbers
    model_name = str(model_name)

    if not isinstance(model_path, string_types):
        model_path = model_path.decode()

    if not description:
        description = 'A collection of models for ' + model_name

    project_path = hdfs.project_path()

    assert hdfs.exists(project_path + "Models"), "Your project is missing a dataset named Models, please create it."

    if not hdfs.exists(model_path) and not os.path.exists(model_path):
        raise ValueError("the provided model_path: {} does not exist in HDFS or on the local filesystem".format(
            model_path))

    # make sure metrics are numbers
    if metrics:
        _validate_metadata(metrics)

    model_dir_hdfs = project_path + constants.MODEL_SERVING.MODELS_DATASET + \
                     constants.DELIMITERS.SLASH_DELIMITER + model_name + constants.DELIMITERS.SLASH_DELIMITER

    if not hdfs.exists(model_dir_hdfs):
        hdfs.mkdir(model_dir_hdfs)
        hdfs.chmod(model_dir_hdfs, "ug+rwx")

    # User did not specify model_version, pick the current highest version + 1, set to 1 if no model exists
    version_list = []
    if not model_version and hdfs.exists(model_dir_hdfs):
        model_version_directories = hdfs.ls(model_dir_hdfs)
        for version_dir in model_version_directories:
            try:
                if hdfs.isdir(version_dir):
                    version_list.append(int(version_dir[len(model_dir_hdfs):]))
            except:
                pass
        if len(version_list) > 0:
            model_version = max(version_list) + 1

    if not model_version:
        model_version = 1

    # Path to directory in HDFS to put the model files
    model_version_dir_hdfs = model_dir_hdfs + str(model_version)

    # If version directory already exists and we are not overwriting it then fail
    if not overwrite and hdfs.exists(model_version_dir_hdfs):
        raise ValueError("Could not create model directory: {}, the path already exists, "
                         "set flag overwrite=True "
                         "to remove the version directory and create the correct directory structure".format(model_version_dir_hdfs))

    # Overwrite version directory by deleting all content (this is needed for Provenance to register Model as deleted)
    if overwrite and hdfs.exists(model_version_dir_hdfs):
        hdfs.delete(model_version_dir_hdfs, recursive=True)
        hdfs.mkdir(model_version_dir_hdfs)

    # At this point we can create the version directory if it does not exist
    if not hdfs.exists(model_version_dir_hdfs):
        hdfs.mkdir(model_version_dir_hdfs)

    # Export the model files
    if os.path.exists(model_path):
        export_dir = _export_local_model(model_path, model_version_dir_hdfs, overwrite)
    else:
        export_dir = _export_hdfs_model(model_path, model_version_dir_hdfs, overwrite)

    print("Exported model " + model_name + " as version " + str(model_version) + " successfully.")

    jobName = None
    if constants.ENV_VARIABLES.JOB_NAME_ENV_VAR in os.environ:
        jobName = os.environ[constants.ENV_VARIABLES.JOB_NAME_ENV_VAR]

    kernelId = None
    if constants.ENV_VARIABLES.KERNEL_ID_ENV_VAR in os.environ:
        kernelId = os.environ[constants.ENV_VARIABLES.KERNEL_ID_ENV_VAR]

    # Attach modelName_modelVersion to experiment directory
    model_summary = {'name': model_name, 'version': model_version, 'metrics': metrics,
                     'experimentId': None, 'description': description, 'jobName': jobName, 'kernelId': kernelId}
    if 'ML_ID' in os.environ:
        # Attach link from experiment to model
        experiment_utils._attach_model_link_xattr(os.environ['ML_ID'], model_name + '_' + str(model_version))
        # Attach model metadata to models version folder
        model_summary['experimentId'] = os.environ['ML_ID']
        experiment_utils._attach_model_xattr(model_name + "_" + str(model_version), experiment_utils.dumps(model_summary))
    else:
        experiment_utils._attach_model_xattr(model_name + "_" + str(model_version), experiment_utils.dumps(model_summary))

    # Model metadata is attached asynchronously by Epipe, so this is necessary to ensure that following steps in a pipeline will not fail
    if synchronous:
        start_time = time.time()
        sleep_seconds = 5
        for i in range(int(synchronous_timeout/sleep_seconds)):
            try:
                time.sleep(sleep_seconds)
                print("Polling " + model_name + " version " + str(model_version) + " for model availability.")
                resp = get_model(model_name, model_version)
                if resp.ok:
                    print("Model now available.")
                    return export_dir
                print(model_name + " not ready yet, retrying in " + str(sleep_seconds) + " seconds.")
            except ModelNotFound:
                pass
        print("Model not available during polling, set a higher value for synchronous_timeout to wait longer.")

    return export_dir
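
A usage sketch exercising the optional arguments, assuming a Hopsworks environment; the local path, metrics, and description are illustrative:

from hops import model

export_dir = model.export(
    "/tmp/saved_model",            # local directory produced by the training code
    "mnist",                       # model name under the Models dataset
    model_version=2,               # omit to auto-pick highest existing version + 1
    overwrite=True,                # replace version 2 if it already exists
    metrics={"accuracy": 0.97},
    description="CNN trained on MNIST",
    synchronous=True,
    synchronous_timeout=300,
)
print("model exported to", export_dir)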