import os
from shutil import copyfile

import tensorflow as tf
import umap
from PIL import Image
from tensorflow.keras.datasets import cifar10, cifar100

# Project-local helpers assumed to be defined elsewhere in this module or
# importable alongside it: f_utils (file I/O), m_utils (model utils),
# d_utils (dataset utils), feat_utils (feature/similarity utils), plus the
# save_files and count_intermediate_params helpers and the base_path_src
# constant.


def compute_scores():
    model_name = "vgg16"
    dataset_name = "iconic200"
    metric_name = "cosine"
    layer_name = "block1_conv1"
    topx = 15

    sim_base_path = "app/public/assets/semsearch/similarity"
    score_base_path = "app/public/assets/semsearch/scores"

    similarity_metrics = feat_utils.list_distance_metrics()

    model_architectures = m_utils.get_supported_models()
    dataset_details = d_utils.get_supported_datasets()
    dataset_result_holder = {}
    for dataset_detail in dataset_details:
        dataset_name = dataset_detail["name"]
        model_result_holder = {}
        for model_detail in model_architectures:
            model_name = model_detail["name"]
            metric_holder = {}
            layer_names = m_utils.get_model_layer_names(model_name)
            for metric_name in similarity_metrics:
                layer_score_holder = {}
                score_path = os.path.join(score_base_path, dataset_name,
                                          model_name)
                f_utils.mkdir(score_path)
                score_path = os.path.join(score_path, metric_name + ".json")

                class_details = m_utils.get_class_details(dataset_name)
                for layer_name in layer_names:
                    sim_path = os.path.join(sim_base_path, dataset_name,
                                            model_name, metric_name,
                                            layer_name + ".json")

                    print(sim_path)
                    sim_details = f_utils.load_json_file(sim_path)
                    model_score_per_image_holder = []
                    for i in range(len(sim_details)):
                        main_image = str(i)
                        # skip the first entry (the query image itself) and
                        # keep the topx most similar results
                        each_sim = sim_details[main_image][1:topx + 1]
                        model_score = m_utils.compute_performance(
                            each_sim, main_image, class_details)
                        model_score_per_image_holder.append(model_score * 100)

                    layer_score_holder[
                        layer_name] = model_score_per_image_holder

                metric_holder[metric_name] = layer_score_holder
                f_utils.save_json_file(score_path, layer_score_holder)
            model_result_holder[model_name] = metric_holder
        dataset_result_holder[dataset_name] = model_result_holder
    print("Score generation complete")
    score_save_path = "app/src/assets/semsearch/modelscores.json"
    f_utils.save_json_file(score_save_path, dataset_result_holder)
def generate_model_details():
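    """Collect the supported models, datasets, and distance metrics and
    save them together as details.json for the frontend."""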
    model_details = m_utils.get_all_model_details()
    dataset_details = d_utils.get_supported_datasets()
    semsearch_details = {
        "models": model_details,
        "datasets": dataset_details,
        "metrics": feat_utils.list_distance_metrics()
    }
    semsearch_details_save_path = os.path.join(base_path_src, "details.json")
    f_utils.save_json_file(semsearch_details_save_path, semsearch_details)
    tf.logging.info(">> Finished saving model dteails " +
                    semsearch_details_save_path)
def generate_dataset(dataset_params):
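    """Materialize a dataset on disk. For cifar100, save the train and
    test splits; for cifar10, sample an even number of images per class
    (up to dataset_params["dataset_size"] in total) and write a
    classes.json index."""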
    if dataset_params["name"] == "cifar100":
        dataset_root_dir = dataset_params["path"]
        train_path = os.path.join(dataset_root_dir, "train")
        test_path = os.path.join(dataset_root_dir, "test")

        # download the CIFAR100 images via the keras dataset loader
        (x_train, y_train), (x_test,
                             y_test) = cifar100.load_data(label_mode='fine')

        # save images into the train and test folders
        save_files(train_path, x_train)
        save_files(test_path, x_test)

        tf.logging.info("  >> Cifar images saved to  datasets directory " +
                        dataset_root_dir)
    elif dataset_params["name"] == "cifar10":
        class_details = []
        (x_train, y_train), (x_test, y_test) = cifar10.load_data()

        category_counter = {}
        num_per_category = round(dataset_params["dataset_size"] / 10)
        c_counter = 0
        f_utils.mkdir(dataset_params["path"])

        for i, val in enumerate(y_train):
            val = val[0]
            if val not in category_counter:
                category_counter[val] = 0
            # save up to num_per_category images for each class
            if category_counter[val] < num_per_category:
                class_details.append({str(c_counter): str(val)})
                category_counter[val] += 1
                img = Image.fromarray(x_train[i], 'RGB')
                img.save(os.path.join(dataset_params["path"],
                                      str(c_counter) + '.jpg'))
                c_counter += 1
                if c_counter >= dataset_params["dataset_size"]:
                    break

        f_utils.save_json_file(
            os.path.join(dataset_params["path"], "classes.json"),
            class_details)

        tf.logging.info("  >> Cifar10 images saved to  datasets directory " +
                        dataset_params["path"])
def process_dataset_labels():
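    """Build, for every dataset, a class -> members map, a member -> class
    dictionary, and a sorted class list, and save them together as
    datasetdictionary.json."""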
    dataset_path = "app/public/assets/semsearch/datasets"
    dataset_names = os.listdir(dataset_path)
    main_holder = {}
    all_holder = {}
    all_main_dict_holder = {}
    dict_list_holder = {}
    for dataset_name in dataset_names:
        if dataset_name != ".DS_Store":
            print(dataset_name)
            class_detail_holder = {}
            class_main_dict = {}
            class_details = f_utils.load_json_file(
                os.path.join(dataset_path, dataset_name, "classes.json"))

            for detail in class_details:
                # each detail is a single-entry dict: {image_index: class}
                class_member, class_name = next(iter(detail.items()))
                class_main_dict[class_member] = class_name
                class_detail_holder.setdefault(class_name,
                                               []).append(class_member)

            all_holder[dataset_name] = class_detail_holder
            all_main_dict_holder[dataset_name] = class_main_dict
            class_list = sorted(class_detail_holder.keys())
            dict_list_holder[dataset_name] = class_list
    print(dict_list_holder)
    out_path = "app/src/assets/semsearch/"
    main_holder["classes"] = all_holder
    main_holder["dictionary"] = all_main_dict_holder
    main_holder["classlist"] = dict_list_holder

    f_utils.save_json_file(os.path.join(out_path, "datasetdictionary.json"),
                           main_holder)
    tf.logging.info(" >> Fininshed generating class dictionaries")
def process_dataset(dataset_path):
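    """Flatten a class-per-subfolder dataset into sequentially numbered
    .jpg files and write a classes.json file mapping each image index to
    its class."""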
    class_names = os.listdir(dataset_path)
    path_holder = []
    print(class_names)
    for class_name in class_names:
        if class_name != ".DS_Store":
            f_path = os.path.join(dataset_path, class_name)
            f_names = os.listdir(f_path)
            for f_name in f_names:
                if f_name != ".DS_Store":
                    path = os.path.join(f_path, f_name)
                    path_holder.append({"path": path, "class": class_name})

    print(len(path_holder))
    class_details = []
    for i, path in enumerate(path_holder):
        class_details.append({i: path["class"]})
        copyfile(path["path"], os.path.join(dataset_path, str(i) + ".jpg"))

    f_utils.save_json_file(os.path.join(dataset_path, "classes.json"),
                           class_details)
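
# A minimal usage sketch for process_dataset, assuming a dataset laid out
# as one subfolder of images per class (the folder name is illustrative):
#
#   process_dataset("app/public/assets/semsearch/datasets/iconic200")
#
# This flattens the subfolders into 0.jpg .. N.jpg and writes a
# classes.json index next to them.
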
def save_similarity_scores(similarity_output_dir, layer_name, similarity_scores):
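    """Save similarity scores for one layer as <layer_name>.json inside
    similarity_output_dir."""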

    f_utils.mkdir(similarity_output_dir)
    json_file_path = os.path.join(similarity_output_dir, layer_name) + ".json"
    f_utils.save_json_file(json_file_path, similarity_scores)
def save_embeddings(embedding_output_dir, embedding_name, embedding):
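    """Save an embedding as <embedding_name>.json inside
    embedding_output_dir."""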
    f_utils.mkdir(embedding_output_dir)
    json_file_path = os.path.join(
        embedding_output_dir, embedding_name) + ".json"
    f_utils.save_json_file(json_file_path, embedding)
def generate_umap_embeddings(umap_params, extracted_features):
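    """Project extracted features to a low-dimensional embedding with
    UMAP and save it as <layer_name>.json in umap_params["output_dir"]."""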
    # fit_transform returns a numpy array; convert it to a plain list so
    # it can be serialized to JSON
    embedding = umap.UMAP().fit_transform(extracted_features).tolist()
    f_utils.mkdir(umap_params["output_dir"])
    json_file_path = os.path.join(
        umap_params["output_dir"], umap_params["layer_name"]) + ".json"
    f_utils.save_json_file(json_file_path, embedding)
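
# A minimal usage sketch for generate_umap_embeddings; the feature matrix
# and parameter values are illustrative assumptions:
#
#   import numpy as np
#   features = np.random.rand(200, 512)  # 200 images, 512-d features
#   generate_umap_embeddings(
#       {"output_dir": "app/public/assets/semsearch/embeddings",
#        "layer_name": "block1_conv1"},
#       features)
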
def curate_interesting():
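    """Write a starter template of "interesting" image groups (colors,
    edges, textures, patterns, parts, objects) for every model under the
    models assets directory."""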
    main_dict = {}
    models_path = "app/public/assets/models"
    models = os.listdir(models_path)
    models.sort()
    titles = ["colors", "edges", "textures", "patterns", "parts", "objects"]
    sample_objects = [{
        "title": title,
        "images": [{
            "layer": "",
            "image": 0
        }, {
            "layer": "",
            "image": 1
        }]
    } for title in titles]
    for model in models:
        if model != ".DS_Store":
            main_dict[model] = sample_objects

    print(main_dict)
    f_utils.save_json_file("app/src/assets/models/interesting1.json",
                           main_dict)
def get_model_viz_details(model_params):
    """Aggregate layer and neuron visualization details for each model
    directory and save model_details.json plus a model -> layer -> neuron
    index file."""

    layer_details = f_utils.load_json_file(
        "app/src/assets/models/layer_details.json")

    model_holder = []
    all_detail_holder = {}
    model_dir_names = os.listdir(model_params["model_dir"])
    if (".DS_Store" in model_dir_names):
        model_dir_names.remove(".DS_Store")
    for model_name in model_dir_names:
        detail_holder = {}
        model_layers_dict = layer_details[model_name]

        sumparams = sum(v["parametercount"]
                        for v in model_layers_dict.values())

        dir_path = os.path.join(model_params["model_dir"], model_name)
        f_utils.mkdir(dir_path)
        layer_list = os.listdir(dir_path)
        if (".DS_Store" in layer_list):
            layer_list.remove(".DS_Store")
        # layer_list.sort()
        layer_array = []
        all_layer_array = []
        for layer in model_layers_dict.keys():
            rowval = model_layers_dict[layer]
            rowval["id"] = str(rowval["layer_index"]) + ""
            all_layer_array.append(rowval)
        for layer in layer_list:
            if layer in model_layers_dict:
                layer_val = model_layers_dict[layer]
                layer_val["modelparameters"] = count_intermediate_params(
                    model_layers_dict, layer)
                layer_array.append(layer_val)

            # neuron file names (minus their extension) serve as neuron ids
            neuron_list = os.listdir(os.path.join(dir_path, layer))
            neuron_list = [x.split(".")[0] for x in neuron_list]

            neuron_list.sort(key=float)
            detail_holder[layer] = neuron_list
        layer_array = sorted(layer_array, key=lambda i: i["layer_index"])
        print(model_name, sumparams)
        all_layer_array = sorted(all_layer_array,
                                 key=lambda i: i["layer_index"])
        model_holder.append({
            "name": model_name,
            "layers": layer_array,
            "modelparameters": sumparams,
            "numlayers": len(model_layers_dict),
            "all_layers": all_layer_array
        })
        all_detail_holder[model_name] = detail_holder
    model_holder = sorted(model_holder, key=lambda i: i["modelparameters"])
    model_holder = {"models": model_holder}
    f_utils.save_json_file("app/src/assets/models/model_details.json",
                           model_holder)

    f_utils.save_json_file(model_params["output_path"], all_detail_holder)
    tf.logging.info("  >> Finished saving model and layer details")