Example #1
def get_movie_by_id(mid, path):
    idx = 0
    mkdir(path)
    m = Movie(mid)
    for refer, photo_url in m.photos():
        name = os.path.basename(photo_url)
        full_path = path + '/' + name
        if os.path.exists(full_path):
            print('pic {} exist skip'.format(name))
            continue
        # print('{}: saving {}'.format(idx, name))
        headers = {
            'Referer': refer,
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36',
        }
        # file_utils.save_from_url(photo_url, headers, full_path)
        threadPoolExecutor.submit(file_utils.save_from_url,
                                  url=photo_url,
                                  headers=headers,
                                  name=full_path,
                                  index=idx)
        idx += 1
    print('saving movie photos to {}'.format(path))
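This snippet leans on module-level setup the excerpt does not show: a shared thread pool, the bare mkdir() call (presumably file_utils.mkdir imported directly), and file_utils.save_from_url. A minimal sketch of that scaffolding, with the signature inferred from the call sites; the requests dependency and the worker count are assumptions:

# file_utils.py -- hypothetical sketch of the helpers assumed above
import os
from concurrent.futures import ThreadPoolExecutor

import requests  # assumed HTTP client


def mkdir(path):
    # create the directory tree if it does not already exist
    os.makedirs(path, exist_ok=True)


def save_from_url(url, headers, name, index):
    # download one file; keyword names match the submit() calls above
    resp = requests.get(url, headers=headers, timeout=30)
    resp.raise_for_status()
    with open(name, 'wb') as f:
        f.write(resp.content)
    print('{}: saved {}'.format(index, name))


# shared executor reused by the download examples on this page
threadPoolExecutor = ThreadPoolExecutor(max_workers=8)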
Example #2
def compute_scores():
    # default settings; the loops below overwrite everything except topx
    model_name = "vgg16"
    dataset_name = "iconic200"
    metric_name = "cosine"
    layer_name = "block1_conv1"
    topx = 15  # number of nearest neighbours scored per image

    sim_base_path = "app/public/assets/semsearch/similarity"
    score_base_path = "app/public/assets/semsearch/scores"

    similarity_metrics = feat_utils.list_distance_metrics()

    model_architectures = m_utils.get_supported_models()
    dataset_details = d_utils.get_supported_datasets()
    dataset_result_holder = {}
    for dataset_detail in dataset_details:
        dataset_name = dataset_detail["name"]
        model_result_holder = {}
        for model_detail in model_architectures:
            model_name = model_detail["name"]
            metric_holder = {}
            for metric_name in similarity_metrics:
                layer_names = m_utils.get_model_layer_names(model_name)
                layer_score_holder = {}
                score_path = os.path.join(score_base_path, dataset_name,
                                          model_name)
                f_utils.mkdir(score_path)
                score_path = os.path.join(score_path, metric_name + ".json")

                for layer_name in layer_names:
                    class_details = m_utils.get_class_details(dataset_name)
                    sim_path = os.path.join(sim_base_path, dataset_name,
                                            model_name, metric_name,
                                            layer_name + ".json")

                    print(sim_path)
                    sim_details = f_utils.load_json_file(sim_path)
                    model_score_per_image_holder = []
                    for i in range(len(sim_details)):
                        main_image = str(i)
                        # position 0 is the image itself, so keep the next topx entries
                        each_sim = sim_details[main_image][1:topx + 1]
                        model_score = m_utils.compute_performance(
                            each_sim, main_image, class_details)
                        model_score_per_image_holder.append(model_score * 100)

                    layer_score_holder[layer_name] = model_score_per_image_holder

                metric_holder[metric_name] = layer_score_holder
                f_utils.save_json_file(score_path, layer_score_holder)
            model_result_holder[model_name] = metric_holder
        dataset_result_holder[dataset_name] = model_result_holder
    print("Score generation complete")
    score_save_path = "app/src/assets/semsearch/modelscores.json"
    f_utils.save_json_file(score_save_path, dataset_result_holder)
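compute_scores leans on a few f_utils helpers that are not shown. A plausible sketch, inferred from how they are called (the real implementations live in the project's utils package):

import json
import os


def mkdir(path):
    # create the directory (and any parents) if it does not exist yet
    if not os.path.exists(path):
        os.makedirs(path)


def load_json_file(path):
    with open(path) as f:
        return json.load(f)


def save_json_file(path, data):
    with open(path, 'w') as f:
        json.dump(data, f)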
Example #3
def _copy_files(ori_root, des_root):
    logging.warning(f'_copy_files {ori_root} {des_root}')
    ori_len = len(ori_root)
    for dir_path, dir_names, file_names in os.walk(ori_root):
        # mirror the source-relative path under the destination root
        des_dir_path = des_root + dir_path[ori_len:]
        file_utils.mkdir(des_dir_path)
        for file in file_names:
            if os.path.isfile(os.path.join(dir_path, file)):
                file_utils.cp(os.path.join(dir_path, file),
                              os.path.join(des_dir_path, file),
                              is_binary=True)
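file_utils.cp is assumed to be a thin copy wrapper; a sketch inferred from the call above, plus an illustrative invocation (both paths are made up):

import shutil


def cp(src, dst, is_binary=False):
    # hypothetical stand-in: shutil.copyfile always copies raw bytes,
    # so is_binary is accepted only for signature compatibility
    shutil.copyfile(src, dst)


# mirror a report tree into a backup location (paths are illustrative)
_copy_files('/data/reports/run_001', '/backup/reports/run_001')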
Example #4
 def create_root(self):
     """
     创建任务的测试报告目录
     :return:
     """
     self._gn_name = self.name + '_' + self.create_time
     self._root_prefix = self.device.tv_com + '_'
     self.root_pc = os.path.join(constant.REPORTS_ROOT,
                                 self._root_prefix + self._gn_name)
     file_utils.mkdir(self.root_pc)
     self.root_tv = constant.REPORTS_ROOT_TV + '/' + self._root_prefix + self._gn_name
     tv_utlils.mkdir(self.device, constant.REPORTS_ROOT_TV)
     tv_utlils.mkdir(self.device, self.root_tv)
Example #5
def get_album(album, path):
    idx = 0
    file_utils.mkdir(path)
    for photo_url in album.photos():
        name = os.path.basename(photo_url)
        full_path = path + '/' + name
        if os.path.exists(full_path):
            print('pic {} exist skip'.format(name))
            continue
        # print("{}: submit {}".format(idx, name))
        threadPoolExecutor.submit(file_utils.save_from_url, url=photo_url, headers=headers, name=full_path, index=idx)
        # file_utils.save_from_url(photo_url, headers, path + '/' + name)
        idx += 1
    print("saving album to {}, total {} images".format(path, idx))
def save_files(directory_path, images):
    """[Save a list of image files to a given dataset filepath]

    Arguments:
        directory_path {string} -- [file path to save dataset]
        images {list string} -- [description]
    """

    # images are only written when the directory does not already exist
    if not os.path.exists(directory_path):
        f_utils.mkdir(directory_path)
        for i, img in enumerate(images):
            img = Image.fromarray(img, 'RGB')
            img.save(directory_path + "/" + str(i) + '.jpg')
        tf.logging.info("  >> Finished saving images to path " +
                        directory_path)
Example #7
def generate_dataset(dataset_params):
    if dataset_params["name"] == "cifar100":
        dataset_root_dir = dataset_params["path"]
        train_path = os.path.join(dataset_root_dir, "train")
        test_path = os.path.join(dataset_root_dir, "test")

        # download CIFAR100 files from the keras dataset repo
        (x_train, y_train), (x_test, y_test) = cifar100.load_data(label_mode='fine')

        # creating train and test folder
        save_files(train_path, x_train)
        save_files(test_path, x_test)

        tf.logging.info("  >> Cifar images saved to  datasets directory " +
                        dataset_root_dir)
    elif dataset_params["name"] == "cifar10":
        class_details = []
        (x_train, y_train), (x_test, y_test) = cifar10.load_data()

        category_counter = {}
        num_per_category = round(dataset_params["dataset_size"] / 10)
        c_counter = 0
        f_utils.mkdir(dataset_params["path"])

        for i, val in enumerate(list(y_train)):
            val = val[0]
            # start each class counter at 0 on first sight so the first
            # image of a class is not skipped
            category_counter.setdefault(val, 0)
            if category_counter[val] < num_per_category:
                class_details.append({str(c_counter): str(val)})
                category_counter[val] += 1
                img = Image.fromarray(x_train[i], 'RGB')
                img.save(dataset_params["path"] + "/" + str(c_counter) +
                         '.jpg')
                c_counter += 1
                if c_counter >= dataset_params["dataset_size"]:
                    break

        f_utils.save_json_file(
            os.path.join(dataset_params["path"], "classes.json"),
            class_details)

        tf.logging.info("  >> Cifar10 images saved to  datasets directory " +
                        dataset_params["path"])
Example #8
def get_celebrity(celebrity, path):
    idx = 0
    file_utils.mkdir(path)
    for refer, photo_url in celebrity.photos():
        name = os.path.basename(photo_url)
        full_path = path + '/' + name
        if os.path.exists(full_path):
            print('pic {} exist skip'.format(name))
            continue
        # print("{}: saving {}".format(idx, name))
        headers = {
            "Referer": refer,
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.104 Safari/537.36"
        }
        threadPoolExecutor.submit(file_utils.save_from_url, url=photo_url, headers=headers, name=full_path, index=idx)
        # file_utils.save_from_url(photo_url, headers, path + '/' + name)
        idx += 1

    print("Finish parsing celebrity pages, all file will save to {}".format(path))
Example #9
def save_similarity_scores(similarity_output_dir, layer_name, similarity_scores):
    f_utils.mkdir(similarity_output_dir)
    json_file_path = os.path.join(similarity_output_dir, layer_name) + ".json"
    f_utils.save_json_file(json_file_path, similarity_scores)
Example #10
def save_embeddings(embedding_output_dir, embedding_name, embedding):
    f_utils.mkdir(embedding_output_dir)
    json_file_path = os.path.join(
        embedding_output_dir, embedding_name) + ".json"
    f_utils.save_json_file(json_file_path, embedding)
Example #11
def generate_umap_embeddings(umap_params, extracted_features):
    embedding = umap.UMAP().fit_transform(extracted_features)
    f_utils.mkdir(umap_params["output_dir"])
    json_file_path = os.path.join(
        umap_params["output_dir"], umap_params["layer_name"]) + ".json"
    f_utils.save_json_file(json_file_path, embedding)
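One caveat: umap.UMAP().fit_transform returns a NumPy array, which the standard json module cannot serialize, so save_json_file presumably converts it first. If it does not, converting at the call site works; a sketch, assuming plain json underneath:

embedding = umap.UMAP().fit_transform(extracted_features)
f_utils.save_json_file(json_file_path, embedding.tolist())  # list of [x, y] pairs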
Example #12
def get_model_viz_details(model_params):
    model_name = "vgg16"  # default; overwritten by the loop below

    layer_details = f_utils.load_json_file(
        "app/src/assets/models/layer_details.json")

    model_holder = []
    all_detail_holder = {}
    model_dir_names = os.listdir(model_params["model_dir"])
    if (".DS_Store" in model_dir_names):
        model_dir_names.remove(".DS_Store")
    for model_name in model_dir_names:
        detail_holder = {}
        model_layers_dict = layer_details[model_name]

        sumparams = 0
        for key, value in model_layers_dict.items():
            sumparams = sumparams + model_layers_dict[key]["parametercount"]

        dir_path = os.path.join(model_params["model_dir"], model_name)
        f_utils.mkdir(dir_path)
        layer_list = os.listdir(dir_path)
        if (".DS_Store" in layer_list):
            layer_list.remove(".DS_Store")
        # layer_list.sort()
        layer_array = []
        all_layer_array = []
        for layer in model_layers_dict.keys():
            rowval = model_layers_dict[layer]
            rowval["id"] = str(rowval["layer_index"])
            all_layer_array.append(rowval)
        for layer in layer_list:
            if layer in model_layers_dict:
                layer_val = model_layers_dict[layer]
                layer_val["modelparameters"] = count_intermediate_params(
                    model_layers_dict, layer)
                layer_array.append(layer_val)

            neuron_list = os.listdir(os.path.join(dir_path, layer))
            neuron_list = [x.split(".")[0] for x in neuron_list]

            neuron_list.sort(key=float)
            detail_holder[layer] = neuron_list
        layer_array = sorted(layer_array, key=lambda i: i["layer_index"])
        layer_param_count = sum([r["parametercount"] for r in layer_array])
        print(model_name, sumparams)
        all_layer_array = sorted(all_layer_array,
                                 key=lambda i: i["layer_index"])
        model_holder.append({
            "name": model_name,
            "layers": layer_array,
            "modelparameters": sumparams,
            "numlayers": len(model_layers_dict),
            "all_layers": all_layer_array
        })
        all_detail_holder[model_name] = detail_holder
    model_holder = sorted(model_holder, key=lambda i: i["modelparameters"])
    model_holder = {"models": model_holder}
    f_utils.save_json_file("app/src/assets/models/model_details.json",
                           model_holder)

    f_utils.save_json_file(model_params["output_path"], all_detail_holder)
    tf.logging.info("  >> Finished saving model and layer details")
Example #13
def start(pkgName, presureTime):
    global isRunning
    global closeByHandle
    closeByHandle = False
    isRunning = True
    # clear any previous results, then recreate the per-metric directories
    file_utils.deleteDir(constants.PATH_TEMP)
    file_utils.mkdir(constants.PATH_PID)
    file_utils.mkdir(constants.PATH_MEM)
    file_utils.mkdir(constants.PATH_CPU)
    file_utils.mkdir(constants.PATH_FD)
    file_utils.mkdir(constants.PATH_THREAD)
    file_utils.mkdir(constants.PATH_VIEW)
    file_utils.mkdir(constants.PATH_TASK)
    tick(1, pkgName, presureTime)
    pkgArr = pkgName.split("|")
    cpu_manager.start(pkgArr, constants.PATH_TASK)
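A usage sketch: pkgName carries one or more package names separated by '|' (as the split above shows), and presureTime is presumably the duration of the pressure run. Package names and the unit of the duration are assumptions:

# monitor two packages for one run (duration units assumed to be seconds)
start('com.example.app|com.example.helper', 3600)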