Example 1
def pruning(arch_name, masks):
    masks_file = ensure_dir(root_dir() / "masks" / (arch_name + ".pkl"))
    if masks_file.exists():
        with open(masks_file, "rb") as file:
            mask_values = pickle.load(file)
    else:
        mask_values = {}

    pretrained_model_dir = root_dir() / "downloads" / "model" / arch_name
    model_dir = root_dir() / "train" / arch_name

    warm_start_hook = WarmStartHook(pretrained_model_dir, model_dir)
    pruning_hook = PruningHook(masks, mask_values)

    estimator = tf.estimator.Estimator(
        model_fn=partial(
            model_fn,
            pruning_hook=pruning_hook,
            pretrained_model_dir=pretrained_model_dir,
        ),
        model_dir=model_dir,
    )
    input_fn = tf.estimator.inputs.numpy_input_fn(
        np.random.rand(*input_shapes[arch_name]).astype("f"),
        np.random.randint(1000, size=(1, 1)),
        batch_size=1,
        shuffle=False,
    )

    estimator.train(input_fn=input_fn, hooks=[warm_start_hook, pruning_hook])

    with open(masks_file, "wb") as file:
        pickle.dump(pruning_hook.mask_values, file)
    return
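
The call sites above treat root_dir() as returning a pathlib.Path and ensure_dir() as creating any missing parent directory before a file is written there. A minimal sketch of helpers with that behaviour (an illustrative assumption, not the project's actual utils module):

from pathlib import Path

def root_dir():
    # assumption: the project root is the directory containing this utils module
    return Path(__file__).resolve().parent

def ensure_dir(path):
    # assumption: make sure the parent directory of `path` exists, then return the path
    path.parent.mkdir(parents=True, exist_ok=True)
    return path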
Example 2
    def execute(self, note):
        main_panel = note.getBody()

        # first, start the web server
        # self.facade.registerProxy(model.WebServerProxy(main_panel,
        #                                               config={'host': '127.0.0.1', 'port': 12345,
        #                                                       #'path': '%s/ui/html' % utils.root_dir()
        #                                                       'path': None,
        #                                                        },
        #                                               http_root_obj=http_root.MainHttpRoot(),
        #                                               orm_base_obj=vo.Base,
        #                                               db_plugin=utils.db_str_conn('mainapp.db')),)
        self.facade.registerProxy(model.WebServerProxy(main_panel))

        with open("%s/plugins.yaml" % utils.root_dir()) as stream:
            cfg = yaml.safe_load(stream)
            if cfg:
                # register startup class
                for key, val in cfg.iteritems():
                    try:
                        plugin_facade_cls = utils.class_for_name(val["module"] + ".main", "PluginFacade")
                        facade = plugin_facade_cls.getInstance(key=key)
                        if facade:
                            logging.info("%s: plugin loaded" % key)
                            facade.PLUGIN_NAME = key
                        # calling startup
                        facade.sendNotification(facade.STARTUP, main_panel)
                    except Exception, e:
                        logging.info("%s: %s" % (key, e.message))
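
The plugin loop above implies a plugins.yaml whose top level maps each plugin name to a mapping with at least a "module" key, from which <module>.main.PluginFacade is imported. A sketch of the structure yaml.safe_load would have to return (the plugin name and module path are hypothetical):

# hypothetical plugins.yaml contents, shown as the dict yaml.safe_load would produce
cfg = {
    "queue": {"module": "plugins.queue"},  # resolved as plugins.queue.main.PluginFacade
}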
Example 3
    def load_form(self, params):
        current_path = utils.root_dir()
        if 'postfix_path' in params:
            current_path += params["postfix_path"]
        variables = params["vars"] if "vars" in params else {}
        content = pages.retrieve(params["name"], BASE_PATH=current_path, ASIDE_JS=None, TITLE=params["title"],
                                 **variables)
        self.viewComponent.webView.setHtml(*content)
Example 4
    def execute(self, note):
        main_panel = note.getBody()
        main_panel.on_shutdown.signal.connect(StartupCommand._on_shutdown)

        main_facade = AsideFacade.getInstance(key=MAIN_APP_KEY)
        web_proxy = main_facade.retrieveProxy(model.WebServerProxy.NAME)
        path = "%s/%s" % (utils.root_dir(), self.facade.PLUGIN_DIR)
        static_dir = '%s/html' % (path,)
        web_proxy.tree_mount(self.facade.PLUGIN_NAME, http_root.QueueHTTPRoot(), static_dir,
                             orm_base_obj=vo.Base,
                             db_plugin=utils.db_str_conn("queue.db", path=path))
Example 5
from utils import root_dir, nice_json
from flask import Flask
from werkzeug.exceptions import NotFound
import json

app = Flask(__name__)

with open("{}/database/showtimes.json".format(root_dir()), "r") as f:
    showtimes = json.load(f)


# the root URL takes no arguments, so return a placeholder index of the subresource URIs
@app.route("/", methods=['GET'])
def hello():
    return nice_json({
        "uri": "/",
        "subresource_uris": {
            "showtimes": "/showtimes",
            "showtime": "/showtimes/<date>"
        }
    })


# if no date argument is passed, return all available showtimes
@app.route("/showtimes", methods=['GET'])
def showtimes_list():
    return nice_json(showtimes)


# if a date argument is passed, use it to fetch the appropriate key/value pair (see the sketch below)
@app.route("/showtimes/<date>", methods=['GET'])
Example 6
    def test_root_dir(self):
        # FIXME: How to test this without either hardcoding the path or
        # doing the same work as root_dir()?
        self.assertEqual(utils.root_dir(),
                         "/Users/stain/Documents/i/src/forgetmp3")
Example 7
def download_tf_model(arch_name, model_dir):
    full_model_dir = root_dir() / "downloads" / model_dir
    if not full_model_dir.exists():
        full_model_dir.mkdir(mode=0o755, parents=True, exist_ok=True)
    if not (full_model_dir / arch_name / "checkpoint").exists():
        download_arch(arch_name, str(full_model_dir / arch_name) + "/")
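
A possible call, using the arch_name and directory layout that Example 8 expects; the checkpoint then lands under root_dir()/downloads/model/vgg16/:

download_tf_model("vgg16", "model")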
Example 8
def main(_):
    now = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime(time.time()))

    arch_name = FLAGS.arch_name

    if FLAGS.init_checkpoint is None or FLAGS.init_checkpoint == "":
        pretrained_model_dir = root_dir() / "downloads" / "model" / arch_name
    else:
        pretrained_model_dir = FLAGS.init_checkpoint

    if FLAGS.pre_masks_dir is None or FLAGS.pre_masks_dir == "":
        all_mask_values = {}
    else:
        with open(FLAGS.pre_masks_dir, "rb") as file:
            all_mask_values = pickle.load(file)

    if FLAGS.output_dir is None:
        model_dir = root_dir() / "train" / arch_name / now
    else:
        model_dir = root_dir() / "train" / FLAGS.output_dir

    if FLAGS.data_dir is not None:
        data_dir = FLAGS.data_dir
    else:
        raise ValueError("data_dir must be set")

    if FLAGS.pruning_type != "":
        pruning_type = FLAGS.pruning_type
    else:
        raise ValueError("pruning_type must be set")

    tf.logging.info("Pruning_type : %s\n" % pruning_type)

    if "tw" in pruning_type or "bw" in pruning_type:
        masks = [
            "vgg_16/conv1/conv1_1/weights:0",
            "vgg_16/conv1/conv1_2/weights:0",
            "vgg_16/conv2/conv2_1/weights:0",
            "vgg_16/conv2/conv2_2/weights:0",
            "vgg_16/conv3/conv3_1/weights:0",
            "vgg_16/conv3/conv3_2/weights:0",
            "vgg_16/conv3/conv3_3/weights:0",
            "vgg_16/conv4/conv4_1/weights:0",
            "vgg_16/conv4/conv4_2/weights:0",
            "vgg_16/conv4/conv4_3/weights:0",
            "vgg_16/conv5/conv5_1/weights:0",
            "vgg_16/conv5/conv5_2/weights:0",
            "vgg_16/conv5/conv5_3/weights:0",
            # "vgg_16/fc6/weights:0",
            # "vgg_16/fc7/weights:0",
            # "vgg_16/fc8/weights:0",
        ]
    else:
        masks = [
            "vgg_16/conv1/conv1_1/weights:0",
            "vgg_16/conv1/conv1_2/weights:0",
            "vgg_16/conv2/conv2_1/weights:0",
            "vgg_16/conv2/conv2_2/weights:0",
            "vgg_16/conv3/conv3_1/weights:0",
            "vgg_16/conv3/conv3_2/weights:0",
            "vgg_16/conv3/conv3_3/weights:0",
            "vgg_16/conv4/conv4_1/weights:0",
            "vgg_16/conv4/conv4_2/weights:0",
            "vgg_16/conv4/conv4_3/weights:0",
            "vgg_16/conv5/conv5_1/weights:0",
            "vgg_16/conv5/conv5_2/weights:0",
            "vgg_16/conv5/conv5_3/weights:0",
            # "vgg_16/fc6/weights:0",
            # "vgg_16/fc7/weights:0",
            # "vgg_16/fc8/weights:0",
        ]
    if arch_name == "alexnet":
        masks = [
            "conv2d/kernel:0",
            "conv2d_1/kernel:0",
            "conv2d_2/kernel:0",
            "conv2d_3/kernel:0",
            "conv2d_4/kernel:0",
            # "dense/kernel:0",
            # "dense_1/kernel:0",
            # "dense_2/kernel:0",
        ]

    batch_size = int(FLAGS.batch_size)
    num_epochs = int(FLAGS.num_train_epochs)
    is_masked_weight = (FLAGS.score_type == "weight")
    pruning_steps = int(FLAGS.pruning_steps)
    finetune_steps = int(FLAGS.finetune_steps)
    mini_finetune_steps = int(FLAGS.mini_finetune_steps)
    if is_masked_weight:
        tf.logging.info("Pruning score : Weight ! \n")
    else:
        tf.logging.info("Pruning score : Taylor ! \n")

    if "tw" in pruning_type:
        pruning_layers = [[0, 1], [2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
        sparsity_stages = [25, 40, 50, 60, 65, 70, 75, 80, 85, 90, 92, 94, 96]
    if "ew" in pruning_type:
        pruning_layers = [[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]]
        sparsity_stages = [25, 40, 50, 60, 65, 70, 75, 80, 85, 90, 92, 94, 96]
    if "vw" in pruning_type:
        pruning_layers = [[0, 1], [2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
        sparsity_stages = [25, 40, 50, 60, 65, 70, 75, 80, 85, 90, 92, 94, 96]

    if "bw" in pruning_type:
        pruning_layers = [[0, 1], [2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
        sparsity_stages = [25, 40, 50, 60, 65, 70, 75, 80, 85, 90, 92, 94, 96]

    early_stop = [100, 100, 100, 100, 100, 100]

    previous_checkpoint = tf.train.latest_checkpoint(pretrained_model_dir)
    previous_mask_values = copy.deepcopy(all_mask_values)

    # Set hooks
    pruning_hook = PruningHook(masks, pruning_type, is_masked_weight,
                               previous_mask_values)

    # Set up training hook that logs the training accuracy every 100 steps.
    tensors_to_log = {
        "train_accuracy": "train_accuracy",
        "train_accuracy_top_5": "train_accuracy_top_5",
    }
    logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log,
                                              every_n_iter=100)

    session_config = tf.ConfigProto(allow_soft_placement=True)
    session_config.gpu_options.allow_growth = True
    estimator_config = tf.estimator.RunConfig(
        session_config=session_config,
        save_checkpoints_secs=60 * 60 * 24 * 5,  # 5 days
        save_checkpoints_steps=None,
        keep_checkpoint_max=len(sparsity_stages) + 3,
    )

    if arch_name == "vgg16":
        pre_fn = preprocess_image
    if arch_name == "alexnet":
        pre_fn = alexnet_preprocess_image

    # Set input_fn
    def eval_input_fn():
        return imagenet.test(
            data_dir,
            batch_size,
            num_parallel_calls=40,
            transform_fn=lambda ds: ds.map(lambda image, label:
                                           (image, label - 1)),
            preprocessing_fn=pre_fn)

    def train_input_fn():
        return imagenet.train(
            data_dir,
            batch_size,
            is_shuffle=True,
            num_epochs=num_epochs,
            num_parallel_calls=40,
            transform_fn=lambda ds: ds.map(lambda image, label:
                                           (image, label - 1)),
            preprocessing_fn=pre_fn)

    warm_start_settings = tf.estimator.WarmStartSettings(previous_checkpoint)
    estimator = tf.estimator.Estimator(
        model_fn=partial(model_fn, pruning_hook=None, is_pruning=False),
        config=estimator_config,
        warm_start_from=warm_start_settings,
    )

    tf.logging.info('First Evaluation !!!')
    eval_results = estimator.evaluate(input_fn=eval_input_fn)
    tf.logging.info('Done First Evaluation !!!\n')
    tf.logging.info("First Evaluation results:\n\n\t%s\n" % eval_results)

    ## Prune the CNN model in multiple stages
    for stage in range(len(sparsity_stages)):
        stage_dir = model_dir / ("sparsity_stage_" +
                                 str(sparsity_stages[stage]))

        sparsity = sparsity_stages[stage]
        pruning_hook.sparsity = sparsity

        tf.logging.info("\n\n\n\n\n\n\n\n\n\n\n\n")
        tf.logging.info("Stage : %d, Sparsity : %f, Finetune_steps : %d" %
                        (stage, sparsity, finetune_steps))
        tf.logging.info("early_stop:")
        tf.logging.info(early_stop)

        is_early_stop = True
        for layer in range(len(pruning_layers)):
            # if "ew" in pruning_type:
            #     sparsity = sparsity_layers[layer]
            pruning_hook.sparsity = sparsity
            tf.logging.info("\n\n\n\n\n")
            if early_stop[layer] < sparsity:
                continue
            is_early_stop = False

            layer_dir = stage_dir / ("layer_" + str(layer))
            masks_dir = layer_dir / "masks"
            os.makedirs(masks_dir)

            masks_now = pruning_layers[layer]

            assert previous_checkpoint
            warm_start_settings = tf.estimator.WarmStartSettings(
                previous_checkpoint)
            pruning_hook.masks_now = masks_now
            pruning_hook.mask_values = copy.deepcopy(previous_mask_values)

            # Set estimator
            pruning_estimator = tf.estimator.Estimator(
                model_fn=partial(model_fn,
                                 pruning_hook=pruning_hook,
                                 is_pruning=True),
                model_dir=layer_dir,
                warm_start_from=warm_start_settings,
                config=estimator_config,
            )

            tf.logging.info('Prune !!!')
            pruning_hook.initialize(pruning_steps)
            pruning_estimator.train(input_fn=train_input_fn,
                                    hooks=[logging_hook, pruning_hook],
                                    steps=pruning_steps)
            tf.logging.info('Done Prune !!!\n')

            # Set fine-tune estimator.
            # The mask_values were just updated by pruning_estimator.train().
            # Because the mask_values captured by an estimator are fixed, a new
            # estimator is built on every iteration for fine-tuning.
            estimator = tf.estimator.Estimator(
                model_fn=partial(model_fn,
                                 pruning_hook=pruning_hook,
                                 is_pruning=False),
                model_dir=layer_dir,
                config=estimator_config,
            )

            mini_train_times = 0
            while True:
                tf.logging.info('Mini Fine Tune !!!')
                estimator.train(input_fn=train_input_fn,
                                hooks=[logging_hook],
                                steps=mini_finetune_steps)
                tf.logging.info('Done Mini Fine Tune !!!\n')

                tf.logging.info('Evaluation !!!')
                eval_results = estimator.evaluate(input_fn=eval_input_fn)

                tf.logging.info(
                    "Pruning Sparsity %d Layer %d Evaluation results:\n\n\t%s\n"
                    % (sparsity, layer, eval_results))
                tf.logging.info('Done Evaluation !!!\n')

                threshold = 0.8885
                if sparsity > 75:
                    threshold = 0.88818
                if sparsity > 85:
                    threshold = 0.0

                if arch_name == "alexnet":
                    threshold = 0.775

                if eval_results["accuracy_top_5"] > threshold:
                    previous_checkpoint = tf.train.latest_checkpoint(layer_dir)
                    previous_mask_values = copy.deepcopy(
                        pruning_hook.mask_values)
                    tf.logging.info('GOOD CHECKPOINT!!!\n')
                    with open(masks_dir / ("good_mask_" + str(sparsity) + ".pkl"),
                              "wb") as file:
                        pickle.dump(pruning_hook.mask_values, file)

                    break
                else:
                    tf.logging.info('BAD CHECKPOINT!!!\n')
                    with open(masks_dir / ("bad_mask_" + str(sparsity) + ".pkl"),
                              "wb") as file:
                        pickle.dump(pruning_hook.mask_values, file)

                mini_train_times += 1

                if (mini_train_times > 8):
                    early_stop[layer] = sparsity
                    break

        if is_early_stop:
            break

        tf.logging.info("\n\n\n\n\n")
        all_layer_dir = stage_dir / "all_layers"
        masks_dir = all_layer_dir / "masks"
        os.makedirs(masks_dir)

        assert previous_checkpoint
        warm_start_settings = tf.estimator.WarmStartSettings(
            previous_checkpoint)
        pruning_hook.mask_values = copy.deepcopy(previous_mask_values)

        # Set fine-tune estimator.
        # The mask_values were updated while pruning the individual layers above.
        # Because the mask_values captured by an estimator are fixed, a new
        # estimator is built for this stage's all-layer fine-tuning pass.
        estimator = tf.estimator.Estimator(
            model_fn=partial(
                model_fn,
                pruning_hook=pruning_hook,
                is_pruning=False,
            ),
            model_dir=all_layer_dir,
            config=estimator_config,
            warm_start_from=warm_start_settings,
        )

        tf.logging.info('Fine Tune !!!')
        estimator.train(input_fn=train_input_fn,
                        hooks=[logging_hook],
                        steps=finetune_steps)
        tf.logging.info('Done Fine Tune !!!\n')

        tf.logging.info('Last Evaluation !!!')
        eval_results = estimator.evaluate(input_fn=eval_input_fn)
        tf.logging.info('Done Evaluation !!!\n')
        tf.logging.info("After Pruning Evaluation results:\n\n\t%s\n" %
                        eval_results)

        previous_checkpoint = tf.train.latest_checkpoint(all_layer_dir)
        previous_mask_values = copy.deepcopy(pruning_hook.mask_values)
        with open(masks_dir / ("mask_" + str(sparsity) + ".pkl"),
                  "wb") as file:
            pickle.dump(pruning_hook.mask_values, file)

    return
Example 9
from utils import root_dir, nice_json
from flask import Flask
from werkzeug.exceptions import NotFound
import json

app = Flask(__name__)

# load the movie data into memory
with open("{}/database/movies.json".format(root_dir()), "r") as f:
    movies = json.load(f)


# app.route binds a URL to a view function; a good example of a decorator (see the note after this function)
@app.route("/", methods=['GET'])
def hello():
    return nice_json({
        "uri": "/",
        "subresource_uris": {
            "movies": "/movies",
            "movie": "/movies/<id>"
        }
    })
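
# Note (added illustration, not part of the original service): the decorator above
# is shorthand for registering the view function by hand, roughly:
#
#     def hello():
#         ...
#     hello = app.route("/", methods=['GET'])(hello)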


# triggered by http://127.0.0.1:5001/movies/7daf7208-be4d-4944-a3ae-c1c2f516f3e6
@app.route("/movies/<movieid>", methods=['GET'])
def movie_info(movieid):
    if movieid not in movies:
        raise NotFound
    result = movies[movieid]
    result["uri"] = "/movies/{}".format(movieid)