Example #1

    def load(self):
        """Load weights."""
        print_memory("DPL:pre_load")
        r = self.aggregator.load(self.directory)
        print_memory("DPL:post_load")
        logging.info(f"Weights load result: {r}")
        return r
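
Every example on this page instruments memory through a print_memory helper whose implementation is not shown. A minimal stand-in, assuming it reports the process's resident set size via psutil (the real helper may report different counters or log elsewhere):

import os

import psutil


def print_memory(stage=""):
    """Hypothetical stand-in: print current RSS, tagged with a stage label."""
    rss_mb = psutil.Process(os.getpid()).memory_info().rss / 1e6
    print(f"[memory] {stage}: {rss_mb:.1f} MB")

The stage="" default covers both call styles seen below: positional, as in print_memory("DPL:pre_load"), and keyword, as in print_memory(stage="Command init").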
Example #2

    def load(self, directory):
        """Load ratings, tolerating a missing checkpoint file."""
        try:
            print_memory("FMPARA:pre_rating_load")
            result = self.all_ratings.load(directory)
            print_memory("FMPARA:post_rating_load")
        except FileNotFoundError as e:
            logging.warning(f"No model restore data: {e}")
            result = {"status": str(e)}
        return result
Example #3

    def handle(self, **options):
        print_memory(stage="Command init")

        features = options["features"]

        if features is None:
            features = VIDEO_FIELDS

        for f in features:
            assert f in VIDEO_FIELDS, f"Feature {f} not recognized, {VIDEO_FIELDS}"

        print(f"Using features {', '.join(features)}")

        for config in options["config"]:
            print("Loading config", config)
            load_gin_config(config)

        # running parallel hparam tuning with Ray
        if options["tune"]:

            def pre_parse():
                """Load django before reading configuration (otherwise have import error)."""
                import os

                os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                                      "django_react.settings")

                import django

                django.setup()

            if options["tune_resume"]:
                gin.bind_parameter("tune_run.resume", True)

            tune_gin(experiment, pre_parse=pre_parse)

        # regular training
        else:
            print_memory(stage="pre-learner init")
            # instantiate the configured learner class with the selected features
            learner_obj = learner()(features=features)

            print_memory(stage="learner created")

            learner_obj.fit(epochs=options["epochs_override"])

            print_memory(stage="post train")

            learner_obj.update_features()

            print_memory(stage="post update")
Example #4

    def create_models(self):
        """Create learning models and the aggregator."""
        self.all_ratings = AllRatingsWithCommon(
            experts=self.users,
            objects=self.videos,
            output_features=self.features,
            name="prod",
        )

        print_memory(stage="DPLF:ratings_nodata_created")

        # creating models
        self.user_to_model = {
            user: FeaturelessPreferenceLearningModel(
                expert=user, all_ratings=self.all_ratings
            )
            for user in self.users
        }

        print_memory(stage="DPLF:models_created")

        # fill the models with data before creating the aggregator
        self.user_to_size = {
            user: self.fill_model_data(self.user_to_model[user], user)
            for user in tqdmem(self.users, desc="fill_data")
        }

        # virtual 'common' data
        fplm_common = FeaturelessPreferenceLearningModel(
            expert=AllRatingsWithCommon.COMMON_EXPERT, all_ratings=self.all_ratings
        )
        fplm_common.on_dataset_end()

        print_memory(stage="DPLF:data_filled")

        # resetting the model given the data
        self.all_ratings.reset_model()

        print_memory(stage="DPLF:model_reset_ok")

        # aggregating models
        self.aggregator = FeaturelessMedianPreferenceAverageRegularizationAggregator(
            models=[self.user_to_model[u] for u in self.users]
        )
        self.aggregator.certification_status = self.user_certified

        print_memory(stage="DPLF:aggregator_created")
Example #5

    def __init__(
        self,
        experts,
        objects,
        output_features,
        name,
        default_rating=None,
        var_init_cls=None,
    ):
        """Index experts, objects, and features, then initialize the rating model."""
        print_memory("ARWC:init")

        # experts
        self.name = name
        self.experts = list(experts) + [self.COMMON_EXPERT]
        self.experts_set = set(self.experts)
        self.aggregate_index = len(self.experts) - 1
        assert len(self.experts) == len(
            self.experts_set), "Duplicate experts are not allowed"
        self.experts_reverse = {
            expert: i
            for i, expert in enumerate(self.experts)
        }

        # features
        self.output_features = list(output_features)
        self.output_dim = len(output_features)

        # objects
        self.objects = list(objects)
        self.objects_set = set(self.objects)
        assert len(self.objects_set) == len(
            self.objects), "Duplicate objects are not allowed."
        self.objects_reverse = {obj: i for i, obj in enumerate(self.objects)}

        # outputs
        self.layer = None
        self.var_init_cls = var_init_cls
        self.indices_list = []
        self.variables = []
        self.expert_id_to_used_videos = {}
        self.default_rating = default_rating

        self.reset_model()
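
A hypothetical instantiation of this constructor (the expert, object, and feature names below are illustrative, not taken from the project):

all_ratings = AllRatingsWithCommon(
    experts=["alice", "bob"],
    objects=["video_1", "video_2", "video_3"],
    output_features=["feature_a", "feature_b"],
    name="demo",
)
# COMMON_EXPERT is appended internally, so all_ratings.experts has length 3
# and all_ratings.aggregate_index points at it.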
Example #6

    def load(self, directory):
        """Load weights."""

        # print("Load weights")

        print_memory("ARWC:load_start")

        path = self._save_path(directory=directory)
        with open(path, "rb") as f:
            result = pickle.load(f)

        print_memory("ARWC:pickle_loaded")

        # setting zero weights
        self.reset_model()

        print_memory("ARWC:model_reset_loaded")

        old_object_indices = {
            obj: idx
            for idx, obj in enumerate(result["objects"])
        }
        old_feature_indices = {
            feature: idx
            for idx, feature in enumerate(result["features"])
        }
        old_expert_indices = {
            expert: idx
            for idx, expert in enumerate(result["experts"])
        }

        restored_items = 0

        print_memory("ARWC:old_indices_loaded")
        # print("experts", len(self.experts), "objects", len(self.objects),
        #       "features", len(self.output_features))

        to_assign_idx = []
        to_assign_vals = []

        print_memory("ARWC:start_assign_append_loop")

        if "type" not in result:
            logging.warning(
                "Old checkpoint (without 'type') is found and not loaded.")
            return {}

        def get_old_data(expert_id, object_id, feature_id):
            if result["type"] == "dense":
                return result["data"][expert_id, object_id, feature_id]
            elif result["type"] == "sparse":
                layer = result["layer"]
                try:
                    idx = layer["idx"].get_key(
                        (expert_id, object_id, feature_id))
                    return result["data"][idx]
                except KeyError:
                    return np.nan
            else:
                raise NotImplementedError

        for new_expert_idx, expert in enumerate(
                tqdmem(self.experts,
                       desc="rating_load_expert_loop",
                       leave=True)):
            old_expert_idx = old_expert_indices.get(expert, None)
            if old_expert_idx is None:
                continue
            # object indices this expert actually rated in the checkpoint
            old_obj_idxes = set(
                result["expert_id_to_used_videos"].get(old_expert_idx, []))

            for new_obj_idx, obj in enumerate(
                    tqdmem(
                        self.objects,
                        desc="rating_load_object_loop",
                        leave=False,
                        disable=True,
                    )):
                old_obj_idx = old_object_indices.get(obj, None)
                if old_obj_idx is None or old_obj_idx not in old_obj_idxes:
                    continue

                for new_f_idx, feature in enumerate(self.output_features):
                    old_f_idx = old_feature_indices.get(feature, None)
                    if old_f_idx is None:
                        continue

                    # all three old indices are known to be present at this point
                    val = get_old_data(old_expert_idx, old_obj_idx,
                                       old_f_idx)

                    if not np.isnan(val):
                        to_assign_idx.append(
                            (new_expert_idx, new_obj_idx, new_f_idx))
                        to_assign_vals.append(val)
                        restored_items += 1

        print_memory("ARWC:finish_assign_append_loop")

        if to_assign_idx:
            print_memory("ARWC:start_create_layer_variable")

            if result["type"] == "dense":
                self.layer.v = tf.Variable(
                    tf.tensor_scatter_nd_update(self.layer.v, to_assign_idx,
                                                to_assign_vals),
                    trainable=True,
                )
            elif result["type"] == "sparse":
                to_assign_idx_flat = self.layer.idx.get_keys(to_assign_idx)

                assert len(to_assign_idx_flat) == len(to_assign_vals), (
                    to_assign_idx_flat,
                    to_assign_vals,
                )

                to_assign_idx_flat = [[x] for x in to_assign_idx_flat]

                logging.warning(f"Restoring {len(to_assign_vals)} values...")

                self.layer.v = tf.Variable(
                    tf.tensor_scatter_nd_update(self.layer.v,
                                                to_assign_idx_flat,
                                                to_assign_vals),
                    trainable=True,
                )
            else:
                raise NotImplementedError

            print_memory("ARWC:finish_create_layer_variable")

        print_memory("ARWC:alive")

        return {"restored_items": restored_items}
Example #7

    def __init__(
        self,
        directory=None,
        load=True,
        save=True,
        user_queryset=None,
        video_queryset=None,
        users_to_ratings=None,
        features=None,
    ):
        # determining the directory to save results to
        if directory is None:
            directory = os.path.join(BASE_DIR, "..", ".models")
        self.directory = directory
        os.makedirs(self.directory, exist_ok=True)

        print_memory("DPL:init")

        # all users
        self.user_queryset = (UserPreferences.objects.all()
                              if user_queryset is None else user_queryset)
        self.users = [x.id for x in self.user_queryset]

        print_memory("DPL:users_loaded")

        # is the user certified?
        # if not, will not be used for aggregation

        def is_certified(user_pref_id):
            if COUNT_UNCERTIFIED_USERS:
                return True
            else:
                obj = get_object_or_None(
                    UserInformation, user__userpreferences__id=user_pref_id)
                return obj.is_certified if obj is not None else False

        self.user_certified = [is_certified(user) for user in self.users]

        print_memory("DPL:is_certified_all")

        # all videos
        self.video_queryset = (Video.objects.all()
                               if video_queryset is None else video_queryset)
        self.videos = [x.video_id for x in self.video_queryset]
        self.videos_set = set(self.videos)

        print_memory("DPL:all_videos_loaded")

        # user -> all expert rating array
        self.users_to_ratings = ({
            user: ExpertRating.objects.filter(user=user)
            for user in self.users
        } if users_to_ratings is None else users_to_ratings)

        print_memory("DPL:users_ratings_loaded")

        for u in self.users:
            assert (u in self.users_to_ratings
                    ), f"users_to_ratings must contain user {u}"

        print_memory("DPL:user_rating_check_ok")

        # models and aggregator (empty)
        self.user_to_model = {user: None for user in self.users}

        print_memory("DPL:models_initialized")

        # the aggregator (created later in create_models)
        self.aggregator = None

        # creating the list of features
        if features is None:
            features = VIDEO_FIELDS

        assert isinstance(features, list), features
        assert all([f in VIDEO_FIELDS
                    for f in features]), (features, VIDEO_FIELDS)

        self.features = features

        print_memory("DPL:pre_model_create")

        print(f"Learner uses features {self.features}")

        # actually creating the models
        # aggregator is set here
        self.create_models()

        print_memory("DPL:models_created")

        # load/save variables
        self.save_after_train = save

        # loading weights if requested
        if load:
            self.load()

        print_memory("DPL:weights_loaded")

        self.train_counter = 0
        self.stats = {}

        print_memory("DPL:READY")