def apply_transforms(dataset, confs, batch_size):
    """
    Apply transforms to the dataset.

    :param dataset: The dataset to transform (must support ``.batch``).
    :param confs: A list of configurations like {"name": "scaling", ...},
        each specifying a transform. A string entry containing "---"
        (e.g. "--------") separates the pre-batch transforms from the
        post-batch transforms; without a separator, all transforms are
        applied before batching.
    :param batch_size: The batch size used to batch the dataset.
    :return: A new dataset after all transforms are applied.
    """
    # Split the configuration list at the separator marker, if present.
    # An explicit scan replaces the previous boolean-list + .index(True)
    # + except-ValueError idiom, and isinstance handles str subclasses.
    pre_batch_confs = confs
    post_batch_confs = []
    for separator_index, entry in enumerate(confs):
        if isinstance(entry, str) and "---" in entry:
            pre_batch_confs = confs[:separator_index]
            post_batch_confs = confs[separator_index + 1:]
            break

    log("Origin dataset={}".format(dataset))
    context = {"batch_size": batch_size}

    # Per point cloud instance transform (applied before batching)
    for conf in pre_batch_confs:
        transform = object_from_conf(conf, scope="transform", context=context)
        dataset = transform(dataset)
        log("After pre-batch transform \"{}\" with conf={}, dataset={}".format(conf["name"], conf, dataset))

    dataset = dataset.batch(batch_size)
    log("Batch transform, dataset={}".format(dataset))

    # Per-batch transform (applied after batching)
    for conf in post_batch_confs:
        transform = object_from_conf(conf, scope="transform", context=context)
        dataset = transform(dataset)
        log("After post-batch transform \"{}\" with conf={}, dataset={}".format(conf["name"], conf, dataset))

    return dataset
def learning_rate_from_config(learning_rate_conf):
    """
    Build a learning rate scheduler from its configuration.

    :param learning_rate_conf: The learning rate configuration.
    :return: A learning rate scheduler.
    """
    scheduler = object_from_conf(learning_rate_conf, scope="learning_rate")
    return scheduler
def optimizer_from_config(learning_rate, optimizer_conf):
    """
    Build an optimizer from its configuration.

    :param learning_rate: The learning rate, either a scalar or a learning
        rate schedule.
    :param optimizer_conf: The optimizer configuration.
    :return: The corresponding optimizer.
    """
    # The learning rate is handed to the factory through the context dict.
    return object_from_conf(
        optimizer_conf,
        scope="optimizer",
        context={"learning_rate": learning_rate},
    )
def layer_from_config(layer_conf, model_conf, data_conf):
    """
    Build the corresponding keras layer from configurations.

    :param layer_conf: The layer configuration.
    :param model_conf: The global model configuration; sometimes used to
        generate special layers such as the "output-classification" and
        "output-segmentation" layers.
    :param data_conf: The dataset configuration, for generating special
        layers.
    :return: A keras layer.
    """
    # NOTE(review): a class-count context was previously considered here
    # ({"class_count": data_conf["class_count"]}) but is disabled; the
    # layer is currently built with no context at all — confirm intent.
    return object_from_conf(layer_conf, scope="layer", context=None)