Example #1
def train():
    """Train function."""
    args = get_args("train")
    if args.need_profiler:
        from mindspore.profiler.profiling import Profiler
        profiler = Profiler(output_path=args.outputs_dir,
                            is_detail=True,
                            is_show_op_path=True)
    ds = create_dataset(args)
    G_A = get_generator(args)
    G_B = get_generator(args)
    D_A = get_discriminator(args)
    D_B = get_discriminator(args)
    load_ckpt(args, G_A, G_B, D_A, D_B)
    image_pool_A = ImagePool(args.pool_size)
    image_pool_B = ImagePool(args.pool_size)
    generator = Generator(G_A, G_B, args.lambda_idt > 0)

    loss_D = DiscriminatorLoss(args, D_A, D_B)
    loss_G = GeneratorLoss(args, generator, D_A, D_B)
    optimizer_G = nn.Adam(generator.trainable_params(),
                          get_lr(args),
                          beta1=args.beta1)
    optimizer_D = nn.Adam(loss_D.trainable_params(),
                          get_lr(args),
                          beta1=args.beta1)

    net_G = TrainOneStepG(loss_G, generator, optimizer_G)
    net_D = TrainOneStepD(loss_D, optimizer_D)

    data_loader = ds.create_dict_iterator()
    reporter = Reporter(args)
    reporter.info('==========start training===============')
    for _ in range(args.max_epoch):
        reporter.epoch_start()
        for data in data_loader:
            img_A = data["image_A"]
            img_B = data["image_B"]
            res_G = net_G(img_A, img_B)
            fake_A = res_G[0]
            fake_B = res_G[1]
            # Update discriminators on real images and pooled historical fakes
            res_D = net_D(img_A, img_B, image_pool_A.query(fake_A),
                          image_pool_B.query(fake_B))
            reporter.step_end(res_G, res_D)
            reporter.visualizer(img_A, img_B, fake_A, fake_B)
        reporter.epoch_end(net_G)
        if args.need_profiler:
            profiler.analyse()
            break

    reporter.info('==========end training===============')
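
For context: ImagePool here is CycleGAN's usual history buffer, and query feeds the discriminator a mix of the newest fake image and previously generated ones, which stabilises adversarial training. A minimal sketch assuming the standard 50/50 swap rule (the actual class behind this snippet may differ in details):

import random

class ImagePool:
    """History buffer of generated images (sketch of the usual CycleGAN pool)."""

    def __init__(self, pool_size):
        self.pool_size = pool_size
        self.images = []

    def query(self, image):
        if self.pool_size == 0:        # pool disabled: always pass through
            return image
        if len(self.images) < self.pool_size:
            self.images.append(image)  # still filling: return the new image
            return image
        if random.random() > 0.5:      # 50%: swap with a stored older fake
            idx = random.randrange(self.pool_size)
            stored, self.images[idx] = self.images[idx], image
            return stored
        return image                   # 50%: use the new image directly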
Example #2
def predict():
    """Predict function."""
    args = get_args("predict")
    G_A = get_generator(args)
    G_B = get_generator(args)
    # Use BatchNorm2d with batch size 1, affine=False, training=True instead of InstanceNorm2d
    # Use the real mean and variance rather than moving_mean and moving_variance in BatchNorm2d
    G_A.set_train(True)
    G_B.set_train(True)
    load_ckpt(args, G_A, G_B)

    imgs_out = os.path.join(args.outputs_dir, "predict")
    os.makedirs(os.path.join(imgs_out, "fake_A"), exist_ok=True)
    os.makedirs(os.path.join(imgs_out, "fake_B"), exist_ok=True)
    args.data_dir = 'testA'
    ds = create_dataset(args)
    reporter = Reporter(args)
    reporter.start_predict("A to B")
    for data in ds.create_dict_iterator(output_numpy=True):
        img_A = Tensor(data["image"])
        path_A = str(data["image_name"][0], encoding="utf-8")
        fake_B = G_A(img_A)
        save_image(fake_B, os.path.join(imgs_out, "fake_B", path_A))
    reporter.info('save fake_B at %s', os.path.join(imgs_out, "fake_B",
                                                    path_A))
    reporter.end_predict()
    args.data_dir = 'testB'
    ds = create_dataset(args)
    reporter.dataset_size = args.dataset_size
    reporter.start_predict("B to A")
    for data in ds.create_dict_iterator(output_numpy=True):
        img_B = Tensor(data["image"])
        path_B = str(data["image_name"][0], encoding="utf-8")
        fake_A = G_B(img_B)
        save_image(fake_A, os.path.join(imgs_out, "fake_A", path_B))
    reporter.info('save fake_A at %s', os.path.join(imgs_out, "fake_A",
                                                    path_B))
    reporter.end_predict()
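
The comments above explain why both generators stay in training mode at inference time: with a batch of one image, a BatchNorm2d that normalises with the current batch statistics and has no affine parameters behaves exactly like InstanceNorm2d. A minimal MindSpore illustration of that equivalence (input values are arbitrary):

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

# Normalise with the statistics of the current batch; with batch size 1 this
# is per-image, per-channel normalisation, i.e. instance normalisation.
bn = nn.BatchNorm2d(num_features=3, affine=False, use_batch_statistics=True)
bn.set_train(True)

x = Tensor(np.random.uniform(-1.0, 1.0, size=(1, 3, 8, 8)).astype(np.float32))
y = bn(x)  # each channel of y has (near) zero mean and unit variance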
Example #3
import os

from config import DATA_PATH, config_dict
import keras.backend as k
from keras.callbacks import ModelCheckpoint, TensorBoard, EarlyStopping
import numpy as np
import tensorflow as tf

from src.classifiers.cnn import CNN
from src.classifiers.mlp import MLP
from src.classifiers.resnet import ResNet
from src.classifiers.utils import save_classifier, load_classifier
from src.utils import get_args, get_verbose_print, load_dataset, make_directory

# --------------------------------------------------------------------------------------------------- SETTINGS
args = get_args(__file__, options="bcdefrsvz")

v_print = get_verbose_print(args.verbose)

comp_params = {
    "loss": 'categorical_crossentropy',
    "optimizer": 'adam',
    "metrics": ['accuracy']
}
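# These settings are typically splatted into Keras' compile step, roughly:
#   model.compile(**comp_params)
# (illustrative; the actual call happens inside the classifier classes above)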

# --------------------------------------------------------------------------------------------- GET CLASSIFIER

# Get dataset
(X_train, Y_train), (X_test, Y_test), _, _ = load_dataset(args.dataset)

if os.path.isfile(args.dataset):
Example #4
import numpy as np
import time
from statistics import mean, stdev

from sklearn.metrics import accuracy_score
from sklearn.metrics.pairwise import rbf_kernel

from src.lmvsvm import *
from src.missing_views import set_random_views_to_value
from src.utils import dict_to_csv, load_flower17, get_args, get_view_dict, twod_array, splits_generator, load_uwave

args = get_args(__file__)
recons = args.reconstr

DATASET = "uwave"
L = 200

# Full sweep: missing-view ratios 0.05..0.50 and C in 10^-3..10^3
ratios_missing = [0.05 * i for i in range(1, 11)]
c_range = [10**i for i in range(-3, 4)]
# Quick-run override: a single ratio and a single C value
ratios_missing = [0.3]
c_range = [1]

X, Y, test_X, test_Y = load_uwave()

ITER = 2
PATH = "results/{}/missing/lmvsvm/{}".format(DATASET, recons)

print("learning on {}, missing views completed by {}. results saved in {}".
      format(DATASET, recons, PATH))

acc_list = []
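
# The excerpt stops here. Given the mean/stdev/accuracy_score imports, a
# plausible, purely hypothetical tail repeats the experiment ITER times and
# averages test accuracy:
for _ in range(ITER):
    # ... train LMVSVM with a ratios_missing fraction of views hidden,
    # complete them according to `recons`, then predict on the test set ...
    predictions = test_Y  # placeholder for the real model's predictions
    acc_list.append(accuracy_score(test_Y, predictions))
print("{}: {:.3f} +/- {:.3f}".format(DATASET, mean(acc_list), stdev(acc_list)))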
Example #5
                break
            loss, acc = train_step(x, y_true)
            pbar.update(i + 1, [("loss", loss), ("accuracy", acc)],
                        finalize=False)

        # 5.4. test
        for x, y_true in test_ds:
            loss, acc = test_step(x, y_true)
        pbar.update(
            train_steps_per_epoch,
            [("test_loss", loss), ("test_accuracy", acc)],
            finalize=True,
        )

        # 5.5. write metrics to tensorboard
        with train_writer.as_default():
            tf.summary.scalar("Loss", train_loss_avg.result(), step=epoch)
            tf.summary.scalar("Acc", train_accuracy.result(), step=epoch)
        with test_writer.as_default():
            tf.summary.scalar("Loss", test_loss_avg.result(), step=epoch)
            tf.summary.scalar("Acc", test_accuracy.result(), step=epoch)


if __name__ == "__main__":
    args = get_args()
    print(args)
    if args.custom_train:
        custom_train(args)
    else:
        builtin_train(args)
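
The train_writer and test_writer used in step 5.5 are created earlier in this (truncated) script; in TensorFlow 2 they would typically come from tf.summary.create_file_writer, for example:

import tensorflow as tf

# One writer per phase; separate log directories let TensorBoard overlay the
# train and test curves of the same run.
train_writer = tf.summary.create_file_writer("logs/train")
test_writer = tf.summary.create_file_writer("logs/test")

Example #6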
import os

import keras.backend as k
import tensorflow as tf

from src.attacks.carlini import CarliniL2Method
from src.attacks.deepfool import DeepFool
from src.attacks.fast_gradient import FastGradientMethod
from src.attacks.saliency_map import SaliencyMapMethod
from src.attacks.universal_perturbation import UniversalPerturbation
from src.attacks.virtual_adversarial import VirtualAdversarialMethod
from src.classifiers.utils import load_classifier

from src.utils import get_args, get_verbose_print, load_dataset, make_directory

# --------------------------------------------------------------------------------------------------- SETTINGS
args = get_args(__file__, load_classifier=True, options="adsv")
v_print = get_verbose_print(args.verbose)
alpha = 0.05  # constant for random perturbation

# get dataset
(X_train, Y_train), (X_test, Y_test), min_, max_ = load_dataset(args.dataset)

session = tf.Session()
k.set_session(session)

# Load classification model
MODEL_PATH = os.path.join(os.path.abspath(args.load), "")
classifier = load_classifier(MODEL_PATH, "best-weights.h5")

if args.save:
    SAVE_ADV = os.path.join(os.path.abspath(args.save), args.adv_method)
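
alpha above is described only as a constant for random perturbation; one natural, purely illustrative reading is a random-sign noise baseline of that magnitude to compare the crafted attacks against:

import numpy as np

# Random-sign noise with L-infinity magnitude alpha, clipped to the data range.
X_rand = X_test + alpha * np.sign(np.random.randn(*X_test.shape))
X_rand = np.clip(X_rand, min_, max_)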
Example #7
"""export file."""
import numpy as np

from mindspore import context, Tensor
from mindspore.train.serialization import export
from src.models import get_generator
from src.utils import get_args, load_ckpt

args = get_args("export")

context.set_context(mode=context.GRAPH_MODE, device_target=args.platform)

if __name__ == '__main__':
    G_A = get_generator(args)
    G_B = get_generator(args)
    # Use BatchNorm2d with batch size 1, affine=False, training=True instead of InstanceNorm2d
    # Use the real mean and variance rather than moving_mean and moving_variance in BatchNorm2d
    G_A.set_train(True)
    G_B.set_train(True)
    load_ckpt(args, G_A, G_B)

    input_shp = [1, 3, args.image_size, args.image_size]
    input_array = Tensor(
        np.random.uniform(-1.0, 1.0, size=input_shp).astype(np.float32))
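    # The excerpt ends before the actual export call; with MindSpore's export
    # API the script would finish roughly like this (file names illustrative):
    export(G_A, input_array, file_name="cyclegan_G_A", file_format="MINDIR")
    export(G_B, input_array, file_name="cyclegan_G_B", file_format="MINDIR")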