Example #1
import os

from absl import flags  # assumption: absl-style flags (tf.flags would also fit)

import utils
from model import Trigger_Model  # assumption: project module defining Trigger_Model


def main(_):
    config = flags.FLAGS
    # Pin the process to the requested GPU and silence TensorFlow's C++ logging.
    os.environ['CUDA_VISIBLE_DEVICES'] = config.gpu
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    extractor = utils.Extractor()
    extractor.Extract()
    # cut_len: assumed defined at module level (sequence cut-off length).
    loader = utils.Loader(cut_len)
    t_data = loader.load_trigger()
    trigger = Trigger_Model(t_data, loader.maxlen, loader.wordemb, config.mode)
    trigger.train_trigger()
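The snippet reads its settings from command-line flags but does not show how they are declared or how main is invoked. A minimal sketch, assuming absl-style flags; the flag names gpu and mode come from the snippet, while the defaults and help strings are invented:

from absl import app, flags

flags.DEFINE_string('gpu', '0', 'value for CUDA_VISIBLE_DEVICES')
flags.DEFINE_string('mode', 'train', 'run mode passed to Trigger_Model')

if __name__ == '__main__':
    app.run(main)  # app.run calls main(argv); the snippet ignores argv via `_`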
Example #2
import os

from absl import flags  # assumption: absl-style flags

import utils
from model import DMCNN  # assumption: project module defining DMCNN


def main(_):
    config = flags.FLAGS
    os.environ['CUDA_VISIBLE_DEVICES'] = config.gpu
    extractor = utils.Extractor()
    extractor.Extract()
    loader = utils.Loader()
    t_data = loader.load_trigger()
    a_data = loader.load_argument()
    # Stage 1: train the trigger classifier; it returns processed argument data.
    trigger = DMCNN(t_data, a_data, loader.maxlen, loader.max_argument_len,
                    loader.wordemb)
    a_data_process = trigger.train_trigger()
    # Stage 2: train the argument classifier on the stage-1 output.
    argument = DMCNN(t_data, a_data_process, loader.maxlen, loader.max_argument_len,
                     loader.wordemb, stage=config.mode, classify=config.classify)
    argument.train_argument()
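Both examples lean on the same utils.Loader surface, whose implementation is not shown. The skeleton below only records the interface the snippets actually touch and is otherwise hypothetical:

class Loader:
    """Hypothetical skeleton inferred from usage; not the real utils.Loader."""

    def __init__(self, cut_len=None):
        self.maxlen = 0              # longest trigger token sequence
        self.max_argument_len = 0    # longest argument token sequence
        self.wordemb = None          # pretrained word-embedding matrix

    def load_trigger(self):
        """Return training data for the trigger stage."""

    def load_argument(self):
        """Return training data for the argument stage."""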
Example #3
import os

import chainer
import numpy as np
from chainer import training

import models
import ppds
import utils


def main(args):
    chainer.set_debug(True)
    # Initialize the model whose features will be extracted
    model = models.archs[args.arch]()
    if args.finetune and hasattr(model, 'finetuned_model_path'):
        utils.finetuning.load_param(model.finetuned_model_path, model,
                                    args.ignore)
    if args.initmodel:
        print('Load model from', args.initmodel)
        chainer.serializers.load_npz(args.initmodel, model)
    if args.gpu >= 0:
        chainer.cuda.get_device(args.gpu).use()  # Make the GPU current
        model.to_gpu()

    outputdir = os.path.join(args.out, args.arch, 'extract')
    if args.initmodel is not None:
        outputdir = os.path.dirname(args.initmodel)
    # Load the mean image: prefer the model's own per-channel mean value,
    # otherwise fall back to the mean file given on the command line.
    if hasattr(model, 'mean_value'):
        mean = makeMeanImage(model.mean_value)
    else:
        mean = np.load(args.mean)

    val = ppds.PreprocessedDataset(args.val, args.root, mean, model.insize,
                                   False)
    val_iter = chainer.iterators.SerialIterator(val,
                                                args.val_batchsize,
                                                repeat=False,
                                                shuffle=False)

    # Set up an optimizer and a trainer. No training loop is ever run here;
    # the trainer only serves as a harness for the extractor extension, which
    # is invoked directly at the end.
    optimizer = chainer.optimizers.MomentumSGD()
    optimizer.setup(model)
    updater = training.StandardUpdater(val_iter, optimizer, device=args.gpu)
    trainer = training.Trainer(updater, (1, 'epoch'), outputdir)

    val_interval = (1, 'iteration')

    # Copy the chain with shared parameters to flip 'train' flag only in test
    eval_model = model.copy()
    eval_model.train = False
    val_extractor = utils.Extractor(val_iter, eval_model, device=args.gpu)
    val_extractor.layer_rank = eval_model.layer_rank[args.layer]
    val_extractor.layer_name = args.layer
    val_extractor.operation = args.operation
    val_extractor.save_features = args.savefeatures
    val_extractor.top = args.top
    if 'googlenet' in args.arch:
        val_extractor.lastname = 'validation/main/loss3'
    trainer.extend(val_extractor, trigger=val_interval)

    if args.resume:
        chainer.serializers.load_npz(args.resume, trainer)

    results = val_extractor(trainer)
    results['outputdir'] = outputdir

    return results
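The script calls makeMeanImage, but the helper is not included in the snippet. A minimal sketch, under the assumption that model.mean_value is a per-channel (e.g. BGR) mean to broadcast into a mean image; the 256x256 shape is a guess matching common ImageNet pipelines:

def makeMeanImage(mean_value):
    # Assumption: broadcast a per-channel mean (e.g. [104, 117, 123]) into a
    # (3, 256, 256) float32 image, mirroring what np.load(args.mean) returns.
    mean = np.zeros((3, 256, 256), dtype=np.float32)
    for channel, value in enumerate(mean_value):
        mean[channel] = value
    return mean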
Example #4
#!/usr/bin/env python
import logging
import utils
import json

# Set log level to info
logging.basicConfig(level=logging.INFO)

# Load the dataset description file
with open("./dataset.json") as f:
    dataset_description = json.load(f)

# Download resource
downloader = utils.Downloader(dataset_description, "../../data/raw")
downloader.download()

# Extract resources
extractor = utils.Extractor(dataset_description, "../../data/raw",
                            "../../data/processed")
extractor.extract()
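Neither utils.Downloader nor utils.Extractor is shown here. As an illustration only, a downloader of this shape could be as simple as the sketch below; the dataset.json schema (a "resources" list of url/filename pairs) is an assumption, not the repo's actual format:

import os
import urllib.request


class Downloader:
    """Illustrative stand-in for utils.Downloader; the schema is assumed."""

    def __init__(self, description, target_dir):
        self.resources = description.get("resources", [])
        self.target_dir = target_dir

    def download(self):
        os.makedirs(self.target_dir, exist_ok=True)
        for resource in self.resources:
            destination = os.path.join(self.target_dir, resource["filename"])
            if not os.path.exists(destination):  # skip files already fetched
                urllib.request.urlretrieve(resource["url"], destination)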