Example #1
def parse_mixin(less, parent=None, **kwargs):
    match = MIXIN.match(less)

    if not match:
        raise ValueError()

    code = match.group()

    contents = None

    if match.group('nested'):
        matched_length = len(match.group())

        remaining_less = less[matched_length:]

        contents = parse_nested(remaining_less)

        code += contents + '}'

    params = parse_params(match.group('params'))

    if contents:
        # In a mixin definition (one with a body), a lone value in the header
        # is really a parameter name, so shift it over.
        for param in params:
            if param['value'] and not param['name']:
                param['name'] = param['value']
                param['value'] = None

    name = match.group('name')

    if match.group('nested') and not match.group('param_detect'):
        raise ValueError()

    return Mixin(parent=parent, code=code, name=name, params=params,
                 contents=contents)
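A rough usage sketch, not part of the original example: it assumes the module-level MIXIN regex matches a LESS mixin header such as ".bordered(@width: 2px)" and that parse_nested, parse_params and Mixin are defined in the same parser module (none of them are shown above).

# Hypothetical input; parse_mixin raises ValueError if MIXIN does not match it.
less_source = '.bordered(@width: 2px) { border: @width solid black; }'
mixin = parse_mixin(less_source)
print(mixin.name, mixin.params)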
Example #2
    def __init__(self, initial_param_file, fasta_file):
        """
        Initialize TSequence to the dataset you are interested in investigating.
        Can only be called with the following strings `s`:
        'mu', '2mu', or '5mu'.

        e.g. TSequence('mu')
        """
        self.sequences = read_fasta_sequences_to_str(fasta_file)
        self.obs = observe_differences(self.sequences[0], self.sequences[1])
        self.theta = parse_params(initial_param_file)
        self.estimate = None
        self.likelihood = None
        self.initial_likelihood = None
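A minimal usage sketch for the constructor above; the file names are placeholders, not part of the original example, and read_fasta_sequences_to_str, observe_differences and parse_params are assumed to be available in the same module.

# Hypothetical call: an initial parameter file plus a FASTA file with at least two sequences.
tseq = TSequence('initial_params_mu.txt', 'alignment.fasta')
print(tseq.obs)    # differences observed between the first two sequences
print(tseq.theta)  # initial parameters parsed from the file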
Example #3
import re
import socket
import sys

import params
from framedSock import file_send

storage_dir = 'clientFiles/'  # directory used to send/receive files

switches_var_defaults = (
    (('-s', '--server'), 'server', '127.0.0.1:50001'),
    (('-d', '--debug'), 'debug', False),  # boolean (set if present)
    (('-?', '--usage'), 'usage', False),  # boolean (set if present)
)
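# Each entry above appears to be (flag aliases, destination name, default value);
# judging from the lookups below, params.parse_params presumably returns a dict
# keyed by those destination names (an assumption based on this snippet, not on
# documented behaviour of the params module).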

prog_name = 'file_client'
param_map = params.parse_params(switches_var_defaults)
server = param_map['server']
usage = param_map['usage']
debug = param_map['debug']

if usage:
    params.usage()

try:
    server_host, server_port = re.split(':', server)
    server_port = int(server_port)
except:
    print('Could not parse server:port from ', server)
    sys.exit(1)

s = None
# Try each address returned by getaddrinfo until one connects.
for res in socket.getaddrinfo(server_host, server_port, socket.AF_UNSPEC,
                              socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
        s.connect(sa)
    except OSError:
        s = None
        continue
    break

if s is None:
    print('Could not connect to', server)
    sys.exit(1)
Example #4
def main():
    # Parse args
    args = parse_cmdline()
    params = parse_params(args.template_path, args.param_path)

    # TODO: Check if number of available GPUs is same as number specified

    # No-reference quality metrics
    if params.experiment.train_mode == 'noref':
        print('### Experiment Type x (no ref) ###')
        # Get metadata for HOG, None for CNN
        for param_val in getattr(params.sensor, params.experiment.sat_param):
            qual_pipeline = QualityPipeline(params, 'full', None, param_name=params.experiment.sat_param, param_val=param_val)
            qual_pipeline.test()

    # Normalization
    elif params.experiment.train_mode == 'norm':
        print('### Experiment Type 2 (norm) ###')
        for fold_index in range(params.experiment.num_folds):
            # Get metadata for HOG, None for CNN
            for param_val in getattr(params.sensor, params.experiment.sat_param):
                norm_pipeline = NormPipeline(params, 'full', fold_index, param_name=params.experiment.sat_param, param_val=param_val)
                feat_mean, feat_std = norm_pipeline.norm()

    # Experiment type 0
    elif params.experiment.train_mode == 'basic':
        print('### Experiment Type 0 ###')
        for fold_index in range(params.experiment.num_folds):
            if params.model.method == 'hog':
                norm_pipeline = NormPipeline(params, 'train', fold_index)
                feat_mean, feat_std = norm_pipeline.norm()
            else:
                feat_mean, feat_std = None, None
            train_pipeline = TrainPipeline(params, 'train', fold_index, feat_mean=feat_mean, feat_std=feat_std)
            test_pipeline = TestPipeline(params, 'val', fold_index, feat_mean=feat_mean, feat_std=feat_std)
            train_pipeline.save()
            test_pipeline.test(0)
            for epoch_num in range(1, params.learning.num_epochs+1):
                train_pipeline.train(save_flag=True)
                test_pipeline.test(epoch_num)

    # Experiment type 1
    elif params.experiment.train_mode == 'orig':
        print('### Experiment Type 1 ###')
        mean_list, std_list = [], []
        for fold_index in range(params.experiment.num_folds):
            # Get metadata for HOG, None for CNN
            if params.model.method == 'hog':
                norm_pipeline = NormPipeline(params, 'train', fold_index)
                feat_mean, feat_std = norm_pipeline.norm()
                mean_list.append(feat_mean)
                std_list.append(feat_std)
            else:
                mean_list.append(None)
                std_list.append(None)
            # Create train pipeline
            train_pipeline = TrainPipeline(params, 'train', fold_index, 
                feat_mean=mean_list[fold_index], feat_std=std_list[fold_index])
            train_pipeline.save()
            for epoch_num in range(params.learning.num_epochs):
                train_pipeline.train(save_flag=True)

        for fold_index in range(params.experiment.num_folds):
            for param_val in getattr(params.sensor, params.experiment.sat_param):
                test_pipeline = TestPipeline(params, 'val', fold_index, 
                    param_name=params.experiment.sat_param, param_val=param_val, 
                    feat_mean=mean_list[fold_index], feat_std=std_list[fold_index])
                for epoch_num in range(params.learning.num_epochs+1):
                    test_pipeline.test(epoch_num)

    # Experiment type 3
    elif args.test_param_path is not None:
        feat_mean, feat_std = None, None
        test_params = parse_params(args.template_path, args.test_param_path)
        print('### Experiment Type 3 ###')
        # Get metadata for HOG, None for CNN
        for param_val in getattr(params.sensor, params.experiment.sat_param):
            for train_fold_index in range(params.experiment.num_folds):
                load_pipeline = TestPipeline(params, 'val', train_fold_index, 
                    param_name=params.experiment.sat_param, param_val=param_val,
                    feat_mean=feat_mean, feat_std=feat_std)
                model_path = load_pipeline.get_path(5)
                for test_fold_index in range(test_params.experiment.num_folds):
                    test_pipeline = TestPipeline(test_params, 'val', test_fold_index, 
                        param_name=params.experiment.sat_param, param_val=param_val,
                        feat_mean=feat_mean, feat_std=feat_std, train_fold_index=train_fold_index)
                    test_pipeline.test_map(0, model_path=model_path)

    # Experiment type 2
    elif params.experiment.train_mode == 'degrade':
        print('### Experiment Type 2 ###')
        for fold_index in range(params.experiment.num_folds):
            # Get metadata for HOG, None for CNN
            for param_val in getattr(params.sensor, params.experiment.sat_param):
                if params.model.method == 'hog':
                    norm_pipeline = NormPipeline(params, 'train', fold_index, param_name=params.experiment.sat_param, param_val=param_val)
                    feat_mean, feat_std = norm_pipeline.norm()
                else:
                    feat_mean, feat_std = None, None
                train_pipeline = TrainPipeline(params, 'train', fold_index, param_name=params.experiment.sat_param, param_val=param_val,
                    feat_mean=feat_mean, feat_std=feat_std)
                test_pipeline = TestPipeline(params, 'val', fold_index, param_name=params.experiment.sat_param, param_val=param_val,
                    feat_mean=feat_mean, feat_std=feat_std)
                train_pipeline.save()
                test_pipeline.test(0)
                # Train for num_epochs
                for _ in range(1, params.learning.num_epochs+1):
                    epoch_num = train_pipeline.train(save_flag=True)
                    test_pipeline.test(epoch_num)

    # Invalid experiment type
    else:
        raise Exception('Invalid experiment train_mode: {}'.format(params.experiment.train_mode))
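The attribute accesses in main() imply a particular shape for the object returned by parse_params(template_path, param_path). The outline below is reconstructed from this snippet alone and is not the actual template file:

# params.experiment.train_mode  -> one of 'noref', 'norm', 'basic', 'orig', 'degrade'
# params.experiment.num_folds   -> number of cross-validation folds
# params.experiment.sat_param   -> name of a params.sensor attribute to sweep over
# params.sensor.<sat_param>     -> iterable of values for that sensor parameter
# params.model.method           -> 'hog' triggers feature normalization; otherwise mean/std stay None
# params.learning.num_epochs    -> number of training epochs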
Example #5
import os
from sys import argv

import tensorflow as tf
import numpy as np

from data.preprocessing import get_dataset, augment
from unet.model import unet
from unet.loss import superloss
from unet.metrics import Dice, DiceX, DiceG, DiceM
from unet.utils import crop_size

from unet.preconf import conf

os.environ['TF_FORCE_GPU_ALLOW_GROWTH'] = 'true'

params = parse_params(argv[1:])
# initialization of the data split and classes

# list of ids of the images used for training
imgs_train = params.imgs_train

# list of ids of the classes
terms = params.terms

# list of ids of the images used during training to select which model to keep (validation)
imgs_val = params.imgs_val

# list of ids of the images used for testing, for which full-size masks will be written
imgs_test = params.imgs_test
datadir = params.datadir
Example #6
import os
import datetime
from os.path import join

import torch

from ssn.ssn_dataset import SSN_Dataset
# from ssn.ssn_submodule import Contract
from ssn.ssn import Relight_SSN, baseline_2_tbaseline, baseline_2_touchloss
from utils.utils_file import get_cur_time_stamp, create_folder
from utils.net_utils import save_model, get_lr, set_lr
from utils.visdom_utils import setup_visdom, visdom_plot_loss, visdom_log, visdom_show_batch
from utils.tensorboard_utils import *
from torch.utils.tensorboard import SummaryWriter
from params import params as options, parse_params
import matplotlib.pyplot as plt
from evaluation import exp_predict, exp_metric
import pickle

# parse args
params = parse_params()
print("Params: {}".format(params))
exp_name = params.exp_name
# cur_viz = setup_visdom(params.vis_port)
tensorboard_folder = 'tensorboard_log/runs'
os.makedirs(tensorboard_folder, exist_ok=True)
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
writer = SummaryWriter(
    join(tensorboard_folder, '{}_{}'.format(exp_name, timestamp)))

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

if params.cpu:
    device = torch.device('cpu')

print("Device: ", device)