Example #1
    def test_with_name_none(self):
        c = {'x': 1}

        @configurable(ConfigsView(c), with_name=True)
        def foo(x, name='name'):
            return x, name

        self.assertEqual(foo(), (1, 'name'))
Example #2
    def test_not_existed_key(self):
        config = dict()
        cv = ConfigsView(config)

        @configurable(cv['foo'])
        def foo(a, b=2):
            return [a, b]

        self.assertEqual(foo(1), [1, 2])
Example #3
    def test_no_args(self):
        def foo(a, b, *, c, d, e=4):
            pass

        result = parse_configs(foo,
                               _config_object=ConfigsView({
                                   'a': 0,
                                   'b': 1,
                                   'c': 2
                               }),
                               d=3)
        self.assertEqual(result.args, (0, 1))
        self.assertEqual(result.kwargs, {'c': 2, 'd': 3, 'e': 4})
Example #4
    def test_basic(self):
        def foo(a, b, *, c, d, e=4):
            pass

        result = parse_configs(foo,
                               0,
                               _config_object=ConfigsView({
                                   'b': 1,
                                   'c': 2
                               }),
                               d=3)
        expect = {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4}
        args = (0, 1)
        kwargs = {'c': 2, 'd': 3, 'e': 4}
        self.assertEqual(args, result.args)
        self.assertEqual(kwargs, result.kwargs)
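
The two parse_configs examples above pin down its resolution order in the non-conflicting case: explicitly passed arguments bind first, configuration values fill in whatever is still missing, and the function's own defaults cover the rest. The sketch below is a minimal, illustrative approximation of that behaviour, not the dxpy.configs implementation; parse_configs_sketch is a made-up name, and it assumes the config object returns None for missing keys, as the ConfigsView tests further down show.

import inspect


def parse_configs_sketch(func, *args, _config_object=None, **kwargs):
    # Bind whatever was passed explicitly, then fill the remaining parameters
    # from the config object and, failing that, from the function defaults.
    sig = inspect.signature(func)
    bound = sig.bind_partial(*args, **kwargs)
    for name, param in sig.parameters.items():
        if name in bound.arguments:
            continue  # supplied positionally or by keyword
        # Assumes a ConfigsView-like object: missing keys read as None.
        value = _config_object[name] if _config_object is not None else None
        if value is None and param.default is not inspect.Parameter.empty:
            value = param.default
        if value is not None:
            bound.arguments[name] = value
    return bound  # bound.args / bound.kwargs mirror result.args / result.kwargs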
Example #5
    def test_class_init(self):
        config = {'a': 1, 'b': 2}
        cv = ConfigsView(config)

        class A:
            @configurable(cv)
            def __init__(self, a):
                self.a = a

        class B(A):
            @configurable(cv)
            def __init__(self, b):
                super().__init__()
                self.b = b

        b = B()
        self.assertEqual(b.a, 1)
        self.assertEqual(b.b, 2)
Example #6
    def test_class_init_with_name(self):
        config = {'obj1': {'a': 1, 'b': 2}, 'obj2': {'a': 3, 'b': 4}}
        cv = ConfigsView(config)

        class A:
            @configurable(cv, with_name=True)
            def __init__(self, a, name):
                self.a = a

        class B(A):
            @configurable(cv, with_name=True)
            def __init__(self, b, name):
                super().__init__(name=name)
                self.b = b

        ob1 = B(name='obj1')
        ob2 = B(name='obj2')
        self.assertEqual(ob1.a, 1)
        self.assertEqual(ob1.b, 2)
        self.assertEqual(ob2.a, 3)
        self.assertEqual(ob2.b, 4)
Example #7
def get_configs_view():
    from dxpy.configs import ConfigsView
    # `config` here is a module-level configuration dict defined elsewhere
    # in the source module.
    return ConfigsView(config)
Example #8
    def test_inherence_2(self):
        c = {'k1': {'k2': {'k3': 'v1'}, 'k4': 'v2'}}
        cv = ConfigsView(c, 'k1/k2/k3')
        self.assertEqual(cv['k4'], 'v2')
Example #9
    def test_none_path(self):
        cv = ConfigsView(self.c)
        self.assertIsNone(cv['aaa/bbb'])
Example #10
    def test_none_inherence(self):
        cv = ConfigsView(self.c)
        self.assertIsNone(cv.get('aaa')['bbb'])
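
Examples #8 to #10, together with the basepath and inheritance tests further below, exercise ConfigsView's lookup rules: keys may be '/'-joined paths, a view can carry a base path, keys missing at the base path fall back to ancestor levels ("inheritance"), and absent keys read as None rather than raising. The class below is a minimal, self-contained sketch of just those rules under the made-up name MiniConfigsView; the real ConfigsView in dxpy.configs is richer than this.

class MiniConfigsView:
    def __init__(self, data, base_path=''):
        self._data = data
        self._base = [p for p in base_path.split('/') if p]

    def _resolve(self, parts):
        # Walk the nested dict; any miss yields None.
        node = self._data
        for p in parts:
            if not isinstance(node, dict) or p not in node:
                return None
            node = node[p]
        return node

    def __getitem__(self, key):
        key_parts = [p for p in key.split('/') if p]
        # Try the full base path first, then fall back towards the root
        # ("inheritance" in the tests above).
        for cut in range(len(self._base), -1, -1):
            value = self._resolve(self._base[:cut] + key_parts)
            if value is not None:
                return MiniConfigsView(value) if isinstance(value, dict) else value
        return None

    def get(self, key):
        # Missing branches behave like empty views, so chained lookups such
        # as cv.get('aaa')['bbb'] return None instead of raising.
        value = self[key]
        return value if value is not None else MiniConfigsView({})

For instance, MiniConfigsView({'k1': {'k2': {'k3': 'v1'}, 'k4': 'v2'}}, 'k1/k2/k3')['k4'] evaluates to 'v2', matching test_inherence_2 above.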
Example #11
import tensorflow as tf
from dxpy.configs import configurable, ConfigsView
from dxpy.learn.config import config

# CROP_OFFSET and DEFAULT_FILE_NAME are module-level constants defined
# elsewhere in the source module.


def data_type_tf(key):
    if key == 'id':
        return tf.int64
    else:
        return tf.float32


def data_shape(key):
    return {
        'sinogram': [640, 128 - 2 * CROP_OFFSET],
        'id': [],
    }[key]


@configurable(ConfigsView(config, 'datasets/mice_sinograms'))
def _h5_file(path: str, file_name=DEFAULT_FILE_NAME):
    from dxpy.core.path import Path
    return str(Path(path) / file_name)


def _load_sample(idx, data):
    result = {'id': idx, 'sinogram': data[idx, ...].T}
    return result


def _processing(result):
    import numpy as np
    result['sinogram'] = np.concatenate(
        [result['sinogram']] * 2)[:, CROP_OFFSET:-CROP_OFFSET]
    return result
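
The @configurable call above reads `path` (and optionally `file_name`) from the 'datasets/mice_sinograms' branch of the global config. A hypothetical layout that would satisfy it is sketched below; only the branch name comes from the code, the path value is a placeholder, and DEFAULT_FILE_NAME stays whatever the source module defines.

example_config = {
    'datasets': {
        'mice_sinograms': {
            'path': '/data/mice_sinograms',  # placeholder value
            # 'file_name' may be omitted; DEFAULT_FILE_NAME is used instead
        }
    }
}
# With such content in place, _h5_file() resolves `path` from the branch and
# returns str(Path(path) / file_name).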
Example #12
# from ..config import get_configs_view, config
import tensorflow as tf
from dxpy.configs import configurable, ConfigsView
from dxpy.learn.config import config
from .base import NodeKeys
import warnings


def mean_square_error(label, data):
    # Note: despite the name, this returns the *root* of the mean squared
    # error (RMSE).
    with tf.name_scope('mean_squared_error'):
        return tf.sqrt(tf.reduce_mean(tf.square(label - data)))


def l1_error(label, data):
    with tf.name_scope('l1_error'):
        return tf.reduce_mean(tf.abs(label - data))

@configurable(ConfigsView(config).get('poission_loss'))
def poission_loss(label, data, *, compute_full_loss=False):
    with tf.name_scope('poission_loss'):
        label = tf.maximum(label, 0.0)
        data = tf.maximum(data, 0.0)
        # return log_possion_loss(tf.log(label), data)
        return tf.reduce_mean(tf.keras.losses.poisson(label, data))


def log_possion_loss(log_label, data, *, compute_full_loss=False):
    """
    log_label: log value of expectation (inference)
    data: Poission sample
    """
    with tf.name_scope('log_poission_loss'):
        data = tf.maximum(data, 0.0)
Example #13
    def test_basepath2(self):
        cv = ConfigsView(self.c, 'k3/k3_1')
        self.assertEqual(cv['k3_3_1'], 'v_3')
Example #14
    def test_basepath1(self):
        cv = ConfigsView(self.c, 'k2')
        self.assertEqual(cv['k2_1'], 'v2_1')
Example #15
    def test_basic_dict(self):
        cv = ConfigsView(self.c)
        self.assertEqual(cv['k1'], 'v1')
        self.assertEqual(cv['k2']['k2_1'], 'v2_1')
        self.assertEqual(cv['k2']['k2_2'], 'v2_2')
        self.assertEqual(cv['k3']['k3_1']['k3_3_1'], 'v_3')
Example #16
def infer_mct(dataset, nb_samples, output):
    """
    Use network in current directory as input for inference
    """
    import tensorflow as tf
    from dxpy.learn.dataset.api import get_dataset
    from dxpy.learn.net.api import get_network
    from dxpy.configs import ConfigsView
    from dxpy.learn.config import config
    import numpy as np
    import yaml
    from dxpy.debug.utils import dbgmsg
    from tqdm import tqdm
    print('Using dataset file:', dataset)
    data_raw = np.load(dataset)
    data_raw = {k: np.array(data_raw[k]) for k in data_raw.keys()}
    config_view = ConfigsView(config)

    def data_key(nd):
        return 'image{}x'.format(2**nd)

    def tensor_shape(key):
        shape_origin = data_raw[key].shape
        return [1] + list(shape_origin[1:3]) + [1]

    with tf.name_scope('inputs'):
        keys = ['input/image{}x'.format(2**i) for i in range(4)]
        keys += ['label/image{}x'.format(2**i) for i in range(4)]
        dataset = {
            k: tf.placeholder(tf.float32, tensor_shape(data_key(i % 4)))
            for i, k in enumerate(keys)
        }

    network = get_network('network/srms', dataset=dataset)
    nb_down_sample = network.param('nb_down_sample')
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.train.MonitoredTrainingSession(checkpoint_dir='./save',
                                             config=config,
                                             save_checkpoint_secs=None)

    STAT_MEAN = 9.93
    STAT_STD = 7.95
    STAT_MEAN_LOW = 9.93 * (4.0**nb_down_sample)
    STAT_STD_LOW = 7.95 * (4.0**nb_down_sample)
    BASE_SHAPE = (384, 384)

    def crop_image(tensor, target_shape=None):
        if target_shape is None:
            target_shape = BASE_SHAPE
        if len(tensor.shape) == 4:
            tensor = tensor[0, :, :, 0]
        # o1 = (tensor.shape[0] - target_shape[0]) // 2
        o1 = tensor.shape[0] // 2
        o2 = (tensor.shape[1] - target_shape[1]) // 2
        return tensor[o1:o1 + target_shape[0], o2:-o2]

    input_key = data_key(nb_down_sample)
    dbgmsg('input_key:', input_key)

    def run_infer(idx):
        low_phan = np.reshape(data_raw[input_key][idx, :, :],
                              tensor_shape(input_key))
        # low_phan = (low_phan - STAT_MEAN) / STAT_STD
        feeds = {dataset['input/image{}x'.format(2**nb_down_sample)]: low_phan}
        inf, itp = sess.run(
            [network['outputs/inference'], network['outputs/interp']],
            feed_dict=feeds)
        infc = crop_image(inf)
        itpc = crop_image(itp)
        infc = infc * STAT_STD_LOW + STAT_MEAN_LOW
        itpc = itpc * STAT_STD_LOW + STAT_MEAN_LOW
        return infc, itpc

    phans = []
    img_highs = []
    img_lows = []
    img_itps = []
    img_infs = []
    NB_MAX = data_raw['phantom'].shape[0]
    for idx in tqdm(range(nb_samples), ascii=True):
        if idx >= NB_MAX:  # valid indices are 0..NB_MAX-1
            import sys
            print(
                'Index {} out of range {}, stop running and store current result...'
                .format(idx, NB_MAX),
                file=sys.stderr)
            break

        phans.append(data_raw['phantom'][idx, ...])
        img_high = crop_image(data_raw[data_key(0)][idx, :, :])
        img_high = img_high * STAT_STD + STAT_MEAN
        # img_high = img_high * STAT_STD / \
        # (4.0**nb_down_sample) + STAT_MEAN / (4.0**nb_down_sample)
        img_highs.append(img_high)
        img_low = crop_image(data_raw[data_key(nb_down_sample)][idx, ...],
                             [s // (2**nb_down_sample) for s in BASE_SHAPE])
        img_low = img_low * STAT_STD_LOW + STAT_MEAN_LOW
        img_lows.append(img_low)
        img_inf, img_itp = run_infer(idx)
        img_infs.append(img_inf)
        img_itps.append(img_itp)

    img_highs = np.array(img_highs)
    img_infs = np.array(img_infs) / (4.0**nb_down_sample)
    img_itps = np.array(img_itps) / (4.0**nb_down_sample)
    img_lows = np.array(img_lows) / (4.0**nb_down_sample)

    results = {
        'phantom': phans,
        'sino_itps': img_itps,
        'sino_infs': img_infs,
        'sino_highs': img_highs,
        'sino_lows': img_lows
    }
    np.savez(output, **results)
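
For reference, a small sketch of reading back the archive that infer_mct writes; the key names come from the `results` dict above, while the file name is a placeholder.

import numpy as np

out = np.load('infer_mct_output.npz')  # placeholder file name
phantoms = out['phantom']     # ground-truth phantoms
infs = out['sino_infs']       # network inferences (cropped and rescaled)
itps = out['sino_itps']       # interpolation baselines
highs, lows = out['sino_highs'], out['sino_lows']
print(infs.shape, itps.shape)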
Example #17
def infer_sino_sr(dataset, nb_samples, output):
    """
    Use network in current directory as input for inference
    """
    import tensorflow as tf
    from dxpy.learn.dataset.api import get_dataset
    from dxpy.learn.net.api import get_network
    from dxpy.configs import ConfigsView
    from dxpy.learn.config import config
    import numpy as np
    import yaml
    from dxpy.debug.utils import dbgmsg
    from tqdm import tqdm
    dbgmsg(dataset)
    data_raw = np.load(dataset)
    data_raw = {k: np.array(data_raw[k]) for k in data_raw.keys()}
    config_view = ConfigsView(config)

    def tensor_shape(key):
        shape_origin = data_raw[key].shape
        return [1] + list(shape_origin[1:3]) + [1]

    with tf.name_scope('inputs'):
        keys = ['input/image{}x'.format(2**i) for i in range(4)]
        keys += ['label/image{}x'.format(2**i) for i in range(4)]
        dataset = {
            k: tf.placeholder(tf.float32, tensor_shape(k))
            for k in keys
        }

    network = get_network('network/srms', dataset=dataset)
    nb_down_sample = network.param('nb_down_sample')
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    sess = tf.train.MonitoredTrainingSession(checkpoint_dir='./save',
                                             config=config,
                                             save_checkpoint_secs=None)

    STAT_STD = 9.27
    STAT_MEAN = 9.76
    BASE_SHAPE = (640, 320)

    dataset_configs = config_view['dataset']['srms']
    with_noise = dataset_configs['with_poission_noise']
    if with_noise:
        PREFIX = 'input'
    else:
        PREFIX = 'label'

    def crop_sinogram(tensor, target_shape=None):
        if target_shape is None:
            target_shape = BASE_SHAPE
        if len(tensor.shape) == 4:
            tensor = tensor[0, :, :, 0]
        o1 = (tensor.shape[0] - target_shape[0]) // 2
        o2 = (tensor.shape[1] - target_shape[1]) // 2
        return tensor[o1:-o1, o2:-o2]

    def run_infer(idx):
        input_key = '{}/image{}x'.format(PREFIX, 2**nb_down_sample)
        low_sino = np.reshape(data_raw[input_key][idx, :, :],
                              tensor_shape(input_key))
        low_sino = (low_sino - STAT_MEAN) / STAT_STD
        feeds = {dataset['input/image{}x'.format(2**nb_down_sample)]: low_sino}
        inf, itp = sess.run(
            [network['outputs/inference'], network['outputs/interp']],
            feed_dict=feeds)
        infc = crop_sinogram(inf)
        itpc = crop_sinogram(itp)
        infc = infc * STAT_STD + STAT_MEAN
        itpc = itpc * STAT_STD + STAT_MEAN
        return infc, itpc

    phans = []
    sino_highs = []
    sino_lows = []
    sino_itps = []
    sino_infs = []
    NB_MAX = data_raw['phantom'].shape[0]
    for idx in tqdm(range(nb_samples), ascii=True):
        if idx >= NB_MAX:  # valid indices are 0..NB_MAX-1
            import sys
            print(
                'Index {} out of range {}, stop running and store current result...'
                .format(idx, NB_MAX),
                file=sys.stderr)
            break

        phans.append(data_raw['phantom'][idx, ...])
        sino_highs.append(
            crop_sinogram(data_raw['{}/image1x'.format(PREFIX)][idx, :, :]))
        sino_lows.append(
            crop_sinogram(
                data_raw['{}/image{}x'.format(PREFIX, 2**nb_down_sample)][idx,
                                                                          ...],
                [s // (2**nb_down_sample) for s in BASE_SHAPE]))
        sino_inf, sino_itp = run_infer(idx)
        sino_infs.append(sino_inf)
        sino_itps.append(sino_itp)

    results = {
        'phantom': phans,
        'sino_itps': sino_itps,
        'sino_infs': sino_infs,
        'sino_highs': sino_highs,
        'sino_lows': sino_lows
    }
    np.savez(output, **results)
Example #18
    def test_inherence(self):
        cv = ConfigsView(self.c, 'k3/k3_1')
        self.assertEqual(cv['k3_2'], 'v3_0')
Example #19
from dxpy.configs import ConfigsView
default_config = {
    'backend': 'astra',
    'astra': {},
    'projection': dict(),
    'reconstruction': dict()
}
config = ConfigsView(default_config)
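
A couple of illustrative reads against the view defined above, following the lookup behaviour shown in the ConfigsView tests on this page; 'astra/some_option' is a hypothetical key used only to show the None fallback.

assert config['backend'] == 'astra'
assert config['astra/some_option'] is None  # missing keys read as None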
Example #20
    def test_name(self):
        cv = ConfigsView(self.c)
        self.assertEqual(cv['k2/k2_1'], 'v2_1')
Example #21
from pprint import pprint

import numpy as np
import tensorflow as tf
from dxpy.learn.session import Session
from dxpy.learn.train.summary import SummaryWriter
from dxpy.learn.utils.general import pre_work
from tqdm import tqdm
from dxpy.learn.config import config
import yaml
from dxpy.configs import configurable, ConfigsView


@configurable(ConfigsView(config).get('train'))
def get_train_configs(summary_freq=1000, save_freq=10000, steps=100000000):
    return {'summary_freq': summary_freq,
            'save_freq': save_freq,
            'steps': steps}


def train(definition_func):
    with open('dxln.yml') as fin:
        ycfg = yaml.load(fin)
    config.update(ycfg)
    pre_work()
    train_cfgs = get_train_configs()
    steps = train_cfgs['steps']
    summary_freq = train_cfgs['summary_freq']
    save_freq = train_cfgs['save_freq']
    network, summary = definition_func(ycfg)
    session = Session()
Example #22
    def test_none(self):
        cv = ConfigsView(self.c)
        self.assertIsNone(cv['aaa'])