def launch_servers(config_fn, which_set=['train', 'valid'], verbose=False):
    """"""
    global DEFAULT_PORT

    config = load_config(config_fn)
    data_servers = {}
    ports = {}
    port = DEFAULT_PORT

    for target in config.target:
        data_servers[target] = {}
        ports[target] = {}

        for dset in which_set:
            if dset == 'valid':
                ports[target][dset] = port + 1
            else:
                ports[target][dset] = port

            args = [
                'python', '-m', 'data_server.server', '--target', target,
                '--which-set', dset, '--port',
                str(ports[target][dset]), '--config-fn', config_fn
            ]

            if verbose:
                data_servers[target][dset] = subprocess.Popen(args)
            else:
                with open(os.devnull, 'w') as devnull:
                    data_servers[target][dset] = subprocess.Popen(
                        args, stdout=devnull, stderr=devnull)
        port += 10

    return data_servers, ports
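launch_servers returns the Popen handles so the caller can stop the spawned processes later (the data_context example further down delegates this to a kill_servers helper). Below is a minimal teardown sketch assuming the nested {target: {which_set: Popen}} layout built above; stop_servers is illustrative and not the repository's kill_servers.

import subprocess

def stop_servers(data_servers, timeout=5):
    """Terminate every spawned data-server process and wait for it to exit."""
    # ask each server to shut down
    for procs_by_set in data_servers.values():
        for proc in procs_by_set.values():
            proc.terminate()
    # reap the processes, force-killing any that hang around
    for procs_by_set in data_servers.values():
        for proc in procs_by_set.values():
            try:
                proc.wait(timeout=timeout)
            except subprocess.TimeoutExpired:
                proc.kill()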
Example 2
def reload_config():
    global c
    global label_config_line
    global analytics
    c = load_config()
    label_config_line = config_line_stripped(open(c['label_config']).read())
    if analytics is None:
        analytics = Analytics(label_config_line,
                              c.get('collect_analytics', True))
    else:
        analytics.update_info(label_config_line,
                              c.get('collect_analytics', True))
Example 3
def initiate_data_server(target, which_set, port, config_fn):
    """
    """
    config = load_config(config_fn)

    print('Initialize Data Server...')
    # dataset = MSD(
    #     target=target,
    #     which_set=which_set,
    #     config=config,
    # )
    dataset = MSDMel(target=target, which_set=which_set, config=config)
    launch_data_server(dataset, port, config)
Example 4
    def __init__(self, model_fn, task, out_dir=None, hop_sz=1.):
        """"""
        super(MTLExtractor, self).__init__(task, out_dir, hop_sz, prob=True)

        # load configuration for model
        if os.path.exists(model_fn):
            model_id = os.path.splitext(os.path.basename(model_fn))[0]
            self.model_id = model_id.split('_state')[0]
            model_state = joblib.load(model_fn)
            self.config = namedtupled.map(model_state['config'])
        else:
            self.model_id = 'rnd'
            # load default config and change task as rand
            self.config = load_config('config/config.example.json')
            self.config.target[0] = 'rand'

        self.out_fn = os.path.join(
            self.root, self.model_id + '_{}_feature.h5'.format(self.task))

        self.targets = self.config.target

        # load model
        self.model = Model(self.config)

        # variable set up
        self.sr = self.config.hyper_parameters.sample_rate
        self.length = self.config.hyper_parameters.patch_length
        self.n_fft = self.config.hyper_parameters.n_fft
        self.hop_sz_trn = self.config.hyper_parameters.hop_size

        self.input = self.config.hyper_parameters.input
        self.hop = int(self.hop_sz * self.sr)
        sig_len = int(self.sr * self.length)
        self.sig_len = sig_len - sig_len % self.hop_sz_trn

        # prepare preprocessor if needed
        if self.config.hyper_parameters.input == 'melspec':
            self.melspec = MelSpectrogramGPU(2, self.sr, self.n_fft,
                                             self.hop_sz_trn)

        # set feature layer names
        branch_at = self.config.hyper_parameters.branch_at
        if isinstance(branch_at, (int, float)):
            self.feature_layers = ['{}.fc'.format(t) for t in self.targets]

        elif isinstance(branch_at, (str, unicode)) and branch_at == "fc":  # 'unicode' exists only on Python 2
            self.feature_layers = ['fc']

        self._prepare_db()
        super(MTLExtractor, self).post_init()
        self.hf.attrs['targets'] = [t.encode() for t in self.targets]
Example 5
def main(config_fn, data_verbose=False):
    """"""
    # load config file
    config_fn = os.path.abspath(config_fn)
    config = load_config(config_fn)

    # launch data servers
    with data_context(config_fn,
                      which_set=['train', 'valid'],
                      verbose=data_verbose) as data_streams:

        # initialize trainer
        trainer = Trainer(config, data_streams)

        # train
        trainer.fit()
@contextmanager  # requires: from contextlib import contextmanager
def data_context(config_fn, which_set, verbose=False):
    """Launch local data servers (or connect to remote ones) and yield their stream manager."""
    config = load_config(config_fn)

    # check remote
    if hasattr(config.data_server, 'ports'):
        ports = namedtupled.reduce(config.data_server.ports)
        remote_server = True
    else:
        servers, ports = launch_servers(config_fn, which_set, verbose)
        remote_server = False
    streams = StreamManager(ports, config)

    yield streams

    # tear down data servers only when it's local
    if not remote_server:
        kill_servers(servers)
Example 7
def test_model(config_fn, out_dir):
    """"""
    config = load_config(config_fn)
    tasks = config.target
    model = Model(config)

    y, sr = load_test_audio(config)
    x = np.repeat(y[None, None, 55040*2:55040*3], 2, axis=1)
    X = np.array(
        [np.abs(librosa.stft(y_, n_fft=1024, hop_length=256))
         for y_ in x[0]])
    np.save(os.path.join(out_dir, 'test_input.npy'), X)

    Z = {}
    for task in tasks:
        Z[task] = model.predict(task, x)
        np.save(
            os.path.join(
                out_dir, 'test_recon_{}.npy'.format(task)),
            Z[task]
        )
Example 8
def reload_config():
    global c
    global label_config_line
    global analytics
    global ml_backend
    global project
    c = load_config()

    label_config_line = config_line_stripped(open(c['label_config']).read())
    if analytics is None:
        analytics = Analytics(label_config_line,
                              c.get('collect_analytics', True))
    else:
        analytics.update_info(label_config_line,
                              c.get('collect_analytics', True))
    # configure project
    if project is None:
        project = Project(label_config=label_config_line)
    # configure machine learning backend
    if ml_backend is None:
        ml_backend_params = c.get('ml_backend')
        if ml_backend_params:
            ml_backend = MLBackend.from_params(ml_backend_params)
            project.connect(ml_backend)
def test():
    cfg = load_config('../utils/configurations/simulated_camera_24bit.json')

    # luminance test
    inputs = {'luminance': np.linspace(0.01, 10000, 100),  # 0.01 to 10000 cd/m^2
              'f_num': 8, 'exposure_time': 0.01, 'iso': 100}
    unit_test(cfg, inputs)

    # F-number test
    inputs = {'f_num': np.logspace(1, 4, 10, base=2),  # 2 to 16 with 0.3 stop
              'luminance': 1000, 'exposure_time': 0.01, 'iso': 100}
    unit_test(cfg, inputs)

    # exposure time test
    inputs = {'exposure_time': np.linspace(0.001, 0.5, 100),  # 0.001 to 0.5 second
              'luminance': 1000, 'f_num': 16, 'iso': 100}
    unit_test(cfg, inputs)

    # ISO speed test
    inputs = {'iso': 100 * np.logspace(0, 4, 13, base=2),  # ISO100 to ISO1600
              'luminance': 1000, 'f_num': 8, 'exposure_time': 0.005}
    unit_test(cfg, inputs)

    plt.show(block=False)
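Each call above hands unit_test a dict in which exactly one value is a NumPy array (the parameter being swept) while the rest are fixed scalars. The helper below sketches how such a dict can be split, assuming that convention holds; split_sweep is illustrative and not part of the repository.

import numpy as np

def split_sweep(inputs):
    """Separate the single array-valued (swept) entry from the fixed scalar settings."""
    swept = {k: v for k, v in inputs.items() if isinstance(v, np.ndarray)}
    fixed = {k: v for k, v in inputs.items() if k not in swept}
    assert len(swept) == 1, 'expected exactly one swept parameter'
    (name, values), = swept.items()
    return name, values, fixed

# For the ISO sweep above this yields name == 'iso', values.shape == (13,)
# and fixed == {'luminance': 1000, 'f_num': 8, 'exposure_time': 0.005}.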
Example 10
import os

import tifffile

from simulation.simulated_camera import SimulatedCamera
from simulation.simulated_patterns import generate_dts_luminance, generate_dts_luminance_map
from cdp_calculator import CDPCalculator
from dts.roi_extractor import DTSRoIExtractor
from utils.misc import load_config


MAX_LUMINANCE = 50000  # luminance of the light source, in cd/m^2
NUM_IMAGES = 3  # capturing several frames will produce more accurate results

# load the configuration of the simulated camera
cfg = load_config('./utils/configurations/simulated_camera_24bit.json')

simulated_images_dir = './simulation/simulated_images'  # directory to save the simulated images
os.makedirs(simulated_images_dir, exist_ok=True)

# generate simulated luminance map of CDP pattern in DTS device
dts_luminance = generate_dts_luminance(max_luminance=MAX_LUMINANCE)
dts_luminance_map = generate_dts_luminance_map(dts_luminance)

# a simulated camera that captures the CDP pattern
camera = SimulatedCamera(cfg)
for i in range(NUM_IMAGES):
    print('Generating {}/{} HDR image'.format(i + 1, NUM_IMAGES))
    image = camera.capture_hdr(dts_luminance_map, f_num=8, exposure_time=0.001, iso=125, frames=3, ev_step=4)
    image = camera.tone_mapping(image)
    save_path = os.path.join(simulated_images_dir, 'simulated_image_{}.tiff'.format(i))
    tifffile.imwrite(save_path, image)  # write the tone-mapped frame to disk
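Every snippet above resolves its settings through a project-specific load_config helper. The stand-in below assumes a plain JSON configuration file; the real implementations differ per project (some wrap the result for attribute access such as config.target, others return a plain dict indexed like c['label_config']).

import json

def load_config(config_fn):
    """Read a JSON configuration file into a plain dict (illustrative stand-in only)."""
    with open(config_fn) as fp:
        return json.load(fp)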