Example #1
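A GUI menu handler (presumably wxPython, given the SetValue call): after the user confirms a yes/no dialog, it reloads default.cfg and clears the file picker.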
    def on_menu_reset(self, event):
        yes = utils.make_yesno_dlg(_(u'Are you sure you want to reset?'),
                                   _(u'Are you sure?'))
        if yes:
            configs.load(self, app.get_real_path('default.cfg'))
            self.picker.view.path.SetValue(u'')
            self.picker.clear_all()
Example #2
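A CLI dispatcher that loads configuration only for the commands that need it: Command.configs just prints the loaded configs, Command.purge touches only the file cache, and publish/install use both.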
def run_command(command, override_path=None, force=False):
  # Nondestructive commands that don't require cache.
  if command == Command.configs:
    config_list = configs.load(override_path=override_path)
    configs.print_configs(config_list)

  # Destructive commands that require cache but not configs.
  elif command == Command.purge:
    file_cache = FileCache(configs.set_up_cache_dir())
    file_cache.purge()

  # Commands that require cache and configs.
  else:
    config_list = configs.load(override_path=override_path)
    file_cache = FileCache(configs.set_up_cache_dir())

    if command == Command.publish:
      for config in config_list:
        file_cache.publish(config, version_for(config), force=force)

    elif command == Command.install:
      for config in config_list:
        file_cache.install(config, version_for(config), force=force)

    else:
      raise AssertionError("unknown command: " + str(command))
Example #3
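Loads an environment file with a default generals section (TASKS_PER_SECOND defaulting to 1) and returns that section's properties as a dict.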
def get_environment():

    conf = configs.load(ENV_FILE, defaults={'generals': {
        'TASKS_PER_SECOND': '1'
    }})

    return conf['generals'].dict_props
Example #4
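Experiment setup: reads config.yaml, empties and (re)creates the experiment directory, configures file logging, and installs an excepthook so uncaught exceptions are logged before the config is returned.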
def configure():
    # Load config
    with open("config.yaml") as config_f:
        config = Struct(**yaml.safe_load(config_f))
    config = configs.load(config)

    # Set up experiment
    config.experiment_dir = os.path.join("experiments", config.name)
    if not os.path.exists(config.experiment_dir):
        os.mkdir(config.experiment_dir)
    files = glob.glob(os.path.join(config.experiment_dir, "*"))
    for f in files:
        os.remove(f)

    # Set up logging
    log_name = os.path.join(config.experiment_dir, "run.log")
    logging.basicConfig(
            filename=log_name, level=logging.DEBUG,
            format='%(asctime)s %(levelname)-8s %(message)s')

    def handler(exc_type, value, tb):
        logging.error("Uncaught exception: %s", str(value))
        logging.error("\n".join(traceback.format_exception(exc_type, value, tb)))
    sys.excepthook = handler

    logging.info("BEGIN")
    logging.info(str(config))

    return config
Example #5
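Builds a dict of source definitions from a config file, keyed by each name listed under sources, with the section name stored as its id.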
def load_sources_from_config_file(path):
    c = configs.load(path)
    srcs = dict()
    for s in c['sources']:
        props = c[s].dict_props
        props['id'] = s
        srcs[s] = props
    return srcs
Example #6
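Validates that the loaded configuration contains the token and req_updates_timeout_sec keys before building the Telegram bot URL.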
    def load_configs(self, configurations=None):
        if not configurations:
            configurations = configs.load()
        if not "token" in configurations:
            raise Exception("'token' not found on loaded configurations.")

        if not "req_updates_timeout_sec" in configurations:
            raise Exception(
                "'req_updates_timeout_sec' not found on loaded configurations."
            )

        self.token = configurations["token"]
        self.req_updates_timeout_sec = configurations[
            "req_updates_timeout_sec"]
        self.url = "https://api.telegram.org/bot{}/".format(
            configurations["token"])
Example #7
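A wxPython frame initializer that unpickles a language table, registers the available languages, and loads the phrase set for the configured language before building the widgets.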
    def __init__(self):
        """Initialization interface."""
        self.config = configs.Config()
        with open('languages.dat', 'rb') as lang_file:
            lang_dict = pickle.load(lang_file)
            self.config.set_languages(lang_dict['languages'])
            self.phrases = configs.load(
                lang_dict[self.config.general_language])
        super().__init__(None, wx.ID_ANY, self.phrases.titles.caption)
        self.command = Commands(self)
        self.menu = Menu(self)

        self.panel = wx.Panel(self, wx.ID_ANY)
        sizer_panel = wx.BoxSizer(wx.HORIZONTAL)
        sizer_panel.Add(self.panel, 1, wx.EXPAND | wx.ALL)
        self.SetSizer(sizer_panel)

        self.CreateStatusBar()
        self.__create_widgets()
        self.__create_bindings()
        self.set_values()
Example #8
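A Keras training driver: it claims a GPU, records run metadata (output directory, data sequences, git commit) in the config, trains, and on success archives the config and saves the final model, removing the output directory on failure.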
def train(config=None, gpu_queue=None):

    try:
        gpu_idx = maybe_get_a_gpu() if gpu_queue is None else gpu_queue.get()
        os.environ["CUDA_VISIBLE_DEVICES"] = gpu_idx
    except Exception as e:
        print(str(e))

    day, hour = timestamp(separate=True)

    out_dir = os.path.join("models", config["model_name"],
                           config.get("model_type", ""), day, hour)

    os.makedirs(out_dir, exist_ok=True)
    configs.deep_update(config, {"out_dir": out_dir})

    configs.add(config, to=".running")

    model = MODELS[config["model_name"]](config)

    no_exception = True  # set before the try so the finally block can always read it
    try:
        train_seq = model.get_sequence(config)
        eval_seq = model.get_sequence(config, istraining=False)
        configs.deep_update(config, {
            "train_seq": train_seq,
            "eval_seq": eval_seq
        })

        checkpoints = out_dir + '/inter_model_{epoch:02d}-{val_loss:.4f}.h5'
        if "RNN" in config["model_name"]:
            configs.deep_update(config,
                                {"reset_batches": train_seq.reset_batches})
            configs.deep_update(config, {"filepath": checkpoints})
        if 'Trackifier' in config['model_name']:
            configs.deep_update(config, {"filepath": checkpoints})

        callbacks = parse_callbacks(config["callbacks"])
        optimizer = getattr(keras_optimizers,
                            config["optimizer"])(**config["opt_params"])

        model.compile(optimizer)

        if isinstance(config['train_path'], list):
            for i, subject in enumerate(config['train_path']):
                samples_config = os.path.join(os.path.dirname(subject),
                                              'config.yml')
                samples_config = configs.load(samples_config)
                config['input_samples_config_{0}'.format(i)] = samples_config
        else:
            samples_config = os.path.join(
                os.path.dirname(config['train_path']), 'config.yml')
            samples_config = configs.load(samples_config)
            config['input_samples_config'] = samples_config
        repo = git.Repo(".")
        commit = repo.head.commit
        config['commit'] = str(commit)
        configs.save(config)

        print("\nStart training...")
        model.keras.fit_generator(
            train_seq,
            callbacks=callbacks,
            validation_data=eval_seq,
            epochs=config["epochs"],
            shuffle=config["shuffle"],
            max_queue_size=2000,
            verbose=1,
            workers=5,
            use_multiprocessing=True,
        )
    except KeyboardInterrupt:
        model.stop_training = True
    except Exception:
        shutil.rmtree(out_dir)
        no_exception = False
        raise
    finally:
        configs.remove(config, _from=".running")
        if no_exception:
            configs.add(config, to=".archive")
            model_path = os.path.join(out_dir, "final_model.h5")
            print("\nSaving {}".format(model_path))
            model.keras.save(model_path)
        if gpu_queue is not None:
            gpu_queue.put(gpu_idx)

    return model.keras
Example #9
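A thin event handler that simply delegates to configs.load, presumably repopulating the instance from a config file.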
    def load_config(self, event):
        configs.load(self)
Example #10
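Random-forest tractography inference: loads a DWI volume and seed points, iteratively predicts step directions in chunks (mapping classifier outputs to b-vectors), stitches the two directions of each seed into one fiber, and saves the streamlines as a .trk file along with the config used.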
def run_rf_inference(config=None, gpu_queue=None):
    """"""
    try:
        gpu_idx = maybe_get_a_gpu() if gpu_queue is None else gpu_queue.get()
        os.environ["CUDA_VISIBLE_DEVICES"] = gpu_idx
    except Exception as e:
        print(str(e))

    print("Loading DWI...")

    dwi_img = nib.load(config['dwi_path'])
    dwi_img = nib.funcs.as_closest_canonical(dwi_img)
    dwi_aff = dwi_img.affine
    dwi_affi = np.linalg.inv(dwi_aff)
    dwi = dwi_img.get_data()

    def xyz2ijk(coords, snap=False):
        ijk = (coords.T).copy()
        dwi_affi.dot(ijk, out=ijk)
        if snap:
            return np.round(ijk, out=ijk).astype(int, copy=False).T
        else:
            return ijk.T

    with open(os.path.join(config['model_dir'], 'model'), 'rb') as f:
        model = pickle.load(f)

    train_config_file = os.path.join(config['model_dir'], 'config.yml')
    bvec_path = configs.load(train_config_file, 'bvecs')
    _, bvecs = read_bvals_bvecs(None, bvec_path)

    terminator = Terminator(config['term_path'], config['thresh'])

    prior = Prior(config['prior_path'])

    print("Initializing Fibers...")

    seed_file = nib.streamlines.load(config['seed_path'])
    xyz = seed_file.tractogram.streamlines.data
    n_seeds = 2 * len(xyz)
    xyz = np.vstack([xyz, xyz])  # Duplicate seeds for both directions
    xyz = np.hstack([xyz, np.ones([n_seeds, 1])])  # add affine dimension
    xyz = xyz.reshape(-1, 1, 4)  # (fiber, segment, coord)

    fiber_idx = np.hstack([
        np.arange(n_seeds // 2, dtype="int32"),
        np.arange(n_seeds // 2, dtype="int32")
    ])
    fibers = [[] for _ in range(n_seeds // 2)]

    print("Start Iteration...")

    input_shape = model.n_features_
    block_size = int(np.cbrt(input_shape / dwi.shape[-1]))

    d = np.zeros([n_seeds, dwi.shape[-1] * block_size**3])
    dnorm = np.zeros([n_seeds, 1])
    vout = np.zeros([n_seeds, 3])
    for i in range(config['max_steps']):
        t0 = time()

        # Get coords of the latest segment for each fiber
        ijk = xyz2ijk(xyz[:, -1, :], snap=True)

        n_ongoing = len(ijk)

        for ii, idx in enumerate(ijk):
            d[ii] = dwi[idx[0] - (block_size // 2):idx[0] + (block_size // 2) +
                        1, idx[1] - (block_size // 2):idx[1] +
                        (block_size // 2) + 1,
                        idx[2] - (block_size // 2):idx[2] + (block_size // 2) +
                        1, :].flatten()  # returns copy
            dnorm[ii] = np.linalg.norm(d[ii])
            d[ii] /= dnorm[ii]

        if i == 0:
            inputs = np.hstack(
                [prior(xyz[:, 0, :]), d[:n_ongoing], dnorm[:n_ongoing]])
        else:
            inputs = np.hstack(
                [vout[:n_ongoing], d[:n_ongoing], dnorm[:n_ongoing]])

        chunk = 2**15  # 32768
        n_chunks = np.ceil(n_ongoing / chunk).astype(int)
        for c in range(n_chunks):

            outputs = model.predict(inputs[c * chunk:(c + 1) * chunk])
            v = bvecs[outputs, ...]
            vout[c * chunk:(c + 1) * chunk] = v

        rout = xyz[:, -1, :3] + config['step_size'] * vout
        rout = np.hstack([rout, np.ones((n_ongoing, 1))]).reshape(-1, 1, 4)

        xyz = np.concatenate([xyz, rout], axis=1)

        terminal_indices = terminator(xyz[:, -1, :])

        for idx in terminal_indices:
            gidx = fiber_idx[idx]
            # Other end not yet added
            if not fibers[gidx]:
                fibers[gidx].append(np.copy(xyz[idx, :, :3]))
            # Other end already added
            else:
                this_end = xyz[idx, :, :3]
                other_end = fibers[gidx][0]
                merged_fiber = np.vstack(
                    [np.flip(this_end[1:], axis=0),
                     other_end])  # stitch ends together
                fibers[gidx] = [merged_fiber]

        xyz = np.delete(xyz, terminal_indices, axis=0)
        vout = np.delete(vout, terminal_indices, axis=0)
        fiber_idx = np.delete(fiber_idx, terminal_indices)

        print(
            "Iter {:4d}/{}, finished {:5d}/{:5d} ({:3.0f}%) of all seeds with"
            " {:6.0f} steps/sec".format(
                (i + 1), config['max_steps'], n_seeds - n_ongoing, n_seeds,
                100 * (1 - n_ongoing / n_seeds), n_ongoing / (time() - t0)),
            end="\r")

        if n_ongoing == 0:
            break

        gc.collect()

    # Keep only finished fibers; indices still in fiber_idx are unfinished:

    fibers = [
        fibers[gidx] for gidx in range(len(fibers)) if gidx not in fiber_idx
    ]
    # Save Result

    fibers = [f[0] for f in fibers]

    tractogram = Tractogram(streamlines=ArraySequence(fibers),
                            affine_to_rasmm=np.eye(4))

    timestamp = datetime.datetime.now().strftime("%Y-%m-%d-%H:%M:%S")
    out_dir = os.path.join(os.path.dirname(config["dwi_path"]),
                           "predicted_fibers", timestamp)

    configs.deep_update(config, {"out_dir": out_dir})

    os.makedirs(out_dir, exist_ok=True)

    fiber_path = os.path.join(out_dir, timestamp + ".trk")
    print("\nSaving {}".format(fiber_path))
    TrkFile(tractogram, seed_file.header).save(fiber_path)

    config_path = os.path.join(out_dir, "config.yml")
    print("Saving {}".format(config_path))
    with open(config_path, "w") as file:
        yaml.dump(config, file, default_flow_style=False)

    if config["score"]:
        score_on_tm(fiber_path)

    return tractogram
Example #11
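Loads the instance's config file, logging any exception rather than propagating it.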
    def getConfig(self):
        try:
            self.config = configs.load(self.configFile())
            return self.config
        except Exception:
            self.getLogger().exception('Error: ')
Example #12
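The tail of a training script: it saves the config and the pickled classifier, then, under __main__, parses command-line arguments, loads the config from the given path, and starts training.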
    os.makedirs(out_dir, exist_ok=True)
    config_path = os.path.join(out_dir, "config.yml")
    print("Saving {}".format(config_path))
    with open(config_path, "w") as file:
        yaml.dump(configs, file, default_flow_style=False)

    model_path = os.path.join(out_dir, 'model')
    print("Saving {}".format(model_path))
    with open(model_path, 'wb') as f:
        pickle.dump(clf, f)

    return clf, inputs, outputs, output_classes


if __name__ == '__main__':

    parser = argparse.ArgumentParser(description="Train the entrack model")

    parser.add_argument("config_path", type=str, nargs="?",
                        help="Path to model config.")

    parser.add_argument("--max_n_samples", type=int,
                        help="Maximum number of samples to be used for both "
                             "training and evaluation")
    args = parser.parse_args()

    configs = load(args.config_path)
    if args.max_n_samples is not None:
        configs['max_n_samples'] = args.max_n_samples
    train_model(configs)