Example #1
    def __init__(self, args=DEFAULT_ARGS):
        args = fill_undefined_args(args, DEFAULT_ARGS, to_bunch=True)
        super().__init__()
        self.args = args

        if not hasattr(self.args, 'sim_x_seed'):
            self.args.sim_x_seed = np.random.randint(10**6)

        self.x_seed = self.args.sim_x_seed
        self.activation = torch.tanh
        self.tau_x = 10

        # weights in the recurrent net
        self.J = nn.Linear(self.args.N_sim, self.args.N_sim, bias=False)
        self.S_u = nn.Linear(self.args.L + self.args.D,
                             self.args.N_sim,
                             bias=False)
        self.S_ro = nn.Linear(self.args.N_sim, self.args.L)

        # dealing with latent output
        self.latency = self.args.sim_latency
        self.latent_idx = -1
        self.latent_arr = [None] * self.latency
        self.latent_decay = self.args.latent_decay
        self.latent_out = 0
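
The weights set up in Example #1 (J, S_u, S_ro), the time constant tau_x, and the tanh nonlinearity suggest a leaky-integrator rate network, but the forward step itself is not part of this excerpt. A minimal sketch of the usual update such pieces imply; the function step_sketch and the exact dynamics are assumptions, not from the source:

# hypothetical forward step for the simulator above; x is the hidden state,
# inp is the (L + D)-dimensional input that S_u expects
def step_sketch(net, x, inp):
    r = net.activation(x)                # firing rates
    dx = -x + net.J(r) + net.S_u(inp)    # leak plus recurrent and input drive
    x = x + dx / net.tau_x               # Euler step with time constant tau_x
    out = net.S_ro(net.activation(x))    # linear readout
    return x, out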
Example #2
    def __init__(self, args=DEFAULT_ARGS):
        args = fill_undefined_args(args, DEFAULT_ARGS, to_bunch=True)
        super().__init__()
        self.args = args

        # small feedforward simulator: (L + D)-dim input -> 16 tanh units -> L-dim output
        self.W_sim = nn.Sequential(
            nn.Linear(self.args.L + self.args.D, 16, bias=self.args.bias),
            nn.Tanh(),
            nn.Linear(16, self.args.L, bias=self.args.bias))

        if args.sim_path is not None:
            self.load_state_dict(torch.load(args.sim_path))
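
All of these constructors call fill_undefined_args to merge the passed args with DEFAULT_ARGS, but its implementation is not shown. A sketch of the behavior its call sites suggest (fill missing fields from the defaults, optionally fall back to the default when a field is None, optionally return an attribute-accessible object); the real function may differ:

from argparse import Namespace

def _as_dict(x):
    # accept either a dict-like config or an argparse/bunch-style namespace
    return dict(x) if isinstance(x, dict) else dict(vars(x))

def fill_undefined_args_sketch(args, defaults, to_bunch=False, overwrite_none=False):
    merged = _as_dict(defaults)
    for k, v in _as_dict(args).items():
        # with overwrite_none=True, fields explicitly set to None also fall back to the default
        if v is None and overwrite_none and k in merged:
            continue
        merged[k] = v
    # "to_bunch" presumably means attribute access on the result; Namespace stands in for Bunch here
    return Namespace(**merged) if to_bunch else merged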
Example #3
    def __init__(self, args=DEFAULT_ARGS):
        super().__init__()
        args = fill_undefined_args(args, DEFAULT_ARGS, to_bunch=True)
        self.args = args

        if not hasattr(self.args, 'network_seed'):
            self.args.network_seed = random.randrange(10**6)
        self._init_vars()
        if self.args.model_path is not None:
            self.load_state_dict(torch.load(args.model_path))

        self.latent = Latent(self.args.h_latency, self.args.latent_decay)
        self.reset()
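
Latent is constructed with a latency and a decay here and in Example #5, but the class itself is not included in these excerpts. A stand-in sketch, assuming it combines a fixed output delay with exponential smoothing; the actual class may behave differently:

class LatentSketch:
    # illustrative stand-in for Latent(latency, decay): delays values by `latency`
    # steps and exponentially smooths them with factor `decay`
    def __init__(self, latency, decay):
        self.latency = max(1, latency)
        self.decay = decay
        self.buffer = [None] * self.latency
        self.idx = 0
        self.value = 0.0

    def step(self, x):
        # write the newest value and read back the one from `latency` steps ago
        delayed = self.buffer[self.idx]
        self.buffer[self.idx] = x
        self.idx = (self.idx + 1) % self.latency
        if delayed is not None:
            # with latency=1 and decay=0.95 (Example #5) this is a slow running average
            self.value = self.decay * self.value + (1 - self.decay) * delayed
        return self.value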
Example #4
    def __init__(self, args=DEFAULT_ARGS):
        super().__init__()
        self.args = fill_undefined_args(args, DEFAULT_ARGS, to_bunch=True)

        if not hasattr(self.args, 'res_seed'):
            self.args.res_seed = random.randrange(10**6)
        if not hasattr(self.args, 'res_x_seed'):
            self.args.res_x_seed = np.random.randint(10**6)

        self.tau_x = 10

        self._init_vars()
        self.reset()
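
_init_vars and reset are not shown here either. One plausible split of the two seeds, with res_seed fixing the reservoir weights and res_x_seed fixing the initial state; the attribute name N and the 1/sqrt(N) scaling are assumptions for illustration:

def init_vars_sketch(args):
    # reservoir recurrent weights drawn from a dedicated torch generator
    gen = torch.Generator().manual_seed(args.res_seed)
    J = nn.Linear(args.N, args.N, bias=False)
    with torch.no_grad():
        J.weight.copy_(torch.randn(args.N, args.N, generator=gen) / np.sqrt(args.N))
    return J

def reset_sketch(args):
    # initial hidden state from its own numpy generator, so resets are reproducible
    rng = np.random.default_rng(args.res_x_seed)
    return torch.as_tensor(rng.standard_normal(args.N), dtype=torch.float32)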
Example #5
    def __init__(self, args=DEFAULT_ARGS):
        super().__init__()
        args = fill_undefined_args(args, DEFAULT_ARGS, to_bunch=True)
        self.args = args

        if not hasattr(self.args, 'network_seed'):
            self.args.network_seed = random.randrange(10**6)
        self._init_vars()
        self.log_h_yes = Latent(1, 0.95)
        self.log_s_yes = Latent(1, 0.95)
        self.log_conf = Latent(1, 0.95)

        self.switch = False

        self.reset()
Example #6
    def __init__(self, args=DEFAULT_ARGS):
        super().__init__()
        args = fill_undefined_args(args, DEFAULT_ARGS, to_bunch=True)
        self.args = args

        if not hasattr(self.args, 'network_seed'):
            self.args.network_seed = random.randrange(10**6)
        self._init_vars()
        if self.args.model_path is not None:
            self.load_state_dict(torch.load(args.model_path))

        self.out_act = get_output_activation(self.args)
        self.network_delay = args.network_delay

        self.reset()
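
get_output_activation is not defined in these excerpts; given that args.out_act is a string (see the commented-out default 'none' in Example #8), it presumably maps names to element-wise functions. A hypothetical version for illustration only:

def get_output_activation_sketch(args):
    # map an activation name to an element-wise torch function; 'none' is identity
    table = {
        'exp': torch.exp,
        'relu': torch.relu,
        'tanh': torch.tanh,
        'none': lambda x: x,
    }
    return table[getattr(args, 'out_act', 'none') or 'none']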
Example #7
# assuming config is in the same folder as the model
config = get_config(args.model)

if args.noise != 0:
    # model is the state dict loaded from args.model (loading not shown in this excerpt);
    # perturb selected weights with zero-mean Gaussian noise scaled to half their own std
    J = model['W_f.weight']
    v = J.std().item()
    shp = J.shape
    model['W_f.weight'] += torch.normal(0, v * .5, shp)

    J = model['W_ro.weight']
    v = J.std().item()
    shp = J.shape
    model['W_ro.weight'] += torch.normal(0, v * .5, shp)

config = fill_undefined_args(args, config, overwrite_none=True)

net = load_model_path(args.model, config=config)

if args.test_all:
    _, loss2 = test_model(net, config)
    print('avg summed loss (all):', loss2)

if not args.no_plot:
    data, loss = test_model(net, config, n_tests=6)
    print('avg summed loss (plotted):', loss)

    run_id = '/'.join(args.model.split('/')[-3:-1])

    fig, ax = plt.subplots(2, 3, sharex=True, sharey=True, figsize=(12, 7))
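
The two near-identical noise blocks at the top of Example #7 could be folded into a small helper. A sketch of that refactor; the helper perturb_weight is not in the source, and W_f.weight / W_ro.weight are simply the keys used above:

def perturb_weight(state_dict, key, rel_scale=0.5):
    # add zero-mean Gaussian noise whose std is a fraction of the weight's own std
    W = state_dict[key]
    state_dict[key] = W + torch.normal(0.0, W.std().item() * rel_scale, W.shape)

if args.noise != 0:
    for key in ('W_f.weight', 'W_ro.weight'):
        perturb_weight(model, key)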
Example #8
def adjust_args(args):
    # don't call logging.info before the logger is initialized below; an early call
    # triggers a default basicConfig, and the file/console setup here would then be ignored

    # dealing with slurm: do this first, before anything else other than seed setting, which we want to override
    if args.slurm_id is not None:
        from parameters import apply_parameters
        args = apply_parameters(args.param_path, args)

    # when loading from a saved model, pull in its config so the architecture matches;
    # skipping this can lead to errors when the state dict is loaded
    if args.model_path is not None:
        config = get_config(args.model_path)
        args = fill_undefined_args(args, config, overwrite_none=True)
        enforce_same = [
            'N', 'D', 'L', 'Z', 'T', 'net', 'bias', 'use_reservoir'
        ]
        for v in enforce_same:
            if v in config and args.__dict__[v] != config[v]:
                print(
                    f'Warning: based on config, changed {v} from {args.__dict__[v]} -> {config[v]}'
                )
                args.__dict__[v] = config[v]

    # shortcut: train_parts == ['all'] means train everything, including the reservoir
    if args.train_parts == ['all']:
        args.train_parts = ['']

    # # output activation depends on the task / dataset used
    # if args.out_act is None:
    #         args.out_act = 'none'

    # set the dataset
    if 'goals' in args.dataset:
        args.dset_type = 'goals'
    elif 'copy' in args.dataset:
        args.dset_type = 'copy'
    else:
        args.dset_type = 'unknown'

    # use custom goals loss for goals dataset, override default loss fn
    if args.dset_type == 'goals':
        args.loss = 'goals'

    args.argv = sys.argv

    # setting seeds
    if args.seed is None:
        args.seed = random.randrange(10**6)
    if args.network_seed is None:
        args.network_seed = random.randrange(10**6)
    if args.res_seed is None:
        args.res_seed = random.randrange(10**6)

    torch.manual_seed(args.seed)
    np.random.seed(args.seed)
    random.seed(args.seed)

    # initializing logging
    # do this last, because we will be logging previous parameters into the config file
    if not args.no_log:
        if args.slurm_id is not None:
            log = log_this(args,
                           'logs',
                           os.path.join(
                               args.name.split('_')[0],
                               args.name.split('_')[1]),
                           checkpoints=args.log_checkpoint_models)
        else:
            log = log_this(args,
                           'logs',
                           args.name,
                           checkpoints=args.log_checkpoint_models)

        logging.basicConfig(format='%(message)s',
                            filename=log.run_log,
                            level=logging.DEBUG)
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        logging.getLogger('').addHandler(console)
        args.log = log
    else:
        logging.basicConfig(format='%(message)s', level=logging.DEBUG)
        logging.info('NOT LOGGING THIS RUN.')

    # logging, when loading models from paths
    if args.model_path is not None:
        logging.info(f'Using model path {args.model_path}')

    logging.info(
        f'Seeds:\n  general: {args.seed}\n  network: {args.network_seed}')

    return args
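
adjust_args expects an argparse-style namespace containing every field it touches. A minimal, hypothetical driver showing how it would typically be called before training; only a few of the expected flags are listed, and their defaults are illustrative:

import logging
from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('--dataset', default='datasets/copy.pkl')
parser.add_argument('--model_path', default=None)
parser.add_argument('--slurm_id', default=None)
parser.add_argument('--seed', type=int, default=None)
parser.add_argument('--network_seed', type=int, default=None)
parser.add_argument('--res_seed', type=int, default=None)
parser.add_argument('--train_parts', nargs='+', default=['all'])
parser.add_argument('--name', default='test_run')
parser.add_argument('--no_log', action='store_true')
parser.add_argument('--log_checkpoint_models', action='store_true')

args = adjust_args(parser.parse_args())
logging.info(f'dataset type: {args.dset_type}')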