Example #1
 def allocate_buffers(self):
   C, N, P, T, B, Bp = self.job.C, self.job.N, self.job.P, self.job.T, self.job.B, self.job.Bp
   sample_dims = { 'a': (N, P+T-1), 'x': (C, T), 'xhat': (C,T),
       'dx': (C,T), 'dphi': (C,N,P), 'E': (1,), 'l1_penalty': (1,), 'a_l0_norm': (N,),
       'a_l1_norm': (N,), 'a_l2_norm': (N,), 'a_variance': (N,) }
   nbuf_dims = pf.merge({'npats': (1,), 'phi': (C,N,P)},
     { k: (Bp,) + v for (k,v) in sample_dims.items() })
   rbuf_dims = pf.merge(sample_dims,
       {'x': (B,) + sample_dims['x'], 'npats': mpi.procs})
   self.nbuf = pf.data(**{k: np.zeros(v) for (k,v) in nbuf_dims.items()})
   self.rbuf = pf.data(**{k: np.zeros(v) for (k,v) in rbuf_dims.items()})
   self.nbuf.npats = self.nbuf.npats.astype(int)
   self.rbuf.npats = self.rbuf.npats.astype(int)
   self.nbuf.sum = pf.data(**{'dphi': np.zeros(sample_dims['dphi'])})
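
Every snippet in this listing builds configuration or buffer dictionaries with pf.merge / pfacets.merge. As a rough mental model only (an assumption about the library, not its actual implementation), a recursive, last-argument-wins dictionary merge behaves like the sketch below.

# Minimal sketch of a recursive, last-wins dict merge; pfacets.merge is
# assumed to behave roughly like this for the nested-dict cases above.
def merge(*dicts):
  out = {}
  for d in dicts:
    for k, v in d.items():
      if isinstance(v, dict) and isinstance(out.get(k), dict):
        out[k] = merge(out[k], v)   # merge nested dicts key by key
      else:
        out[k] = v                  # later arguments take precedence
  return out

merge({'a': 1, 'b': {'x': 1}}, {'b': {'y': 2}})
# => {'a': 1, 'b': {'x': 1, 'y': 2}}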
Example #2
def configure(**kwargs):
  defaults = {
      'coder': None,
      'spikenet': None,
      'output_root': os.path.join(os.path.expanduser('~'), 'sparco_out'),
      'trial_directory': 'trial{0}'.format(time.strftime('%y%m%d%H%M%S')),
      'snapshot_interval': 100,
      'log_level': 'INFO',
      'log_format': '%(asctime)s %(message)s',
      'RootSpikenet': {},
      'SparseCoder': {}
      }
  settings = pfacets.merge(defaults, kwargs)
  settings['SparseCoder']['RootSpikenet_config'] = settings['RootSpikenet']

  output_path = os.path.join(settings['output_root'], settings['trial_directory'])
  pfacets.mkdir_p(output_path)

  log_path = (settings.get('log_path')
      or os.path.join(settings['output_root'], 'sparco.log'))
  logging.basicConfig(filename=log_path, filemode='a+', format=settings['log_format'],
      level=getattr(logging, settings['log_level'].upper()))
  sparco.trace.sp.Tracer.snapshot_interval = settings['snapshot_interval']

  sparco.trace.sparse_coder.Tracer.output_path = output_path
  if settings['coder']:
    traceutil.tracer.apply_tracer(sparco.trace.sparse_coder.Tracer,
        target=settings['coder'], **settings['SparseCoder'])
Example #3
 def run(self, weights={}):
   """Run each prediction net configuration.
   """
   self.weights = weights
   self.start_index = mpi.bcast_obj(self.start_index)
   for self.t in range(self.start_index, len(self.configs)):
     config = pf.merge(self.configs[self.t], self.weights)
     self.iteration(config)
Example #4
 def t_create_pnet(tracer, orig, self, *args, **kwargs):
   pn = orig(self, *args, **kwargs)
   if mpi.rank == mpi.root:
     logging.info('Pass {0}'.format(self.t))
     pn_output_path = osp.join(tracer.output_path, '{0}'.format(self.t))
     conf = pf.merge(tracer.PredictionNet_config,
       {'objective_buffer': tracer.objective_buffer[self.t]})
     traceutil.tracer.apply_tracer(pnet.trace.prediction_net.Tracer,
         output_path=pn_output_path, target=pn, **conf)
   return pn
Example #5
 def run_core(s):
   if s.core is not None:
     if not (s.root_only and mpi.rank != mpi.root):
       klass = s.resolve_core_class(s.core)
       into = s.resolve_pass_spec(s.into)
       conf = pf.merge(s.conf, into)
       s._core = klass(**conf)
       s._core.run()
       out = s.resolve_pass_spec(s.out, target=s._core)
       s.buf_merge(out)
     s.buf = mpi.bcast_obj(s.buf)
Example #6
 def adjust_image_settings(s):
   s.image_settings = pf.merge(s.image_settings, 
       {'vmin': np.min(s.data), 'vmax': np.max(s.data)})
Example #7
def config_key(config):
  config = (config.__dict__ if isinstance(config, PredictionNet)
      else pf.merge(PredictionNet.defaults, config))
  tup = (config['X'].shape[0],)
  return "niter_{0}".format(*tup)
Example #8
########### BUILD CONFIGURATION

args = parse_args()
cli_config = pfacets.map_object_to_dict(
    args, {
        'input_path': ['sampler', 'input_path'],
        'output_root': ['trace', 'output_root'],
        'snapshot_interval': ['trace', 'snapshot_interval'],
        'produce_output': ['produce_output'],
        'trial_directory': ['trace', 'trial_directory']
    })
config_module = pfacets.load_local_module(path=args.local_config_path,
                                          default_name='.sparcorc')
local_config = config_module.config if config_module else {}
config = pfacets.merge(local_config, cli_config)
config.setdefault('run_ladder', True)

########### RUN


def run_coder(config):
    sc = sparco.sparse_coder.SparseCoder(config['nets'])
    if config['produce_output'] or config['output_root']:
        sparco.trace.configure(coder=sc, **config['trace'])
    sc.run()


sampler = sparco.sampler.Sampler(**config['sampler'])
for c in config['nets']:
    c['sampler'] = sparco.sampler.Sampler(**pfacets.merge(config['sampler'], c['sampler']))
Example #9
 def __init__(self, **kwargs):
   """Configure the Tracer for a RootSpikenet."""
   kwargs = pf.merge(Tracer.defaults, kwargs)
   super(Tracer, self).__init__(**kwargs)
Example #10
      }
    }

config_module = pfacets.load_local_module(path=args.local_config_path,
    default_name='.sparcorc')
local_config = config_module.config if config_module is not None else {}

# TODO temp
cli_config = pfacets.map_object_to_dict(args, {
  'output_root': ['run', 'output_root']
  # 'posmat': ['feature', 'plotter_settings', 'posmat'],
  # 'aspect': ['image', 'aspect'],
  # 'origin': ['image', 'origin'],
  })

config = pfacets.merge(defaults, local_config, cli_config)

########### DERIVED AND DYNAMIC DEFAULTS

if args.posmat_path is not None:
  posmat = np.int32(h5py.File(args.posmat_path, 'r')['electrode_layout'])
  config['feature'].setdefault('plotter_settings', {})['posmat'] = posmat

order = ['job', 'run', 'dictionary', 'feature']
keys = order[order.index(args.input_type):]
settings = config[keys[0]]
curr = settings
for k in keys[1:]:
  curr[k +'_settings'] = config[k]
  curr = curr[k+'_settings']
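
The final loop here chains the per-section configs into one nested dict, storing each section under a '<key>_settings' entry of the previous one. A worked example with made-up values (the config contents below are hypothetical):

# Hypothetical values; shows what the nesting loop above produces when
# args.input_type == 'job'.
config = {'job': {'name': 'j1'}, 'run': {'niter': 10},
          'dictionary': {'size': 100}, 'feature': {'dpi': 72}}
order = ['job', 'run', 'dictionary', 'feature']
keys = order[order.index('job'):]
settings = config[keys[0]]
curr = settings
for k in keys[1:]:
  curr[k + '_settings'] = config[k]
  curr = curr[k + '_settings']
# settings == {'name': 'j1', 'run_settings': {'niter': 10,
#   'dictionary_settings': {'size': 100, 'feature_settings': {'dpi': 72}}}}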
Example #11
profile_configs = map(lambda x: dict(zip(profile_space.keys(), x)),
    itertools.product(*tuple(profile_space.values())))

trial_base_dir = 'profiling{0}'.format(time.strftime('%y%m%d%H%M%S'))

for pc in profile_configs:
  dirname = '_'.join(itertools.imap(
    lambda k,v: '{0}_{1}'.format(k,v), pc.keys(), pc.values()))
  config['nets'].append({
    'trace': {
      # 'trial_directory': os.path.join(trial_base_dir, dirname)
      'trial_directory': dirname
      },
    'sampler': pfacets.merge( copy.deepcopy(config['sampler']),
      {
        'channels': np.arange(pc['channels']),
        'patches_length': pc['patch_length']
        }
      ),
    'basis_method': pc['basis_method'],
    'dictionary_size': 100,
    'convolution_time_length': 64,
    'batch_size': mpi.procs * pc['patches_per_node'],
    'num_iterations': 2000,
    'run_time_limit': 120,
    'target_angle': 5,
    'max_angle': 10,
    'inference_settings': {
      'lam': 0.1,
      'maxit': 10
      }
    })
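
profile_space itself is not shown in this snippet; assuming it maps parameter names to lists of candidate values, the map/itertools.product expression at the top enumerates every combination, e.g.:

# Toy profile_space (values made up); profile_configs becomes one dict
# per point of the parameter grid.
import itertools
profile_space = {'channels': [16, 32], 'patch_length': [64, 128]}
profile_configs = [dict(zip(profile_space.keys(), combo))
                   for combo in itertools.product(*profile_space.values())]
# [{'channels': 16, 'patch_length': 64}, {'channels': 16, 'patch_length': 128},
#  {'channels': 32, 'patch_length': 64}, {'channels': 32, 'patch_length': 128}]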
Example #12
 def __init__(s, spec):
   for k,v in pf.merge(Node.defaults, spec).items():
     setattr(s, k, v)
   s.buf = {}
Example #13
 def __init__(s, **kwargs):
   for k,v in pf.merge(s.__class__.settings, kwargs).items():
     setattr(s, k, v)
   s.setup()
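
Examples #12 and #13 use the same constructor idiom: merge class-level defaults with the caller's overrides, then setattr each key onto the instance. A standalone sketch with a plain dict standing in for pf.merge (the class name and settings below are made up):

class Widget(object):
  settings = {'batch_size': 10, 'shuffle': True}   # hypothetical defaults

  def __init__(self, **kwargs):
    merged = dict(self.__class__.settings)
    merged.update(kwargs)              # keyword arguments override defaults
    for k, v in merged.items():
      setattr(self, k, v)

w = Widget(batch_size=32)
w.batch_size, w.shuffle   # => (32, True)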
Example #14
 def __new__(meta, name, bases, dct):
   sets = [ getattr(b, 'settings', {}) for b in bases ]
   sets.append( dct.get('settings', {}) )
   dct['settings'] = pf.merge(*sets)
   return super(CoreMeta, meta).__new__(meta, name, bases, dct)
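
This metaclass merges the settings dicts of all base classes with the dict declared on the class body, so subclasses inherit defaults and override only what they declare. A self-contained illustration (Python 3 syntax, plain dict.update standing in for pf.merge; the class names below are made up):

class CoreMeta(type):
  def __new__(meta, name, bases, dct):
    settings = {}
    for b in bases:                              # parents first ...
      settings.update(getattr(b, 'settings', {}))
    settings.update(dct.get('settings', {}))     # ... own body wins
    dct['settings'] = settings
    return super(CoreMeta, meta).__new__(meta, name, bases, dct)

class Base(metaclass=CoreMeta):
  settings = {'batch_size': 100, 'log': False}

class Child(Base):
  settings = {'log': True}

Child.settings   # => {'batch_size': 100, 'log': True}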
Example #15
      'channels': ['sampler', 'channels'],
      'input_path': ['sampler', 'input_path'],
      'time_dimension': ['sampler', 'time_dimension'],
      'inner_output_directory': ['trace', 'inner_output_directory'],
      'log_level': ['trace', 'log', 'level'],
      'log_path': ['trace', 'log', 'filename'],
      'mode': ['mode'],
      'output': ['trace', 'enable'],
      'output_root': ['trace', 'output_root'],
      'snapshot_interval': ['trace', 'PredictionNet', 'snapshot_interval'],
      })

  if args.resume is not None:
    with open(osp.join(args.resume, 'cli_config.json'), 'r') as f:
      prev_cli_config = json.loads(f.read())
    cli_config = pf.merge(prev_cli_config, cli_config)
    local_config_path = glob.glob(osp.join(args.resume, '*.py'))[0]
  else:
    local_config_path = args.local_config_path
else:
  local_config_path = None
  cli_config = None

local_config_path = mpi.bcast_obj(local_config_path)
cli_config = mpi.bcast_obj(cli_config)

# all procs must load the local config because it contains unbound methods,
# which can't be pickled

local_config = pf.load_local_module(local_config_path,
    default_name='.pnetrc').config or {}
Example #16
 def initialize_wrappers(self, wrappers):
   self.wrappers = pf.merge(
       getattr(self.__class__, 'wrappers', {}), wrappers, mergelists=True)
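
The mergelists=True flag presumably concatenates list values instead of replacing them, which would let a subclass append wrappers rather than overwrite the inherited list; that reading is an assumption about pfacets, sketched below.

# Assumed semantics of mergelists=True (not the pfacets implementation):
def merge_with_lists(a, b):
  out = dict(a)
  for k, v in b.items():
    if isinstance(v, list) and isinstance(out.get(k), list):
      out[k] = out[k] + v       # concatenate rather than overwrite
    else:
      out[k] = v
  return out

merge_with_lists({'wrappers': ['trace']}, {'wrappers': ['profile']})
# => {'wrappers': ['trace', 'profile']}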
Example #17
  return args

########### BUILD CONFIGURATION

args = parse_args()
cli_config = pfacets.map_object_to_dict(args, {
    'input_path': ['sampler', 'input_path'],
    'output_root': ['trace', 'output_root'],
    'snapshot_interval': ['trace', 'snapshot_interval'],
    'produce_output': ['produce_output'],
    'trial_directory': ['trace', 'trial_directory']
    })
config_module = pfacets.load_local_module(path = args.local_config_path,
    default_name='.sparcorc')
local_config = config_module.config if config_module else {}
config = pfacets.merge(local_config, cli_config)
config.setdefault('run_ladder', True)

########### RUN

def run_coder(config):
  sc = sparco.sparse_coder.SparseCoder(config['nets'])
  if config['produce_output'] or config['output_root']:
    sparco.trace.configure(coder=sc, **config['trace'])
  sc.run()

sampler = sparco.sampler.Sampler(**config['sampler'])
for c in config['nets']:
  c['sampler'] = sparco.sampler.Sampler(**pfacets.merge(config['sampler'], c['sampler']))

# TODO need to manage this better
Example #18
arg_parser.add_argument('--job-name',
    help='name for this job')
arg_parser.add_argument('--output-root',
    help='path to directory in which to place output')
arg_parser.add_argument('--spearmint-db-path',
    help='path to json file with spearmint database')

if mpi.rank == mpi.root:

  args = arg_parser.parse_args()

  cli_config = pf.map_object_to_dict(args, {
    'output_root': ['conf', 'output_base'],
    'input_data_path': ['conf', 'input_data_path'],
    'name': ['name'],
    })

  # cli_sub_config = pf.map_object_to_dict(args, {
  #   'input_path': ['conf', 'input_path']
  #   })

  if re.search(r'\.py$', args.local_config_path):
    local_config = pf.load_local_module(args.local_config_path).config
  elif re.search(r'\.json$', args.local_config_path):
    local_config = json.load(open(args.local_config_path))
  config = pf.merge(local_config, cli_config)
  # config['children'] = [ pf.merge(conf, cli_sub_config) for conf in local_config['children'] ]

config = mpi.bcast_obj(config if 'config' in locals() else None)
sp.job.Node(config).run()
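
Examples #15 and #18 share a root-parses-then-broadcast pattern: only the root rank reads the CLI and config files, and the resulting dict is then sent to every other rank. Assuming mpi.bcast_obj wraps an object broadcast, a bare-bones mpi4py equivalent looks like this.

# Toy mpi4py version of the root-only parse + broadcast idiom above;
# paths and keys are made up.
from mpi4py import MPI

comm = MPI.COMM_WORLD
if comm.Get_rank() == 0:
  config = {'output_root': '/tmp/out', 'name': 'job1'}   # e.g. parsed from argv
else:
  config = None
config = comm.bcast(config, root=0)   # every rank now holds the same dict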