def __init__(self, **kwargs):
    """Initialize column bookkeeping and optional reader settings.

    Keyword Args:
        reader_settings (dict): options forwarded to the reader;
            defaults to an empty dict when not supplied.
    """
    # Ordered mappings so column insertion order is preserved.
    self.cols = OrderedDict()
    self.col_order = OrderedDict()
    # dict.get replaces the original membership-test if/else (same result,
    # one lookup instead of two).
    self.reader_settings = kwargs.get('reader_settings', {})
def _parse_blocks(self, lines):
    """Partition *lines* into labelled blocks (imports, assignments, defs,
    set-up and tests) according to how each line begins.

    Args:
        lines: iterable of source-text lines.

    Returns:
        A ``Blocks`` instance whose list attributes hold the original
        lines, grouped in encounter order.
    """

    class Blocks:
        # Accumulator for the recognised line categories.
        def __init__(self):
            self.imports = []
            self.assignments = []
            self.defs = []
            self.set_up = []
            self.tests = []

        def add(self, target, accum):
            # Unknown targets (e.g. the initial '' before any match) are
            # silently ignored.
            try:
                a = getattr(self, target)
                a.extend(accum)
            except AttributeError:
                pass

    blocks = Blocks()

    def begins_with(name):
        return lambda begin, line: begin == name

    def exact(name):
        return lambda begin, line: line.rstrip() == name

    # First matching predicate wins, so ordering here is significant:
    # the last two entries are catch-alls for non-indented lines.
    patterns = OrderedDict([
        (begins_with('def'), 'defs'),
        (begins_with('import'), 'imports'),
        (begins_with('from'), 'imports'),
        (exact('set up'), 'set_up'),
        (lambda begin, line: begin and '=' in line and begin != ' ',
         'assignments'),
        (lambda begin, line: begin != ' ' and begin != '' and begin != '\n',
         'tests'),
    ])

    def match(line):
        # Classify by the first space-delimited token; returns None when
        # no pattern applies (continuation/indented lines).
        begin = line.split(' ')[0]
        for predicate, target in patterns.items():
            if predicate(begin, line):
                return target

    def accumulate(line):
        # A matching line starts a new block: flush what was collected so
        # far, then start collecting under the new target.
        matched = match(line)
        if matched:
            blocks.add(accumulate.target, accumulate.items)
            accumulate.target = matched
            accumulate.items = []
        accumulate.items.append(line)

    accumulate.target = ''
    accumulate.items = []
    # Plain loop instead of map(): map-for-side-effects is non-idiomatic
    # and is a silent no-op under Python 3 (map is lazy there).
    for line in lines:
        accumulate(line)
    # Flush the final, still-open block.
    blocks.add(accumulate.target, accumulate.items)
    return blocks
def __init__(self, drop_p: float = 0.2, hidden_dim: int = 512,
             z_dim: int = None, num_classes: int = None, data_dir='',
             nsynth_class='', dataset='', train_transform=None,
             batch_size=128):
    """
    Attaches a MLP for finetuning using the standard self-supervised protocol.

    Example::

        from pl_bolts.callbacks.self_supervised import SSLOnlineEvaluator
        # your model must have 2 attributes
        model = Model()
        model.z_dim = ... # the representation dim
        model.num_classes = ... # the num of classes in the model

    Args:
        drop_p: (0.2) dropout probability
        hidden_dim: (512) the hidden dimension for the finetune MLP
        z_dim: representation dimension of the backbone (None until set)
        num_classes: number of target classes (None until set)
        data_dir: directory containing the LMDB dataset files
        nsynth_class: target attribute name for the NSynth dataset variant
        dataset: dataset name; substring match selects the loader branch
        train_transform: transform applied to training samples
        batch_size: (128) mini-batch size used by the evaluator
    """
    super().__init__()
    self.hidden_dim = hidden_dim
    self.drop_p = drop_p
    self.optimizer = None  # created later, outside __init__
    self.z_dim = z_dim
    self.num_classes = num_classes
    self.output = OrderedDict()
    self.batch_size = batch_size
    # Pick the training dataset by substring match on the dataset name.
    if 'esc' in dataset:
        # ESC file name has no "train_" prefix; folds 1-4 are used for
        # training (presumably fold 5 is held out — TODO confirm).
        data_path = os.path.join(data_dir, dataset + ".lmdb")
        self.train_dataset = NormalLMDB_ESC(data_path,
                                            transform=train_transform,
                                            aug_transform=None,
                                            spec_transform=None,
                                            folds=[1, 2, 3, 4])
    elif 'nsynth' in dataset:
        data_path = os.path.join(data_dir, "train_" + dataset + ".lmdb")
        self.train_dataset = NormalLMDBG_NSYNTH(data_path,
                                                transform=train_transform,
                                                aug_transform=None,
                                                spec_transform=None,
                                                target=nsynth_class,
                                                perc=1)
    else:
        # Default/generic LMDB loader; perc=1 uses 100% of the split.
        data_path = os.path.join(data_dir, "train_" + dataset + ".lmdb")
        self.train_dataset = NormalLMDBG(data_path,
                                         transform=train_transform,
                                         aug_transform=None,
                                         spec_transform=None,
                                         perc=1)
def convert(line):
    """Convert one shorthand assertion line into unittest-style code.

    NOTE(review): this function relies on Python 2 semantics — ``map`` is
    eager and ``filter``/``dict.items`` return lists. Under Python 3 the
    ``map`` below would never run and ``matches[0]`` would fail.
    ``strip``, ``partial`` and ``_convert`` are expected to be in scope at
    module level (presumably ``from string import strip`` and
    ``functools.partial`` — confirm against the file's imports).
    """
    # Ordered shorthand -> assertion-name table. String keys are matched
    # by substring; callable keys are predicates; callable values produce
    # the output line(s) directly. Order matters: first match wins.
    s = OrderedDict([
        ('==', 'Equal'),
        ('!=', 'NotEqual'),
        ('!~=', 'NotAlmostEqual'),
        ('~=', 'AlmostEqual'),
        # Lines with more than one '>'/'<' are handled by _convert.
        (lambda line: bool(line.count('>') > 1), partial(_convert, '>', 'Greater')),
        (lambda line: bool(line.count('<') > 1), partial(_convert, '<', 'Less')),
        ('>=', 'GreaterEqual'),
        ('<=', 'LessEqual'),
        ('>', 'Greater'),
        ('<', 'Less'),
        # 'raises' expands to a two-line assertRaises context manager.
        ('raises', lambda line: ['with self.assertRaises(' + line.split('raises')[1].strip() + '):', ' ' + line.split('raises')[0].strip()]),
        (' is not instanceof ', 'NotIsInstance'),
        (' is instanceof ', 'IsInstance'),
        # 'for' lines pass through unchanged.
        ('for ', lambda line: line),
        (' not in ', 'NotIn'),
        (' in ', 'In'),
        (' is not None', 'IsNotNone'),
        (' is None', 'IsNone'),
        (' is not ', 'IsNot'),
        (' is ', 'Is'),
    ])

    def to_lambda(i):
        # Rewrite one (key, value) entry so every key becomes a callable
        # predicate and every value a callable code generator.
        def to_code(k, v, line):
            # Split on the LAST occurrence of the operator and emit
            # self.assert<Name>(lhs, rhs) (rhs dropped if empty).
            l, op, r = line.rpartition(k)
            params = ', '.join(map(strip, (l, r))).rstrip().rstrip(',')
            return 'self.assert' + v + '(' + params + ')'
        k, v = i
        new_v = v if callable(v) else partial(to_code, k, v)
        # Safe only because map() below iterates a Python 2 list snapshot
        # of s.items(); deleting/re-inserting moves the entry to the end,
        # which affects which predicate matches first — presumably
        # intentional, verify before touching.
        del s[k]
        if callable(k):
            s[k] = new_v
        else:
            s[lambda line: k in line] = new_v

    # Python 2: eager map used for side effects on s.
    map(to_lambda, s.items())
    # Python 2: filter returns a list, so len()/indexing work.
    matches = filter(lambda k: k(line), s.keys())
    return s[matches[0]](line) if len(matches) else line
def load_settings(self):
    ''' load settings from file or user input

    Interactively confirms the loaded settings with the user, re-prompting
    until accepted, then optionally saves them to self.settings_file.
    Python 2 only (raw_input, print statement).
    '''
    self._get_user_settings()
    self._get_initial_settings()
    while True:
        # make sure selected settings are ok
        self.print_settings(self.settings_names, self.settings)
        settings_ok = raw_input('\nAre these settings ok? (y/n)> ')
        if not len(settings_ok):
            # Empty answer: ask again.
            continue
        if settings_ok[0].lower() == 'y':
            break
        # clear current settings and prompt user again
        # NOTE(review): re-prompt calls get_settings_from_user(), while the
        # initial load used _get_user_settings() — confirm both exist and
        # this asymmetry is intentional.
        self.settings = OrderedDict()
        self.get_settings_from_user()
    if not self._using_prev_settings:
        while True:
            # prompt user to save new settings for future use
            save = raw_input('\nSave settings? (y/n)> ')
            if not len(save):
                continue
            if save[0].lower() == 'y':
                # write configs to file
                try:
                    self.write_configs(self.settings_file, self.settings)
                except IOError:
                    print "Error writing to config file %s" % self.settings_file
            # NOTE(review): break reconstructed at loop level (any
            # non-empty answer exits) — original indentation was lost;
            # confirm 'n' was not meant to re-prompt.
            break
def __init__(self, settings_file=None, user_settings_file=None):
    """Create an empty settings store.

    Args:
        settings_file: path to the shared settings file, or None.
        user_settings_file: path to the per-user settings file, or None.
    """
    # Remember where settings are persisted.
    self.settings_file = settings_file
    self.user_settings_file = user_settings_file
    # Ordered mappings keep settings in their declared order.
    self.settings = OrderedDict()
    self.user_settings = OrderedDict()