def decode_json_object(data: dict) -> Do:
    """Decode a JSON object into the type named by its `tpe_key` attribute.

    Do-notation generator: each `yield` short-circuits on the failure case.
    Resolves the type path stored in the object, imports it, obtains a
    Decoder instance for it and delegates the actual decoding.
    """
    m = Map(data)
    # the serialized object must carry its own type path under `tpe_key`
    tpe_s = yield m.lift(tpe_key).to_either(
        f'no `{tpe_key}` attr in json object {m}')
    tpe = yield Either.import_path(tpe_s)
    dec = yield Decoder.e(tpe)
    yield dec.decode(tpe, m)
def substitute(files: Files, path: Path, lnum: int, col: Either[str, int],
               error: str, coco_path: Path) -> Generator:
    """Map an error in a compiled file back to its original source line.

    Looks up the compiled line, extracts the original line number from the
    `lnum_rex` marker embedded in it and builds a quickfix-style entry map.
    Do-notation generator: each `yield` short-circuits on failure.
    """
    lines = yield files.lift(path).to_either('corrupt state')
    line = yield lines.lift(lnum - 1).to_either(f'invalid line number {lnum} for {path}')
    # the compiled line carries a marker holding the original line number
    lnum_match = yield lnum_rex.search(line)
    coco_lnum = yield lnum_match.group('lnum')
    coco_lnum_i = yield parse_int(coco_lnum)
    # column info is optional; fall back to an empty map
    col_map = col / (lambda a: Map(col=a)) | Map()
    yield Right(Map(lnum=coco_lnum_i, text=error, valid=1, maker_name='mypy') ** col_map)
def parse_magnet(magnet) -> Either[str, Magnet]:
    """Parse a magnet URI into a `Magnet`, or `Left` when the scheme is wrong."""
    parsed = urlparse(magnet)
    if parsed.scheme != 'magnet':
        return Left('not a magnet')
    # each query param becomes a List of values
    params = Map(parse_qs(parsed.query)).valmap(List.wrap)
    display_name = params.get('dn') // _.head
    return Right(Magnet(display_name, params))
def flat_map(self):
    """flat_map keeps entries mapped to Just and drops those mapped to Empty."""
    first_key, first_val = 'key', 'value'
    second_key, second_val = 'key2', 'value2'
    mapping = Map({first_key: first_val, second_key: second_val})
    res = mapping.flat_map(lambda a, b: Just((a, b)) if a == first_key else Empty())
    res.should.have.key(first_key).being.equal(first_val)
    res.should_not.have.key(second_key)
def keymap(self):
    """keymap replaces every key via the function while keeping its value."""
    first_key, first_val = 'key', 'value'
    second_key, second_val = 'key2', 'value2'
    mapping = Map({first_key: first_val, second_key: second_val})
    res = mapping.keymap(lambda key: len(key))
    res.should.have.key(len(first_key)).being.equal(first_val)
    res.should.have.key(len(second_key)).being.equal(second_val)
def add_multi(self):
    """`**` merges two maps into a new one, leaving the original untouched."""
    key, val = 'key', 'value'
    extra_key, extra_val = 'key2', 'value2'
    original = Map({key: val})
    merged = original ** Map({extra_key: extra_val})
    merged.lift(extra_key).should.equal(Just(extra_val))
    original.lift(extra_key).should.equal(Empty())
def add(self):
    """`+` with a pair yields a new map containing it; the original is unchanged."""
    key, val = 'key', 'value'
    extra_key, extra_val = 'key2', 'value2'
    original = Map({key: val})
    extended = original + (extra_key, extra_val)
    extended.lift(extra_key).should.equal(Just(extra_val))
    original.lift(extra_key).should.equal(Empty())
def _search_yify(self):
    """Query the yify API with the joined CLI args and delegate result handling."""
    query = ' '.join(self.args)
    response = requests.get(yify_url, params=dict(query_term=query, limit=self._limit))
    data = Map(response.json())
    # payload -> 'data' -> 'movies' -> flat list of SearchResults;
    # any missing level collapses to an empty List
    results = ((data.get('data') / Map // __.get('movies') / List.wrap /
                __.flat_map(SearchResultFactory.from_yify)) | List())
    return self._handle_results(query, results)
def find(self):
    """find locates pairs by value, find_key by key; misses yield Empty."""
    first_key, first_val = 'key', 'value'
    second_key, second_val = 'key2', 'value2'
    mapping = Map({first_key: first_val, second_key: second_val})
    mapping.find(_ == first_val).should.equal(Just((first_key, first_val)))
    mapping.find_key(_ == second_key).should.equal(Just((second_key, second_val)))
    mapping.find(_ == 'invalid').should.equal(Empty())
    mapping.find_key(_ == 'invalid').should.equal(Empty())
def traverse(self) -> None:
    """Traversing with a Right-producing function yields Right of the mapped Map."""
    def double(a: int) -> Either[str, int]:
        return Right(a * 2)
    source = Map({1: 2, 3: 4})
    expected = Right(Map({1: 4, 3: 8}))
    source.traverse(double, Either).should.equal(expected)
def input(self):
    """Dispatch the current keypress to its mapped handler, if any."""
    handlers = Map({
        'j': self._down,
        'k': self._up,
        '%CR%': self._switch,
        's': self._switch,
        'p': self._pick,
        'r': self._revert,
        'q': self._close_tab,
    })
    pressed = self.msg.keyseq
    return handlers.get(pressed).flat_map(lambda handler: handler())
def synth_method(name: str, params: List[Tuple[str, Type]], statements: List[str],
                 _globals: dict) -> FunctionType:
    """Synthesize a method named `name` via `exec` and return the function object.

    The generated source defines the method and publishes it into the exec
    globals under a unique key, from which it is popped and returned.
    NOTE: `exec` of generated source — callers must only pass trusted
    `statements`; never feed this external input.
    """
    # unique key under which the generated function is published;
    # renamed from `id` to avoid shadowing the builtin
    synth_key = f'synth_{name}__'
    params_s = params.map2(lambda n, t: f'{n}: {typename(t)}').join_comma
    # make each parameter's type name resolvable inside the exec'd source
    param_globals = Map(params.map2(lambda n, t: (typename(t), t)))
    globs = Map(_globals) ** param_globals
    code = f'''\
def {name}(self, {params_s}) -> None:
{statements.indent(4).join_lines}
globals()['{synth_key}'] = {name}
'''
    exec(code, globs)
    return globs.pop(synth_key)
def match_title(monitor, title, res):
    """Decide whether a release title parsed by guessit matches the monitor.

    Requires: a canonical title matching either the search name or the real
    name, a matching resolution (or no resolution info when `res` is empty),
    and either a matching season/episode pair or a matching air date.
    """
    r = monitor.release
    matches = Map(guessit(title))
    # True iff guessit produced `key` and its value equals `target`
    attr = lambda key, target: matches.get(key).contains(target)
    search_name = canonicalize(r.effective_search_name)
    name = canonicalize(r.name)
    canonical_title = matches.get('title').map(canonicalize)
    return ((canonical_title.contains(search_name) or canonical_title.contains(name)) and
            (attr('screen_size', res) or (matches.get('screen_size').empty and res == '')) and
            ((attr('season', r.season) and attr('episode', r.episode)) or
             attr('date', r.airdate.date())))
def add_by_params(self):
    """Resolve the project identified by the message and wrap it in `Add`.

    Resolution order: explicit `root` option first, then a known ident,
    then resolution against the main type; total failure yields `Error`.
    """
    options = Map(self.msg.options)
    ident = self.msg.ident
    return (
        (options.get('root') / mkpath //
         F(self.data.loader.from_params, ident, params=options))
        .or_else(
            self.data.loader.by_ident(ident)
            .or_else(self.data.loader.resolve_ident(
                ident, options, self.data.main_type))
        ) / Add | Error(self._no_such_ident(ident, options))
    )
def purge(self, cmd):
    """Delete releases older than the requested number of days (default 30)."""
    days = cmd.args.head | 30
    self.log.info('Deleting releases older than {} days'.format(days))
    response = Map(self.client.put('release/purge', dict(days=days)))
    message = purge_msg.format(response['monitors'], response['links'],
                               response['releases'])
    return IO.pure(message)
def __init__(self, c_module, run, omit, c_args=(), interval=0.2, name='series'):
    """Set up the component container and instantiate components via run/omit."""
    super().__init__(name=name)
    self._c_module = c_module  # module the components are loaded from
    self._c_args = c_args  # extra positional args passed to components
    self._interval = interval  # loop interval — presumably seconds; confirm in run loop
    self.name = name
    self._running = False
    self.components = List()
    self.component_map = Map()
    self._init_components(run, omit)
def from_yify(self, result):
    """Build `SearchResult`s from a yify API movie record — one per torrent."""
    data = Map(result)
    title_long = data.get('title_long') | 'no title'
    title = data.get('title') | 'no title'
    def parse(torr):
        td = Map(torr)
        name = '{} {}'.format(title_long, td.get('quality') | '')
        size = td.get('size_bytes') | 0
        # NOTE(review): size is stringified before formatting — confirm
        # sizeof_fmt really expects a str rather than a number here
        size_str = sizeof_fmt(str(size))
        seeds = td.get('seeds') | 0
        hsh = td.get('hash') | 'no_hash'
        magnet_link = yify_magnet(title, hsh)
        return SearchResult(name, size, size_str, seeds, magnet_link)
    # a record without torrents yields an empty List
    return data.get('torrents') / List.wrap / __.map(parse) | List()
def _instances(self):
    """Provide the typeclass instances implemented for Map."""
    from amino import Map
    instances = {
        Functor: MapFunctor(),
        Traverse: MapTraverse(),
        Monoid: MapMonoid(),
    }
    return Map(instances)
def _pre_start_neovim(self):
    """Create temp project base dirs and the type->dirs mapping before nvim starts."""
    super()._pre_start_neovim()
    self.base = temp_dir('projects', 'base')
    self.base2 = temp_dir('projects', 'base2')
    self.typed1 = 'type1'
    self.type1_base = temp_dir('projects', self.typed1)
    # maps a base dir to the project types that live under it
    self.type_bases = Map({self.type1_base: List(self.typed1)})
def is_handler(name: str, f: Callable) -> Do:
    """Yield `(normalized type, f)` if `f`'s second parameter is annotated with a subtype of `alg`.

    Do-notation generator over Maybe; any failed lookup short-circuits to Nothing.
    """
    # unwrap do-notation decoration to inspect the real function
    effective = getattr(f, '__do_original', f)
    spec = yield Try(inspect.getfullargspec, effective).to_maybe
    # the second positional parameter carries the dispatch type
    param_name = yield Lists.wrap(spec.args).lift(1)
    param_type = yield Map(spec.annotations).lift(param_name)
    yield (Just((normalize_type(param_type), f)) if issubclass(param_type, alg) else Nothing)
def substitute_lnums(self) -> None:
    """Lines referencing compiled files are mapped back to original line numbers."""
    output = List('sooo...In module imported from: asdf',
                  'amino/maybe.py:116:5: error: broken',
                  'foo/bar/__coconut__.py:22: error: nutt')
    expected = List(Map(lnum=82, text='broken', valid=1, maker_name='mypy', col=5))
    return process_output(output).should.equal(expected)
def __new__(cls: type, name: str, bases: tuple, namespace: dict, **kw) -> type:
    """Create the class and record its `__init__`-derived field list, if any."""
    # extract Field records from __init__'s argspec; Nil when no __init__
    fs = Map(namespace).lift(
        '__init__') / inspect.getfullargspec / init_fields | Nil
    inst = super().__new__(cls, name, bases, namespace, **kw)
    # only store when fields were actually found (non-empty)
    if fs:
        inst._dat__fields_value = fs
    return inst
def copy(self, **kw: Any) -> Sub:
    """Return a new instance with the given fields replaced, others retained."""
    overrides = Map(kw)
    def resolve(field: Field) -> Any:
        # take the override when present, else the current attribute value
        return overrides.lift(field.name) | (lambda: getattr(self, field.name))
    values = self._dat__fields / resolve
    return cast(Dat, type(self)(*values))
def __new__(cls: type, name: str, bases: tuple, namespace: SimpleNamespace, **kw) -> type:
    """Create the class and record its `__init__`-derived field list.

    Fields are stored unless they are empty AND an inherited value exists,
    so a subclass without its own `__init__` keeps the parent's fields.
    """
    fs = Map(namespace).lift(
        '__init__') / inspect.getfullargspec / init_fields | Nil
    inst = super().__new__(cls, name, bases, namespace, **kw)
    if not (fs.empty and hasattr(inst, '_dat__fields_value')):
        inst._dat__fields_value = fs
    return inst
def _instances(self):
    """Provide the typeclass instances implemented for List."""
    from amino import Map
    instances = {
        Monad: ListMonad(),
        Traverse: ListTraverse(),
        Foldable: ListFoldable(),
        Zip: ListZip(),
        Monoid: ListMonoid(),
    }
    return Map(instances)
def single_venv_config(name: str, spec: str, **extra_vars: Any) -> Tuple[Rplugin, Venv, TestConfig]:
    """Assemble an rplugin, its venv and a TestConfig for single-venv tests.

    `extra_vars` are merged into the nvim variables next to `chromatin_venv_dir`.
    Returns the rplugin, the venv and a fully wired TestConfig whose state
    logs into a buffer and whose io/function/command handlers are test stubs.
    """
    rplugin = simple_rplugin(name, spec)
    # renamed from `dir` to avoid shadowing the builtin
    venv_dir = temp_dir('rplugin', 'venv')
    vars = Map(chromatin_venv_dir=str(venv_dir)) ** Map(extra_vars)
    conf = lens.basic.state_ctor.set(LogBufferEnv.cons)(chromatin_config)
    venv = Venv(
        rplugin.name,
        VenvMeta(name, venv_dir / name, Right(Path('/dev/null')), Right(Path('/dev/null'))))
    return rplugin, venv, TestConfig.cons(
        conf,
        vars=vars,
        io_interpreter=single_venv_io_interpreter(venv),
        logger=buffering_logger,
        function_handler=test_function_handler(exists=1),
        command_handler=test_command_handler(),
    )
def traverse(self, fa: Map[Any, A], f: Callable[[A], B], tpe: Type[G]) -> G:
    """Traverse the map's values with `f` in the monad `tpe`, rebuilding a Map inside G."""
    monad = Monad.fatal(tpe)
    def folder(z, kv: Tuple[A, B]):
        k, v = kv
        # pair the accumulated G[Map] with f(v), then append (key, result)
        return monad.map2(z.product(f(v)), lambda l, b: l.cat((k, b)))
    # fold over the (key, value) pairs starting from a pure empty Map
    return fa.to_list.fold_left(monad.pure(Map()))(folder)
def init_fields(spec: inspect.FullArgSpec) -> List[Field]:
    """Derive `Field` records from an `__init__` argspec, skipping `self`."""
    names = Lists.wrap(spec.args).tail | Nil
    annotations = Map(spec.annotations)
    def to_field(name: str) -> Field:
        # unannotated parameters default to Any
        field_type = annotations.lift(name) | Val(Any)
        return Field(name, field_type)
    return names / to_field
def _instances(self):
    """Provide the typeclass instances implemented for Maybe."""
    from amino import Map
    instances = {
        Monad: MaybeMonad(),
        Optional: MaybeOptional(),
        Traverse: MaybeTraverse(),
        Foldable: MaybeFoldable(),
        Zip: MaybeZip(),
    }
    return Map(instances)
def to_json(a: Any) -> Json:
    """Recursively convert a python value into the Json ADT."""
    if isinstance(a, (list, tuple)):
        return JsonArray(Lists.wrap(a) / to_json)
    if isinstance(a, dict):
        return JsonObject(Map(a).valmap(to_json))
    if a is None:
        return JsonNull(None)
    # anything else is treated as a scalar leaf
    return JsonScalar(a)
def init_fields(init: FunctionType, globalns: dict) -> List[Field]:
    """Derive `Field` records from an `__init__`, resolving its type hints."""
    # NOTE(review): `globalns` is currently unused — get_type_hints resolves
    # annotations via init's own module globals
    spec = inspect.getfullargspec(init)
    names = Lists.wrap(spec.args).tail | Nil
    hints = Map(get_type_hints(init))
    def to_field(name: str) -> Field:
        field_type = hints.lift(name) | Val(Any)
        return Field(name, field_type)
    return names / to_field
def __new__(cls: type, name: str, bases: tuple, namespace: SimpleNamespace, **kw) -> type:
    """Create the class and record fields resolved against the caller's globals.

    Walks one frame up so annotations referring to names in the defining
    module can be resolved by `init_fields`.
    """
    mod = inspect.currentframe()
    caller = mod.f_back
    globalns = caller.f_globals
    fs = Map(namespace).lift('__init__') / (
        lambda a: init_fields(a, globalns)) | Nil
    inst = super().__new__(cls, name, bases, namespace, **kw)
    # keep inherited fields when this class has no __init__ of its own
    if not (fs.empty and hasattr(inst, '_dat__fields_value')):
        inst._dat__fields_value = fs
    return inst
def cons() -> 'LogBufferEnv':
    """Construct a LogBufferEnv with every collection empty and no optional values."""
    return LogBufferEnv(Nil, Nothing, Nothing, Nil, Nil, Nil, Nil, Map(), Nil, log_buffer=Nil)
def from_params(self, ident: str, root: Path, params: Map):
    """Create a project from a `tpe/name` ident (type optional) plus extra params."""
    # 'tpe/name' -> ['name', 'tpe']; a bare 'name' -> ['name']
    parts = List(*reversed(ident.split('/', 1)))
    name = parts[0]
    # type from the ident, falling back to the params
    tpe = parts.lift(1).or_else(params.get('type'))
    # forward only the whitelisted extra parameters
    kw = params.at('types', 'langs', 'history')
    return self.create(name, root, tpe=tpe, **kw)
def matcher(record: Map) -> Boolean:
    """True iff the record's name and type both match the enclosing values."""
    name_matches = record.get('name').contains(name)
    type_matches = record.get('type').contains(tpe)
    return (name_matches and type_matches)
class ProteomePluginIntegrationSpec(PluginIntegrationSpec, IntegrationCommon, Logging):
    """Integration spec that boots the proteome nvim plugin against temp projects."""

    def setup(self):
        # start the plugin and wait until it has discovered projects
        super().setup()
        self.vim.cmd_sync('ProteomeStart')
        self._wait_for(lambda: self.vim.vars.p('projects').is_just)
        self.vim.cmd('ProteomePostStartup')
        self._pvar_becomes('root_dir', str(self.main_project))

    def _nvim_facade(self, vim):
        return NvimFacade(vim)

    def _pre_start_neovim(self):
        # create the temp project base dirs before neovim starts
        super()._pre_start_neovim()
        self.base = temp_dir('projects', 'base')
        self.base2 = temp_dir('projects', 'base2')
        self.typed1 = 'type1'
        self.type1_base = temp_dir('projects', self.typed1)
        self.type_bases = Map({self.type1_base: List(self.typed1)})

    def _post_start_neovim(self):
        # create a main project and a dependency project on disk, then cd into main
        super()._post_start_neovim()
        self._set_vars()
        self.tpe1 = 'tpe'
        self.tpe2 = 'tpe2'
        self.name1 = 'pro'
        self.name2 = 'dep'
        self.ident1 = '{}/{}'.format(self.tpe1, self.name1)
        self.ident2 = '{}/{}'.format(self.tpe2, self.name2)
        self.main_tpe = self.base / self.tpe1
        self.main_project = self.main_tpe / self.name1
        dep = self.base / self.tpe2 / self.name2
        self.main_project.mkdir(parents=True)
        dep.mkdir(parents=True)
        self.vim.cd(str(self.main_project))

    def _set_vars(self):
        # publish the plugin's configuration as nvim variables
        self.vim.vars.set_p('config_path', str(self._config_path))
        self.vim.vars.set_p('base_dirs', List(str(self.base), str(self.base2)))
        self.vim.vars.set_p('type_base_dirs', self.type_bases.keymap(str))
        self.vim.vars.set_p('history_base', str(self.history_base))
        self.vim.vars.set_p('plugins', self._plugins)

    @property
    def plugin_class(self):
        return Right(ProteomeNvimPlugin)

    @property
    def _plugins(self):
        return List()

    def _pre_start(self):
        pass

    @property
    def _config_path(self):
        # specs don't need a real config file
        return Path('/dev/null')

    def _project_becomes(self, name):
        self._pvar_becomes_map('active', name, _['name'])