def markdown(content: str, **kwargs):
    '''Convert Markdown *content* to HTML, attaching extracted metadata.

    Returns an ``HtmlWithMeta`` string subclass whose ``meta`` attribute
    holds ``md.meta`` (populated by the meta extensions) or ``{}``.

    kwargs:
        extensions: extra Markdown extension names to enable
        extension_configs: per-extension config dicts merged over defaults
    '''
    class HtmlWithMeta(str):
        meta = None

    # Built-in extensions, deduplicated together with any caller-supplied ones.
    base_extensions = [
        'larc.markdown.meta_yaml',
        'larc.markdown.yaml_data',
        'larc.markdown.card',
        'larc.markdown.table',
        'extra',
        'codehilite',
    ]
    extensions = tuple(set(base_extensions) | set(kwargs.get('extensions', [])))

    # Caller-supplied configs shallowly override the defaults.
    default_configs = {
        'extra': {},
        'codehilite': {
            'noclasses': True,
            'guess_lang': False,
        },
    }
    configs = {**default_configs, **kwargs.get('extension_configs', {})}

    md = _markdown.Markdown(
        extensions=extensions,
        extension_configs=configs,
    )
    output = HtmlWithMeta(md.convert(content))
    output.meta = md.meta or {}
    return output
def _extend_data(filters, data, inv_idx, emp_idx):
    # Append sales-employee columns to each report row and optionally filter
    # the rows by a specific employee.
    #
    # filters: report filter object; only `sales_employee` is read here
    # data:    list of row-lists; column `inv_idx` holds a Sales Invoice name
    # inv_idx: index of the invoice-name column in each (original) row
    # emp_idx: index, in the EXTENDED row, of the column compared against
    #          filters.sales_employee
    invoices = [x[inv_idx] for x in data]
    # Build {invoice_name: [employee_id, employee_name]} in one query:
    # groupby("name") -> {name: [row, ...]}, first -> the single row dict,
    # final valmap -> the two employee fields as a list.
    get_employee_map = compose(
        valmap(lambda x: [x.get("pb_sales_employee"), x.get("pb_sales_employee_name")]),
        valmap(first),
        groupby("name"),
        lambda: frappe.db.sql(
            """
                SELECT name, pb_sales_employee, pb_sales_employee_name
                FROM `tabSales Invoice` WHERE name IN %(invoices)s
            """,
            values={"invoices": invoices},
            as_dict=1,
        ),
    )
    # Skip the query entirely when there are no rows (empty IN clause).
    employees = get_employee_map() if invoices else {}
    # Append [employee_id, employee_name] to the END of each row.
    # NOTE(review): employees[x[inv_idx]] raises KeyError if an invoice name
    # is missing from the query result — assumes every row references an
    # existing Sales Invoice; confirm.
    set_employee = compose(list, lambda x: concatv(x, employees[x[inv_idx]]))
    extended = [set_employee(x) for x in data]
    if not filters.sales_employee:
        return extended
    return [x for x in extended if x[emp_idx] == filters.sales_employee]
def arp_table():
    '''Print the ARP table as an aligned text table (Name, IP, MAC, Company).

    Column widths are sized to the longest value in each column; a dashed
    separator row follows the header.
    '''
    log.info('Getting ARP info')
    arp_items = get_arp()
    # Bug fix: with no ARP entries, unpacking `zip(*[])` into four names
    # below raises ValueError; bail out early instead.
    if not arp_items:
        log.info('No ARP entries found')
        return
    names, ips, macs, companies = _.pipe(
        arp_items,
        _.map(lambda i: (
            i.get('name', ''),
            i.get('ip', ''),
            i.get('mac', ''),
            i.get('info', {}).get('company', ''),
        )),
        lambda items: zip(*items),  # transpose rows -> columns
    )
    # Width of each column = length of its longest value.
    max_n, max_i, max_m, max_c = _.pipe(
        [names, ips, macs, companies],
        _.map(lambda col: max(col, key=len)),
        _.map(len),
        tuple,
    )
    header = [
        ['Name', 'IP', 'MAC', 'Company'],
        ['-' * max_n, '-' * max_i, '-' * max_m, '-' * max_c],
    ]
    _.pipe(
        _.concatv(header, zip(names, ips, macs, companies)),
        __.vmap(lambda n, i, m, c: (
            n.ljust(max_n), i.ljust(max_i), m.ljust(max_m), c.ljust(max_c)
        )),
        _.map(' '.join),
        '\n'.join,
        print,
    )
def find_course_root(path: (str, Path)):
    '''Walk upward from *path* looking for a directory that contains any of
    the expected course content paths; return it, or log an error and return
    None if no ancestor qualifies.
    '''
    path = Path(path).expanduser().resolve()
    # Check the path itself first, then each of its ancestors in order.
    for candidate in [path, *path.parents]:
        candidate_paths = content_paths(candidate)
        if any(p.exists() for p in candidate_paths.values()):
            return candidate
    log.error(
        f'Could not find any course root for {path}'
    )
def _get_conditions(*conditions): return " AND ".join( concatv( [ "gp.docstatus = 1", "gp.posting_date >= %(from)s", "gp.posting_date <= %(to)s", ], [first(x) for x in filter(lambda x: x, conditions)], ))
def _get_conditions(*conditions):
    """Build the SQL WHERE clause for the report query.

    Each positional argument is an iterable of SQL clause fragments; all of
    them are flattened after the base conditions and joined with " AND ".
    """
    clauses = [
        "gp.docstatus = 1",
        "gp.posting_date >= %(from)s",
        "gp.posting_date <= %(to)s",
    ]
    for group in conditions:
        clauses.extend(group)
    return " AND ".join(clauses)
def _extend_data(filters, data, inv_idx, emp_idx):
    # Prepend sales-employee columns and append a commission column to each
    # report row, optionally filtering the rows by a specific employee.
    #
    # filters: report filter object; reads `sales_employee`,
    #          `net_amount_col_idx` and `commission_rate`
    # data:    list of row-lists; column `inv_idx` holds a Sales Invoice name
    # inv_idx: index of the invoice-name column in each (original) row
    # emp_idx: index, in the EXTENDED row, of the column compared against
    #          filters.sales_employee
    invoices = [x[inv_idx] for x in data]
    # Build {invoice_name: [employee_id, employee_name]} in one query:
    # groupby("name") -> {name: [row, ...]}, first -> the single row dict,
    # final valmap -> the two employee fields as a list.
    get_employee_map = compose(
        valmap(lambda x: [x.get("pb_sales_employee"), x.get("pb_sales_employee_name")]),
        valmap(first),
        groupby("name"),
        lambda: frappe.db.sql(
            """
                SELECT name, pb_sales_employee, pb_sales_employee_name
                FROM `tabSales Invoice` WHERE name IN %(invoices)s
            """,
            values={"invoices": invoices},
            as_dict=1,
        ),
    )
    # Skip the query entirely when there are no rows (empty IN clause).
    employees = get_employee_map() if invoices else {}
    # PREPEND [employee_id, employee_name] — unlike the append variant, this
    # shifts all original column indices by 2 in the extended row.
    set_employee = compose(list, lambda x: concatv(employees[x[inv_idx]], x))
    # Append commission = net_amount * commission_rate / 100 as the LAST
    # column of the row.
    set_commission = compose(
        list,
        lambda x: concatv(
            x,
            [
                x[filters.net_amount_col_idx] * frappe.utils.flt(
                    filters.commission_rate) / 100
            ],
        ),
    )
    # compose applies right-to-left: commission is appended first (while the
    # original indices, incl. net_amount_col_idx, are still valid), then the
    # employee columns are prepended.
    make_row = compose(set_employee, set_commission)
    extended = [make_row(x) for x in data]
    if not filters.sales_employee:
        return extended
    return [x for x in extended if x[emp_idx] == filters.sales_employee]
def pipe(graph, selection):
    '''Build a GraphPipe over *graph* and *selection*, seeded with the base
    function space (neighbor helpers, get, items) plus every space in the
    module-level `function_spaces`, each converted via `to_function_space`.
    '''
    base_space = {
        'neighbors_of_type': neighbors_of_type,
        'neighbor_selector': neighbor_selector,
        'get': get,
        'items': items,
    }
    spaces = [to_function_space(space)
              for space in [base_space, *function_spaces]]
    return GraphPipe(graph, selection, *spaces)
def neighbor_selector(selectors: T.Iterable[Selector], graph: nx.Graph,
                      node_id: T.Any, **attrs) -> T.Iterable[T.Tuple[T.Any, dict]]:
    '''Yield (neighbor_id, edge_data) pairs for neighbors of *node_id* that
    satisfy every selector.

    Each selector is called as ``selector(graph, neighbor_id, edge_data)``.
    Keyword arguments in **attrs map node-attribute names to predicates that
    are applied to the neighbor's attribute value.
    '''
    # Bug fix: previously `selectors` was left as the lazy iterator returned
    # by _.concatv; the filter below re-iterates it once per neighbor, so it
    # was exhausted after the first neighbor and every later neighbor passed
    # vacuously.  Materializing to a tuple makes each neighbor be checked
    # against the full selector set (and also guards against callers passing
    # a one-shot iterator).
    selectors = tuple(selectors)
    if attrs:
        selectors += (
            lambda graph, node_id, edge_data: all(
                v(graph.nodes[node_id][k]) for k, v in attrs.items()),
        )
    return _.pipe(
        graph[node_id].items(),
        __.vfilter(lambda node_id, edge_data: all(
            s(graph, node_id, edge_data) for s in selectors)),
    )
def render_remark_slides(course: Endpoint, course_root: str, path: str,
                         **template_kw):
    '''Render remark.js slides from a single Markdown file
    '''
    path = resolve_path(course_root, path)
    env = template_environment(course, course_root, **template_kw)

    # Curried so it can be used partially applied (kwargs first, path later).
    @curry
    def render(template_path, **render_kw):
        text = template_path.read_text()
        template = env.from_string(text)
        return template.render(**render_kw)

    config = get_config()
    branding = config.get('branding', {})

    # Stock remark CSS plus our branded slide stylesheet rendered last.
    slide_css = render(j2_path('remark-slide.css.j2'), branding=branding)
    sources = {
        'css': tuple(concatv(remark_css_sources(), [slide_css])),
        'js': remark_js_sources(),
    }

    markdown = render(path)
    html = render(
        j2_path('remark-slide-deck.html.j2'),
        sources=sources, markdown=markdown,
    )
    return html
def format_hours(to_clipboard, from_clipboard):
    '''Convert tab-separated hour entries (date, hours[, desc]) into YAML.

    Reads rows from the clipboard when *from_clipboard* is set; writes the
    YAML to the clipboard when *to_clipboard* is set, otherwise to stdout.
    '''
    hours = []
    if from_clipboard:
        hours = _.pipe(
            pyperclip.paste().splitlines(),
            _.map(lambda line: line.split('\t')),
            tuple,
        )
        log.info(hours)
        # Bug fix: guard against an empty clipboard — `hours[0]` raised
        # IndexError when no lines were pasted.  Rows with only (date, hours)
        # get an empty description appended.
        if hours and len(hours[0]) == 2:
            hours = [tuple(_.concatv(h, [''])) for h in hours]

    output_func = sys.stdout.write
    if to_clipboard:
        output_func = pyperclip.copy

    _.pipe(
        hours,
        _.map(lambda h: CommentedMap([
            ('date', parse_format(h[0])),
            ('hours', float(h[1])),
            ('desc', h[2]),
        ])),
        tuple,
        larc.yaml.dump,
        output_func,
    )
def graph_pipe(*function_spaces: common.FunctionSpace):
    """Build a graph pipe seeded with the module's type selectors followed by
    any caller-supplied function spaces.
    """
    return common.graph_pipe(type_selectors, *function_spaces)
def column_names(self):
    """Include the index names in the column names."""
    index_names = tuple(self.index.names)
    return index_names + tuple(self.columns)
def __dir__(self):
    """Extend the default attribute listing with the keys of get_params()."""
    param_names = list(self.get_params())
    return concatv(super().__dir__(), param_names)
def __dir__(self):
    """Extend the default attribute listing with self._metadata entries."""
    base_names = super().__dir__()
    return concatv(base_names, self._metadata)
def __dir__(self): """Extend the completer.""" return list( concatv(super().__dir__(), dir(self.estimator), concat(map(dir, self.env.extensions.values()))))
def __call__(self, *parts, **kw):
    """Return a new Endpoint whose parts are this endpoint's parts followed
    by *parts*, forwarding **kw to the Endpoint constructor.
    """
    combined = tuple(self.parts) + tuple(parts)
    return Endpoint(self.api, combined, **kw)
def __dir__(self): """Extend the predicted attributes.""" return concatv(super().__dir__(), dir(self.func))
def process_map(func, iterable, *iterables, **tpe_kw):
    """Process-pool version of map(): yield func applied across the
    iterables in parallel worker processes.

    func must be picklable (e.g. a top-level function or builtin).
    **tpe_kw is forwarded to ProcessPoolExecutor (e.g. max_workers=...).

    Bug fix: the extra *iterables* were previously flattened with
    concatv((iterable,), *iterables), which spliced their ELEMENTS in as
    separate map arguments instead of passing the iterables themselves.
    Executor.map accepts multiple iterables directly; the single-iterable
    behavior is unchanged.
    """
    with concurrent.futures.ProcessPoolExecutor(**tpe_kw) as executor:
        for value in executor.map(func, iterable, *iterables):
            yield value
def signature_to_tuple(sig):
    """Flatten a signature namedtuple into a tuple of (name, value) pairs,
    with the nested `python` namedtuple itself flattened to item pairs and
    placed last.
    """
    fields = sig._asdict()
    python_pairs = tuple(sig.python._asdict().items())
    pairs = [(name, value) for name, value in fields.items()
             if name != 'python']
    pairs.append(('python', python_pairs))
    return tuple(pairs)
first, concatv, ) from .common import ( Null, ) log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) _py_attr = [ 'branch', 'build', 'compiler', 'implementation', 'revision', 'version' ] _sig_attr = pipe( concatv( ['hostname', 'platform', 'architecture', 'machine', 'processor'], pipe(_py_attr, map(lambda a: f'python_{a}'), tuple), )) Signature = namedtuple('Signature', _sig_attr) def host_signature(): return Signature( socket.gethostname(), platform.platform(), platform.architecture(), platform.machine(), platform.processor(), *pipe( _py_attr, map(lambda a: f'python_{a}'), map(lambda a: getattr(platform, a)),
def gevent_vmap(func, iterable, *iterables, max_workers=10):
    # Greenlet-based map: run func over the input on a gevent pool of up to
    # `max_workers` greenlets.  vcall(func) presumably unpacks each mapped
    # item into func's positional arguments — confirm against vcall's
    # definition.
    pool = gevent.pool.Pool(max_workers)
    # NOTE(review): `*concatv((iterable, ), *iterables)` splices the ELEMENTS
    # of any extra iterables in as additional map arguments rather than the
    # iterables themselves, and gevent's Pool.map takes a single iterable —
    # with *iterables supplied this looks broken (same pattern as the
    # process_map sibling).  With no extra iterables it reduces to
    # pool.map(vcall(func), iterable), which is well-formed.  Verify the
    # intended multi-iterable semantics before changing.
    return pool.map(vcall(func), *concatv((iterable, ), *iterables))
def main():
    """Command-line entry point: transpile the JSON AST files in `json_dir`
    into generated Python modules (verifs.py and formulas.py).

    Returns 0 on success (used as the process exit code).
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-d', '--debug', action='store_true', default=False, help='Display debug messages')
    parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Increase output verbosity')
    parser.add_argument('json_dir', help='Directory containing the JSON AST and data files')
    # Parsed options are stored at module level — presumably read by helper
    # functions elsewhere in this module; confirm.
    global args
    args = parser.parse_args()
    logging.basicConfig(
        level=logging.DEBUG if args.debug else (logging.INFO if args.verbose else logging.WARNING),
        stream=sys.stdout,
    )
    if not os.path.exists(args.json_dir):
        parser.error('json_dir {!r} does not exist'.format(args.json_dir))
    if not os.path.isdir(generated_dir_path):
        os.mkdir(generated_dir_path)

    # Initialize a variables_definitions object and set global variable in visitors
    variables_definitions = python_source_visitors.variables_definitions = VariablesDefinitions()

    # Transpile verification functions
    verif_sources = list(
        mapcat(load_verifs_file, iter_ast_json_file_names(filenames=['coc*.json', 'coi*.json']))
    )
    # The transpiled verification statements are indented four spaces to sit
    # inside the generated get_errors() body.
    verifs_source = Template("""\
from ..formulas_helpers import arr, cached, inf, interval, null, positif, positif_ou_nul, present, somme


def get_errors(formulas, saisie_variables):
    errors = []
$verifs
    return errors or None
""").substitute(verifs=textwrap.indent('\n'.join(verif_sources), prefix=4 * ' '))
    write_source_file(
        file_name='verifs.py',
        source=verifs_source,
    )

    # Transpile formulas
    constants = loaders.load_constants()
    source_by_formula_name = dict(list(mapcat(
        load_regles_file,
        iter_ast_json_file_names(filenames=['chap-*.json', 'res-ser*.json']),
    )))

    def get_formula_source(variable_name):
        # Resolve the Python source for one variable, trying in order:
        # an explicit formula, a user-input (saisie) lookup, a constant,
        # then a computed (calculee) default of 0.
        source = source_by_formula_name.get(variable_name)
        if source is not None:
            return source
        if variables_definitions.is_saisie(variable_name):
            return python_source_visitors.make_formula_source(
                expression='saisie_variables.get({!r}, 0)'.format(variable_name),
                formula_name=variable_name,
            )
        if variable_name in constants:
            return python_source_visitors.make_formula_source(
                expression='constants[{!r}]'.format(variable_name),
                formula_name=variable_name,
            )
        if variables_definitions.is_calculee(variable_name):
            if not variables_definitions.is_calculee(variable_name, kind='base'):
                log.debug('Variable {!r} is declared in tgvH file but has no formula'.format(variable_name))
            return python_source_visitors.make_formula_source(
                expression='0',
                formula_name=variable_name,
            )
        # Every variable should fall into one of the categories above.
        assert False, variable_name

    # Merge variable names coming from dependencies graph and variables definitions
    # because some variables are missing in tgvH file;
    # or some constants are declared in tgvH but are not used in formulas, only in verifs.
    dependencies_by_formula_name = loaders.load_formulas_dependencies()
    all_variable_names = set(concatv(
        dependencies_by_formula_name.keys(),
        concat(dependencies_by_formula_name.values()),
        variables_definitions.definition_by_variable_name.keys(),
        constants.keys(),
    ))
    write_source_file(
        file_name='formulas.py',
        source=Template("""\
from __future__ import division

import inspect

from ..formulas_helpers import arr, cached, inf, interval, null, positif, positif_ou_nul, present, somme


def get_formulas(cache, constants, saisie_variables):
    formulas = {}
$formulas
    return formulas
""").substitute(
            formulas=textwrap.indent(
                '\n'.join(map(get_formula_source, sorted(all_variable_names))),
                prefix=4 * ' ',
            ),
        ),
    )
    return 0