import frappe
from toolz.curried import compose, concat, keyfilter, keymap, merge, valmap


def _get_dashboard_info(doc):
    params = ["no_of_packages", "weight_actual", "goods_value"]
    fields = list(
        concat(
            [
                ["SUM({t}_{p}) AS {t}_{p}".format(t=t, p=p) for p in params]
                for t in ["on_load", "off_load"]
            ]
        )
    )
    data = frappe.db.sql(
        """
            SELECT {fields} FROM `tabLoading Operation`
            WHERE docstatus = 1 AND shipping_order = %(shipping_order)s
        """.format(fields=", ".join(fields)),
        values={"shipping_order": doc.name},
        as_dict=1,
    )[0]

    def get_values(_type):
        fields = list(map(lambda x: "{}_{}".format(_type, x), params))
        # Keep only this load type's columns, strip the "<type>_" prefix,
        # and default missing values to 0 (curried toolz helpers).
        _get = compose(
            valmap(lambda x: x or 0),
            keymap(lambda x: x.replace("{}_".format(_type), "")),
            keyfilter(lambda x: x in fields),
        )
        return _get(data)

    on_load = get_values("on_load")
    off_load = get_values("off_load")
    current = merge({}, *[{x: on_load[x] - off_load[x]} for x in params])
    return {
        "on_load": on_load,
        "off_load": off_load,
        "current": current,
        "history": get_history(doc.name),  # get_history is defined below
    }
def atop(func, out, out_ind, *args, **kwargs):
    """ Array object version of dask.array.top """
    dtype = kwargs.get('dtype', None)
    arginds = list(partition(2, args))  # [x, ij, y, jk] -> [(x, ij), (y, jk)]
    numblocks = dict([(a.name, a.numblocks) for a, ind in arginds])
    argindsstr = list(concat([(a.name, ind) for a, ind in arginds]))

    dsk = top(func, out, out_ind, *argindsstr, numblocks=numblocks)

    # Dictionary mapping {i: 3, j: 4, ...} for i, j, ... the dimensions
    shapes = dict((a.name, a.shape) for a, _ in arginds)
    nameinds = [(a.name, i) for a, i in arginds]
    dims = broadcast_dimensions(nameinds, shapes)
    shape = tuple(dims[i] for i in out_ind)

    blockdim_dict = dict((a.name, a.blockdims) for a, _ in arginds)
    blockdimss = broadcast_dimensions(nameinds, blockdim_dict)
    blockdims = tuple(blockdimss[i] for i in out_ind)

    dsks = [a.dask for a, _ in arginds]
    return Array(merge(dsk, *dsks), out, shape, blockdims=blockdims,
                 dtype=dtype)
def iterator_to_json_lines(j, seq, dshape=None, encoding='utf-8', **kwargs):
    row = next(seq)
    seq = concat([[row], seq])  # push the peeked row back onto the stream
    if not isinstance(row, (dict, str)) and isinstance(row, Iterable):
        seq = tuples_to_records(dshape, seq)

    lines = (json.dumps(item, default=json_dumps) for item in seq)

    # Open file
    if j.path.split(os.path.extsep)[-1] == 'gz':
        f = gzip.open(j.path, 'ab')
        lines2 = (line.encode(encoding) for line in lines)
        endl = b'\n'
    else:
        f = open(j.path, 'a')
        lines2 = lines
        endl = '\n'

    for line in lines2:
        f.write(line)
        f.write(endl)
    f.close()
    return j
def __init__(self, spec=None):
    initial = spec['initial']
    graph = spec['graph']
    self.actions = sorted(set(tz.concat(c.keys() for c in graph.values())))
    n_states = len(graph)
    self.action_idx = {a: i for i, a in enumerate(self.actions)}
    n_actions = len(self.actions)

    initial_state = np.zeros(n_states)
    initial_state[initial] = 1

    # P[s][a] == [(probability, nextstate, reward, done), ...]
    # By default, an action has no effect.
    P = {
        s: {a: [(1, s, 0, False)] for a in range(n_actions)}
        for s in range(n_states)
    }
    for s, choices in graph.items():
        for a, (r, s1) in choices.items():
            done = not graph[s1]  # a state with no outgoing choices is terminal
            a = self.action_idx[a]
            P[int(s)][a] = [(1, int(s1), r, done)]
    super().__init__(n_states, n_actions, P, initial_state)
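# A hedged usage sketch for the constructor above: the super().__init__(
# n_states, n_actions, P, initial_state) call is consistent with gym's
# DiscreteEnv(nS, nA, P, isd). 'GraphEnv' is a hypothetical name for the
# enclosing class, and the spec values below are illustrative only.
spec = {
    'initial': 0,
    'graph': {
        0: {'left': (1.0, 1), 'right': (0.0, 2)},  # action: (reward, next state)
        1: {},  # no outgoing choices, so transitions into 1 are terminal
        2: {},
    },
}
env = GraphEnv(spec)  # hypothetical class name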
def chunk_exp_Q(env_type, seed, chunk_i):
    name = f'{env_type}_{seed}'
    os.makedirs(f'data/exp_Q/{name}', exist_ok=True)
    polfile = f'data/policies/{name}.pkl'
    pol = load(polfile)
    chunk = STATE_CHUNKS[chunk_i]
    env = make_env('constant_high', cost=COST, term_belief=True,
                   ground_truth=False)

    def Q(state):
        for action in env.actions(state):
            if action == env.term_action:
                q = env.expected_term_reward(state)
            else:
                samples = []
                for _ in range(1000):
                    env._state = state
                    env.init, r, *_ = env.step(action)
                    samples.append(r + sum(run_episode(pol, env)['rewards']))
                q = np.mean(samples)
            yield {'state': state, 'action': action, 'q': q}

    pd.DataFrame(list(concat(map(Q, chunk)))).to_pickle(
        f'data/exp_Q/{name}/{chunk_i}.pkl')
import numpy as np
from toolz import concat, sliding_window


def split_at_breaks(array, breaks, axis=0):
    """ Split an array into a list of arrays (using slices) at the given breaks

    >>> split_at_breaks(np.arange(6), [3, 5])
    [array([0, 1, 2]), array([3, 4]), array([5])]
    """
    padded_breaks = concat([[None], breaks, [None]])
    slices = [slice(i, j) for i, j in sliding_window(2, padded_breaks)]
    preslice = (slice(None),) * axis
    split_array = [array[preslice + (s,)] for s in slices]
    return split_array
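# With axis > 0 the preslice keeps the leading axes intact, e.g.
# split_at_breaks(np.arange(12).reshape(3, 4), [2], axis=1)
# returns two arrays of shape (3, 2).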
import toolz as tlz


def limit_layers(max_count, graphs):
    assert max_count > 0, "max_count must be > 0"
    graphs_iterator = iter(graphs)
    return tlz.concat([
        tlz.take(max_count - 1, graphs_iterator),
        # Merges all graphs remaining in the iterator, after the initial
        # max_count - 1 have been taken.
        (lambda: (yield merge_graphs(graphs_iterator)))()
    ])
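# A minimal sketch of limit_layers in action, assuming graphs are plain dicts
# and merge_graphs (a hypothetical stand-in here) unions the remaining ones.
# Because tlz.concat is lazy, the merging generator only runs after the first
# max_count - 1 graphs have been consumed from the shared iterator.
def merge_graphs(graphs):  # hypothetical stand-in for the real helper
    merged = {}
    for g in graphs:
        merged.update(g)
    return merged

layers = [{'a': 1}, {'b': 2}, {'c': 3}, {'d': 4}]
print(list(limit_layers(2, layers)))  # [{'a': 1}, {'b': 2, 'c': 3, 'd': 4}]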
def standardize(genotype: Genotype):
    if len(genotype.normal[0]) == 2:
        assert all(len(c) == 2 for c in genotype.normal)
        n = len(genotype.normal) // 2
        op_indices = concat([(i, i) for i in range(2, 2 + n)])
        op_names, indices = zip(*genotype.normal)
        normal = list(zip(op_names, op_indices, indices))

        assert all(len(c) == 2 for c in genotype.reduce)
        n = len(genotype.reduce) // 2
        op_indices = concat([(i, i) for i in range(2, 2 + n)])
        op_names, indices = zip(*genotype.reduce)
        reduce = list(zip(op_names, op_indices, indices))
    else:
        normal = genotype.normal
        reduce = genotype.reduce
    normal = sorted(normal, key=lambda c: (c[1], c[2]))
    reduce = sorted(reduce, key=lambda c: (c[1], c[2]))
    return Genotype(
        normal=normal,
        normal_concat=genotype.normal_concat,
        reduce=reduce,
        reduce_concat=genotype.reduce_concat)
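# Hedged sketch: Genotype here matches the DARTS-style namedtuple of
# (normal, normal_concat, reduce, reduce_concat); the cell values below are
# illustrative, not taken from the source.
from collections import namedtuple

Genotype = namedtuple('Genotype', 'normal normal_concat reduce reduce_concat')

g = Genotype(
    normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1),
            ('skip_connect', 0), ('sep_conv_3x3', 2)],
    normal_concat=range(2, 4),
    reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1),
            ('skip_connect', 2), ('max_pool_3x3', 0)],
    reduce_concat=range(2, 4),
)
# standardize(g) rewrites each 2-tuple (op, input) into a 3-tuple
# (op, output_node, input), pairing output nodes 2..2+n in order, then sorts.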
def broadcast_dimensions(argpairs, numblocks, sentinels=(1, (1,))):
    """ Find block dimensions from arguments

    Parameters
    ----------
    argpairs: iterable
        name, ijk index pairs
    numblocks: dict
        maps {name: number of blocks}
    sentinels: iterable (optional)
        values for singleton dimensions

    Examples
    --------
    >>> argpairs = [('x', 'ij'), ('y', 'ji')]
    >>> numblocks = {'x': (2, 3), 'y': (3, 2)}
    >>> broadcast_dimensions(argpairs, numblocks)
    {'i': 2, 'j': 3}

    Supports numpy broadcasting rules

    >>> argpairs = [('x', 'ij'), ('y', 'ij')]
    >>> numblocks = {'x': (2, 1), 'y': (1, 3)}
    >>> broadcast_dimensions(argpairs, numblocks)
    {'i': 2, 'j': 3}

    Works in other contexts too

    >>> argpairs = [('x', 'ij'), ('y', 'ij')]
    >>> d = {'x': ('Hello', 1), 'y': (1, (2, 3))}
    >>> broadcast_dimensions(argpairs, d)
    {'i': 'Hello', 'j': (2, 3)}
    """
    # List like [('i', 2), ('j', 1), ('i', 1), ('j', 2)]
    L = concat([zip(inds, dims)
                for (x, inds), (_, dims)
                in join(first, argpairs, first, numblocks.items())])
    g = groupby(0, L)
    g = dict((k, set([d for i, d in v])) for k, v in g.items())
    g2 = dict((k, v - set(sentinels) if len(v) > 1 else v)
              for k, v in g.items())
    if g2 and not set(map(len, g2.values())) == set([1]):
        raise ValueError("Shapes do not align %s" % g)
    return valmap(first, g2)
def elemwise(op, *args, **kwargs):
    """ Apply elementwise function across arguments

    Respects broadcasting rules

    >>> elemwise(add, x, y)  # doctest: +SKIP
    >>> elemwise(sin, x)  # doctest: +SKIP

    See also:
        atop
    """
    name = kwargs.get('name') or next(names)
    out_ndim = max(len(arg.shape) if isinstance(arg, Array) else 0
                   for arg in args)
    expr_inds = tuple(range(out_ndim))[::-1]

    arrays = [arg for arg in args if isinstance(arg, Array)]
    other = [(i, arg) for i, arg in enumerate(args)
             if not isinstance(arg, Array)]

    if 'dtype' in kwargs:
        dt = kwargs['dtype']
    elif not all(a._dtype is not None for a in arrays):
        dt = None
    else:
        vals = [np.empty((1,) * a.ndim, dtype=a.dtype)
                if hasattr(a, 'dtype') else a
                for a in args]
        try:
            dt = op(*vals).dtype
        except AttributeError:
            dt = None

    if other:
        op2 = partial_by_order(op, other)
    else:
        op2 = op

    return atop(op2, name, expr_inds,
                *concat((a, tuple(range(a.ndim)[::-1])) for a in arrays),
                dtype=dt)
def _get_columns(filters):
    join = compose(list, concatv)
    return join(
        [
            {
                "fieldtype": "Data",
                "fieldname": "item_description",
                "label": "Description",
                "width": 240,
            },
            {
                "fieldtype": "Link",
                "fieldname": "booking_order",
                "options": "Booking Order",
                "label": "Booking Order",
                "width": 120,
            },
            {
                "fieldtype": "Data",
                "fieldname": "consignor_name",
                "label": "Consignor Name",
                "width": 180,
            },
            {
                "fieldtype": "Data",
                "fieldname": "consignee_name",
                "label": "Consignee Name",
                "width": 180,
            },
        ],
        concat(
            [
                {
                    "fieldtype": "Int",
                    "fieldname": "{}__no_of_packages".format(x),
                    "label": "{} Packages".format(x),
                    "width": 90,
                },
                {
                    "fieldtype": "Float",
                    "fieldname": "{}__weight_actual".format(x),
                    "label": "{} Weight".format(x),
                    "width": 90,
                },
            ]
            for x in activities
        ),
    )
def _():
    return concat(convert(chunks(Iterator), js, **kwargs) for js in jsons)
def __dir__(self):
    """Extend the completer."""
    return list(
        concatv(
            super().__dir__(),
            dir(self.estimator),
            concat(map(dir, self.env.extensions.values())),
        )
    )
def glob(self, root, recursive=True):
    # walk yields (root, [dirs], [files]) -> join each file (tup[2]) and
    # each dir (tup[1]) with its root (tup[0]); map here is curried toolz.map
    return pipe(os.walk(expanduser(root)),
                mapcat(lambda tup: map(lambda f: join(tup[0], f))
                       (concat([tup[2], tup[1]]))),
                list)  # noqa
def _():
    return concat(convert(chunks(list), js, **kwargs) for js in jsons)
def get_history(name):
    booking_logs = frappe.get_all(
        "Booking Log",
        filters={"booking_order": name},
        fields=[
            "'Booking Log' as doctype",
            "posting_datetime",
            "booking_order",
            "shipping_order",
            "station",
            "activity",
            "loading_operation",
            "loading_unit",
            "sum(no_of_packages) as no_of_packages",
            "sum(weight_actual) as weight_actual",
        ],
        order_by="posting_datetime",
        group_by="posting_datetime,activity",
    )
    # For each consecutive pair of booking logs, fetch the shipping logs
    # posted in between (map and sliding_window are curried toolz helpers).
    get_shipping_logs = compose(
        concat,
        map(
            lambda x: frappe.get_all(
                "Shipping Log",
                filters={
                    "shipping_order": x[0].get("shipping_order"),
                    "activity": ("in", ["Stopped", "Moving"]),
                    "posting_datetime": (
                        "between",
                        [
                            x[0].get("posting_datetime"),
                            x[1].get("posting_datetime"),
                        ],
                    ),
                },
                fields=[
                    "'Shipping Log' as doctype",
                    "posting_datetime",
                    "shipping_order",
                    "station",
                    "activity",
                ],
                order_by="posting_datetime",
            )
            if x[0].get("shipping_order")
            else []
        ),
        sliding_window(2),
    )
    shipping_logs = get_shipping_logs(
        booking_logs + [{"posting_datetime": frappe.utils.now()}]
    )

    def get_message(log):
        if log.get("doctype") == "Booking Log":
            if log.get("loading_unit") == "Weight":
                return "{} {} units by weight at {}".format(
                    log.get("activity"),
                    abs(log.get("weight_actual")),
                    log.get("station"),
                )
            return "{} {} packages at {}".format(
                log.get("activity"),
                abs(log.get("no_of_packages")),
                log.get("station"),
            )
        if log.get("doctype") == "Shipping Log":
            prepo = "to" if log.get("activity") == "Moving" else "at"
            return "{} {} {}".format(
                log.get("activity"), prepo, log.get("station"))
        return ""

    def get_link(log):
        if log.get("doctype") == "Shipping Log":
            return "#Form/Shipping Order/{}".format(log.get("shipping_order"))
        if log.get("doctype") == "Booking Log" and log.get("loading_operation"):
            return "#Form/Loading Operation/{}".format(
                log.get("loading_operation"))
        return ""

    def get_event(log):
        return {
            "datetime": log.get("posting_datetime"),
            "status": log.get("activity"),
            "message": get_message(log),
            "link": get_link(log),
        }

    return sorted(
        [get_event(x) for x in concat([booking_logs, shipping_logs])],
        key=lambda x: frappe.utils.get_datetime(x.get("datetime")),
    )
def ppipe(iterable, func, chunk_size=1):
    """Multi-core pipe."""
    out = pmap(func, iterable, chunk_size)
    return list(concat(out))
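# Hedged sketch of ppipe's contract, assuming pmap (not shown in the source)
# applies func over chunk_size-sized chunks in parallel and yields one result
# list per chunk, which concat then flattens. Sequential stand-in:
from toolz import concat, partition_all

def pmap(func, iterable, chunk_size):  # illustrative stand-in only
    return (list(map(func, chunk))
            for chunk in partition_all(chunk_size, iterable))

print(ppipe([1, 2, 3, 4], lambda x: x * 2, chunk_size=2))  # [2, 4, 6, 8]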
def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='Display debug messages')
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Increase output verbosity')
    parser.add_argument('json_dir',
                        help='Directory containing the JSON AST and data files')
    global args
    args = parser.parse_args()
    logging.basicConfig(
        level=logging.DEBUG if args.debug
        else (logging.INFO if args.verbose else logging.WARNING),
        stream=sys.stdout,
    )

    if not os.path.exists(args.json_dir):
        parser.error('json_dir {!r} does not exist'.format(args.json_dir))

    if not os.path.isdir(generated_dir_path):
        os.mkdir(generated_dir_path)

    # Initialize a variables_definitions object and set global variable in visitors
    variables_definitions = python_source_visitors.variables_definitions = \
        VariablesDefinitions()

    # Transpile verification functions
    verif_sources = list(
        mapcat(load_verifs_file,
               iter_ast_json_file_names(filenames=['coc*.json', 'coi*.json']))
    )
    verifs_source = Template("""\
from ..formulas_helpers import arr, cached, inf, interval, null, positif, positif_ou_nul, present, somme


def get_errors(formulas, saisie_variables):
    errors = []
$verifs
    return errors or None
""").substitute(verifs=textwrap.indent('\n'.join(verif_sources), prefix=4 * ' '))
    write_source_file(
        file_name='verifs.py',
        source=verifs_source,
    )

    # Transpile formulas
    constants = loaders.load_constants()
    source_by_formula_name = dict(list(mapcat(
        load_regles_file,
        iter_ast_json_file_names(filenames=['chap-*.json', 'res-ser*.json']),
    )))

    def get_formula_source(variable_name):
        source = source_by_formula_name.get(variable_name)
        if source is not None:
            return source
        if variables_definitions.is_saisie(variable_name):
            return python_source_visitors.make_formula_source(
                expression='saisie_variables.get({!r}, 0)'.format(variable_name),
                formula_name=variable_name,
            )
        if variable_name in constants:
            return python_source_visitors.make_formula_source(
                expression='constants[{!r}]'.format(variable_name),
                formula_name=variable_name,
            )
        if variables_definitions.is_calculee(variable_name):
            if not variables_definitions.is_calculee(variable_name, kind='base'):
                log.debug('Variable {!r} is declared in tgvH file but has no '
                          'formula'.format(variable_name))
            return python_source_visitors.make_formula_source(
                expression='0',
                formula_name=variable_name,
            )
        assert False, variable_name

    # Merge variable names coming from the dependencies graph and the variables
    # definitions, because some variables are missing from the tgvH file and
    # some constants declared in tgvH are used only in verifs, not in formulas.
    dependencies_by_formula_name = loaders.load_formulas_dependencies()
    all_variable_names = set(concatv(
        dependencies_by_formula_name.keys(),
        concat(dependencies_by_formula_name.values()),
        variables_definitions.definition_by_variable_name.keys(),
        constants.keys(),
    ))
    write_source_file(
        file_name='formulas.py',
        source=Template("""\
from __future__ import division

import inspect

from ..formulas_helpers import arr, cached, inf, interval, null, positif, positif_ou_nul, present, somme


def get_formulas(cache, constants, saisie_variables):
    formulas = {}
$formulas
    return formulas
""").substitute(
            formulas=textwrap.indent(
                '\n'.join(map(get_formula_source, sorted(all_variable_names))),
                prefix=4 * ' ',
            ),
        ),
    )
    return 0
import itertools

from toolz import concat


def as_lines(sequence):
    return ''.join(concat(zip(sequence, itertools.repeat('\n'))))
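# Example: zip pairs each item with '\n', concat flattens the pairs, and
# join produces one newline-terminated string:
# as_lines(['a', 'b']) -> 'a\nb\n'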
def chunk_sum_returns(x, chunk_i):
    pol = lc_policy(x)
    chunk = CLICK_CHUNKS[chunk_i]
    steps = concat(yoked_rollout(pol, *args) for args in chunk)
    return sum(map(get('q'), steps))
def fetchall_with_sleep(result_proxy):
    for x in concat(_fetchall_with_sleep(result_proxy)):
        yield x
    sleep()