def func(fields, property_array, delta_x, epbcs, functions, ebcs, lcbcs):
    # NOTE: fields, property_array and delta_x were free names in the original
    # snippet; they are taken as parameters here so the function is self-contained.
    return pipe(
        fields,
        get_term(property_array, delta_x),
        lambda x: [Equation("balance_of_forces", x)],
        Equations,
        lambda x: Problem("elasticity", equations=x, functions=functions),
        do(lambda x: x.time_update(ebcs=ebcs, epbcs=epbcs, lcbcs=lcbcs)),
        do(lambda x: x.set_solver(get_nls(x.get_evaluator()))),
    )

def __init__(self, objectives):
    # pylint: disable=line-too-long
    super().__init__(None, 'calf')

    motors = ['eat_and_forward', 'forward', 'dive_and_forward', 'up_and_forward']
    eat_and_forward, forward = frozenset([0]), frozenset([1])
    dive_and_forward, up_and_forward = frozenset([2]), frozenset([3])
    motors_to_action = {eat_and_forward: 'eat_and_forward',
                        forward: 'forward',
                        dive_and_forward: 'dive_and_forward',
                        up_and_forward: 'up_and_forward',
                        '*': '-'}
    motor_model = MotorModel(motors_to_action)

    self.network = N = Network(None, objectives)
    self.status = N.get_NEEDs()
    self.status_history = {'energy': []}
    s1 = N.add_SENSOR_node(Squid)
    s2 = N.add_SENSOR_node(Song)
    self.network_model = NetworkModel({frozenset([]): 'no_sensors',
                                       frozenset([s1]): 'squid',
                                       frozenset([s2]): 'song',
                                       frozenset([s1, s2]): 'squid_and_song'})
    self.motor_network = M = MotorNetwork(motors, motors_to_action)

    # NOTE: init=agent_start_pos, using a location here (only for debugging),
    # is a state when MDPs are used
    self.ndp = NetworkDP(calf_start_pos, self.status, motor_model, gamma=.9,
                         network_model=self.network_model)
    self.q_agent = NetworkQLearningAgent(self.ndp, Ne=0, Rplus=2,
                                         alpha=lambda n: 60. / (59 + n),
                                         epsilon=0.2, delta=0.5)

    # compose applies the functions from right to left
    self.program = compose(do(partial(l.debug, 'Calf mnetwork.update'))
                           , do(partial(l.debug, M))
                           , lambda a: do(partial(l.debug, '*** CALF EATING! ***'))(a) if a == 'eat_and_forward' else a
                           , M.update
                           , do(partial(l.debug, 'Calf q_agent'))
                           , self.q_agent
                           , do(partial(l.debug, N))
                           , lambda p: do(partial(l.debug, '*** CALF HEARD SONG! ***'))(p) if s2 in p[0] else p
                           , lambda p: do(partial(l.debug, '*** CALF FOUND SQUID! ***'))(p) if s1 in p[0] else p
                           , do(partial(l.debug, 'Calf network.update'))
                           , N.update
                           , do(partial(l.debug, 'Calf percept'))
                           )

def display_images():
    """Obtain results from the generators and plot the image and the
    image intensity.
    """
    vc = setup_camera_and_plot()
    ims = stream_frames(vc)  # Get the generator
    fig, ax = plt.subplots(1, 2, figsize=(10, 5))
    im = setup_plotting(imagestream=ims, imageaxis=ax[0], traceaxis=ax[1])
    x_width = 50
    starttime = time.time()  # Time this
    try:
        pipeline = tz.pipe(
            ims,
            c.map(lambda x: cv2.cvtColor(x, cv2.COLOR_BGR2RGB)),
            c.map(c.do(im.set_array)),
            c.map(lambda x: np.mean(x)),
            c.sliding_window(x_width))
        for n, i in enumerate(pipeline):
            xdata = np.linspace(n, n + x_width, x_width)
            plot_intensity(axis=ax[1], xdata=xdata, imageintensity=i)
            plt.show(block=False)
            plt.pause(0.001)
    except KeyboardInterrupt:
        elapsedtime = time.time() - starttime
        print('The collection FPS was {:0.2f}'.format(n / elapsedtime))
        vc.release()

async def test_fetch_all_by_user(database, make_user, make_heroes):
    users = make_user(), make_user()
    for user in users:
        insert_user(user.dict())
    users_heroes = pipe(
        users,
        lambda users: {user.id: make_heroes(10, user_id=user.id) for user in users},
        do(lambda mapping: [
            insert_hero(hero.dict())
            for user, heroes in mapping.items()
            for hero in heroes
        ]),
    )
    async with database.transaction():
        for user in users:
            user_id = user.id
            heroes = users_heroes[user_id]
            result = list(await hero_repository.fetch_all_by_user(user_id))
            assert len(result) == len(heroes)
            for hero in result:
                assert hero in heroes

def solve(x_data, elastic_modulus, poissons_ratio, macro_strain=1.0, delta_x=1.0):
    """Solve the elasticity problem

    Args:
      x_data: microstructure with shape (n_samples, n_x, ...)
      elastic_modulus: the elastic modulus in each phase
      poissons_ratio: the Poisson's ratio for each phase
      macro_strain: the macro strain
      delta_x: the grid spacing

    Returns:
      a dictionary of strain, displacement and stress with stress and
      strain of shape (n_samples, n_x, ..., 3) and displacement shape
      of (n_samples, n_x + 1, ..., 2)
    """
    def solve_one_sample(property_array):
        return pipe(
            get_fields(property_array.shape[:-1], delta_x),
            lambda x: get_problem(x, property_array, delta_x, macro_strain),
            lambda x: (x, x.solve()),
            lambda x: get_data(property_array.shape[:-1], *x),
        )

    convert = lambda x: _convert_properties(
        len(x.shape) - 1, elastic_modulus, poissons_ratio)[x]

    solve_multiple_samples = sequence(
        do(_check(len(elastic_modulus), len(poissons_ratio))),
        convert,
        map_(solve_one_sample),
        lambda x: zip(*x),
        map_(np.array),
        lambda x: zip(("strain", "displacement", "stress"), tuple(x)),
        dict,
    )

    shape = lambda x: (x.shape[0],) + x.shape[1:] + (3,)
    dis_shape = lambda x: (x.shape[0],) + tuple(y + 1 for y in x.shape[1:]) + (2,)

    if isinstance(x_data, np.ndarray):
        return solve_multiple_samples(x_data)
    return apply_dict_func(
        solve_multiple_samples,
        x_data,
        dict(strain=shape(x_data),
             stress=shape(x_data),
             displacement=dis_shape(x_data)),
    )

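# Hypothetical usage sketch (sizes and phase values assumed, not from the
# source): solve a batch of two-phase microstructures; the output shapes
# follow the docstring above.
x_example = np.random.randint(2, size=(5, 21, 21))
out = solve(x_example, elastic_modulus=(1.0, 10.0), poissons_ratio=(0.3, 0.3))
assert out["strain"].shape == (5, 21, 21, 3)
assert out["displacement"].shape == (5, 22, 22, 2)
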
def get_problem(u_field, v_field, calc_stiffness, calc_prestress, delta_x):
    """Get the problem

    Args:
      u_field: the displacement field
      v_field: the test function field
      calc_stiffness: a function to calculate the stiffness tensor
      calc_prestress: a function to calculate the prestress tensor
      delta_x: the mesh spacing

    Returns:
      the Sfepy problem
    """
    return pipe(
        get_terms(u_field, v_field, calc_stiffness, calc_prestress),
        lambda x: Equation("balance_of_forces", Terms([x[0], x[1]])),
        lambda x: Problem("elasticity", equations=Equations([x])),
        do(lambda x: x.time_update(ebcs=get_bcs(v_field.field.region.domain, delta_x))),
        do(lambda x: x.set_solver(get_nls(x.get_evaluator()))),
    )

def step_func(params):
    """Generate the stepping function

    Args:
      params: the parameter dictionary

    Returns:
      a function to do a time step
    """
    return rcompose(
        do(lambda x: output_step(x) if params["output"] else None),
        update_dict(
            dict(sup=lambda **x: do(lambda v: v.updateOld())(x["sup"]),
                 cupric=lambda **x: do(lambda v: v.updateOld())(x["cupric"]),
                 theta=lambda **x: dict(new=x["theta"]["new"], old=x["theta"]["new"]),
                 steps=lambda **x: x["steps"] + 1,
                 sweeps=lambda **x: 0,
                 eta=lambda **x: x["eta"],
                 data=update_data)),
        iterate_(sweep_func(params), params["max_sweeps"]))

def to_doi_csv(csvfile):
    """Generate a DOI CSV file from simulation records

    This function reads from the meta.yaml files and writes to doi.csv.

    Args:
      csvfile: the CSV file to write to

    Returns:
      a dictionary with file names as keys and values as dictionaries
      of the updated YAML data
    """
    return pipe(get_yaml_data(), dict, itemmap(mapping_func), do(write_csv_data(csvfile)))

def ipython_display(specs):
    """Run publish_display_data for the JS and HTML

    Args:
      specs: a list of Vega specs
    """
    pipe(
        specs,
        map(lambda x: (uuid.uuid4(), vega.Vega(x))),
        list,
        do(html_publish_map),
        map(tlam(js_publish)),
        list
    )

def calc_contour(arr, param_lx, param_nx):
    """Get contour points for 0.5 contour
    """
    return pipe(
        param_lx / param_nx,
        lambda dx: np.linspace(
            -dx * (param_nx / 2 - 0.5), dx * (param_nx / 2 - 0.5), param_nx
        ),
        lambda x: np.meshgrid(x, x),
        lambda x: plt.contour(x[0], x[1], arr.reshape(param_nx, param_nx), (0.5,))
        .collections[0]
        .get_paths()[0]
        .vertices,
        do(lambda x: plt.close()),
    )

def migrate(func):
    """Migrate the meta.yaml files using a callback function.

    This function reads and writes to the meta.yaml files.

    Args:
      func: the callback function which takes the YAML dictionary and
        returns a new dictionary

    Returns:
      a dictionary with file names as keys and values as dictionaries
      of the updated YAML data
    """
    return pipe(get_yaml_data(), dict, valmap(func), do(itemmap(lambda x: write_yaml_data(*x))))

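# Hypothetical callback sketch (the field name is assumed, not from the
# source): stamp a version field onto every record; `do(...)` writes the
# files as a side effect while the updated mapping is returned unchanged.
migrate(lambda data: {**data, "schema_version": 2})
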
def get_var(params, ini, inf):
    """Make a variable with constraint

    Args:
      params: the parameter dictionary
      ini: initial value
      inf: far field value

    Returns:
      the variable
    """
    return pipe(
        params,
        lambda x: get_mesh(x["nx"], x["delta"]),
        lambda x: fipy.CellVariable(x, value=ini, hasOld=True),
        do(lambda x: x.constrain(inf, where=x.mesh.facesRight)),
    )

def __init__(self, objectives, landmarks):
    # pylint: disable=line-too-long, too-many-locals
    super().__init__(None, 'grid_agent')

    N = Network(None, objectives)
    SENSOR = N.add_SENSOR_node
    self.status = N.get_NEEDs()
    self.status_history = {'energy': [], 'water': []}

    # Create sensors
    SENSOR(Water)
    SENSOR(Energy)

    # Create one SENSOR for each square
    sensor_dict = {}
    for lm in landmarks:
        sensor_dict[frozenset([SENSOR(Landmark, lm)])] = lm
    network_model = NetworkModel(sensor_dict)

    M = MotorNetwork(motors, motors_to_action)

    # NOTE: init=agent_start_pos, using a location here (only for debugging),
    # is a state when MDPs are used
    self.ndp = NetworkDP(agent_start_pos, self.status, motor_model, .9, network_model)
    self.q_agent = NetworkQLearningAgent(self.ndp, Ne=0, Rplus=2,
                                         alpha=lambda n: 60. / (59 + n),
                                         epsilon=0.2, delta=0.5)

    # compose applies the functions from right to left
    self.program = compose(
        do(partial(l.debug, 'mnetwork.update')),
        M.update,
        do(partial(l.debug, 'q_agent')),
        self.q_agent,
        do(partial(l.debug, N)),
        do(partial(l.debug, 'network.update')),
        N.update,
        do(partial(l.debug, 'percept')),
        lambda x: do(partial(l.debug, '*** ENERGY FOUND ***'))(x)
        if 'energy' in x[1] and x[1]['energy'] > 0.0 else x,
        lambda x: do(partial(l.debug, '*** WATER FOUND ***'))(x)
        if 'water' in x[1] and x[1]['water'] > 0.0 else x,
        do(self.printU))

def annotate_bed_stream(bed_stream, bam_path, cutoff=10, extension=0,
                        contig_prefix='', bp_threshold=17000):
    """Annotate all intervals from a BED-file stream.

    Yields tuple data for each interval with calculated coverage and
    completeness.

    Args:
      bed_stream (sequence): usually a BED-file handle to read from
      bam_path (str): path to BAM-file
      cutoff (int, optional): threshold for completeness calculation,
        defaults to 10
      extension (int, optional): number of bases to extend each interval
        with (+/-), defaults to 0
      contig_prefix (str, optional): rename contigs by prefixing,
        defaults to empty string
      bp_threshold (int, optional): optimization threshold for reading
        BAM-file in chunks, defaults to 17000

    Yields:
      tuple: :class:`chanjo.BaseInterval`, coverage (float), and
        completeness (float)
    """
    # setup: connect to BAM-file
    bam = BamFile(bam_path)

    # the pipeline
    return pipe(
        bed_stream,
        filter(complement(comment_sniffer)),         # filter out comments
        map(text_type.rstrip),                       # strip invisible chars.
        map(prefix(contig_prefix)),                  # prefix to contig
        map(split(sep='\t')),                        # split lines
        map(do(validate_bed_format)),                # check correct format
        map(lambda row: bed_to_interval(*row)),      # convert to objects
        map(extend_interval(extension=extension)),   # extend intervals
        group_intervals(bp_threshold=bp_threshold),  # group by threshold
        map(process_interval_group(bam)),            # read coverage
        concat,                                      # flatten list of lists
        map(calculate_metrics(threshold=cutoff))     # calculate cov./compl.
    )

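# Hypothetical usage sketch (file names assumed, not from the source): the
# pipeline is lazy, so annotated intervals stream out as the BED handle is read.
with open('intervals.bed') as bed_handle:
    for interval, coverage, completeness in annotate_bed_stream(bed_handle, 'alignment.bam'):
        print(interval, coverage, completeness)
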
def as_attribute(ob, name=None):
    """Decorator to define an object as an attribute of another object.

    Parameters
    ----------
    ob : any
        The object to attach this to.
    name : str, optional
        The name of the attribute. By default this is the decorated
        value's ``__name__``.

    Returns
    -------
    dec : callable[any, any]
        Decorator that registers an object as an attribute of another
        object and returns it unchanged.
    """
    return do(lambda f: setattr(ob, f.__name__ if name is None else name, f))

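# Usage sketch (the `Namespace` holder is hypothetical, not from the source):
# because curried `do` returns its argument after the side effect, the
# decorated function is attached to `ob` and still returned unchanged.
class Namespace:
    pass

@as_attribute(Namespace)
def helper(x):
    return x + 1

assert Namespace.helper is helper  # attached under its own __name__
assert helper(1) == 2              # the function itself is untouched
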
def render_templates(paths):
    contents = _.pipe(
        paths,
        _.map(lambda p: Path(p).expanduser()),
        _.map(lambda p: p.read_text()),
        tuple,
    )

    if not contents:
        contents = [sys.stdin.read()]

    _.pipe(
        contents,
        _.map(markdown.meta_yaml.meta_and_content),
        __.vmap(lambda d, c: jinja2.Environment().from_string(c).render(**d)),
        _.map(_.do(print)),
        tuple,
    )

def __init__(self, objectives):
    # pylint: disable=line-too-long, too-many-locals
    # program=None
    super().__init__(None, 'mom')

    # Motors and actions
    motors = ['sing_eat_and_forward', 'forward', 'dive_and_forward', 'up_and_forward']
    sing_eat_and_forward, forward = frozenset([0]), frozenset([1])
    dive_and_forward, up_and_forward = frozenset([2]), frozenset([3])
    motors_to_action = {sing_eat_and_forward: 'sing_eat_and_forward',
                        forward: 'forward',
                        dive_and_forward: 'dive_and_forward',
                        up_and_forward: 'up_and_forward',
                        '*': '-'}
    motor_model = MotorModel(motors_to_action)

    self.network = N = Network(None, objectives)
    self.status = N.get_NEEDs()
    self.status_history = {'energy': []}
    s1 = N.add_SENSOR_node(Squid)
    self.network_model = NetworkModel({frozenset(): 'no_sensors',
                                       frozenset([s1]): 'squid'})
    self.motor_network = M = MotorNetwork(motors, motors_to_action)

    # NOTE: init=agent_start_pos, using a location here (only for debugging),
    # is a state when MDPs are used
    self.ndp = NetworkDP(mom_start_pos, self.status, motor_model, gamma=.9,
                         network_model=self.network_model)
    self.q_agent = NetworkQLearningAgent(self.ndp, Ne=0, Rplus=2,
                                         alpha=lambda n: 60. / (59 + n),
                                         epsilon=0.2, delta=0.5)

    # compose applies the functions from right to left
    self.program = compose(do(partial(l.debug, 'Mom mnetwork.update'))
                           , do(partial(l.debug, M))
                           , M.update
                           , do(partial(l.debug, 'Mom q_agent'))
                           , self.q_agent
                           , do(partial(l.debug, N))
                           , do(partial(l.debug, 'Mom network.update'))
                           , N.update
                           , do(partial(l.debug, 'Mom percept'))
                           )

def sweep_func(params):
    """Do a sweep and update the variables

    Args:
      params: the parameter dictionary

    Returns:
      a function that modifies the value dictionary
    """
    return rcompose(
        update_dict(
            dict(sup=sup_eqn(params),
                 cupric=cupric_eqn(params),
                 theta=theta_eqn(params),
                 steps=lambda **x: (x["steps"], 0.0),
                 sweeps=lambda **x: (x["sweeps"] + 1, 0.0),
                 eta=lambda **x: (calc_eta(params, x["steps"]), 0.0),
                 data=lambda **x: (x["data"], 0.0))),
        do(lambda x: output_sweep(x) if params["output"] else None),
        valmap(first))

def get_vars(params, set_eta, mesh):
    """Get the variables

    Args:
      params: the parameter dict
      set_eta: function to set the initial value of the phase field
      mesh: the FiPy mesh

    Returns:
      a dictionary of the variables (eta, d2f)
    """
    return pipe(
        dict(
            eta=fp.CellVariable(mesh=mesh, hasOld=True, value=params["eta0"], name="eta"),
            d2f=fp.FaceVariable(mesh=mesh, name="d2f"),
        ),
        do(set_eta(params, mesh)),
    )

def _solve_fe(x_data, elastic_modulus, poissons_ratio, macro_strain=1.0, delta_x=1.0):
    def solve_one_sample(property_array):
        return pipe(
            get_fields(property_array.shape[:-1], delta_x),
            lambda x: get_problem(x, property_array, delta_x, macro_strain),
            lambda x: (x, x.solve()),
            lambda x: get_data(property_array.shape[:-1], *x),
        )

    convert = lambda x: _convert_properties(
        len(x.shape) - 1, elastic_modulus, poissons_ratio)[x]

    solve_multiple_samples = sequence(
        do(_check(len(elastic_modulus), len(poissons_ratio))),
        convert,
        map_(solve_one_sample),
        lambda x: zip(*x),
        map_(np.array),
        lambda x: zip(("strain", "displacement", "stress"), tuple(x)),
        dict,
    )

    shape = lambda x: (x.shape[0],) + x.shape[1:] + (3,)
    dis_shape = lambda x: (x.shape[0],) + tuple(y + 1 for y in x.shape[1:]) + (2,)

    if isinstance(x_data, np.ndarray):
        return solve_multiple_samples(x_data)
    return apply_dict_func(
        solve_multiple_samples,
        x_data,
        dict(strain=shape(x_data),
             stress=shape(x_data),
             displacement=dis_shape(x_data)),
    )

def main(folder, **params):  # pragma: no cover
    """Run the calculation

    Args:
      folder: the output directory
      params: the parameter dictionary, including the number of
        iterations and the output frequency

    Returns:
      tuple of strain, displacement and stress
    """
    if os.path.exists(folder):
        click.echo("{0} directory already exists, remove to continue".format(folder))
    else:
        os.makedirs(folder)
        output_iter = sequence(
            iterate_(one_iter(params), params["output_frequency"]),
            do(dump_data(folder, params)),
        )
        pipe(
            dict(e11=0.0, e12=0.0, e22=0.0, eta=None, step_counter=0),
            iterate_(output_iter, params["iterations"] // params["output_frequency"]),
        )

def __init__(self):
    # pylint: disable=line-too-long
    super().__init__(None, 'calf')

    N = Network(None, {'energy': 1.0})
    self.status = N.get_NEEDs()
    self.status_history = {'energy': []}
    s1 = N.add_SENSOR_node(Squid)
    r1 = N.add_RAND_node(0.3)
    r2 = N.add_RAND_node(0.3)
    r3 = N.add_RAND_node(0.3)
    s2 = N.add_SENSOR_node(Song)

    M = MotorNetwork(motors, motors_to_action)

    state_to_motor = {
        frozenset([r1, r2, r3]): forward,
        frozenset([r1, r2]): forward,
        frozenset([r1, r3]): forward,
        frozenset([r2, r3]): forward,
        frozenset([r2]): forward,
        frozenset([r3]): up_and_forward,
        frozenset([r1]): up_and_forward,
        frozenset([]): dive_and_forward
    }

    # compose applies the functions from right to left
    self.program = compose(
        do(partial(l.debug, 'Calf mnetwork.update')),
        M.update,
        do(partial(l.debug, 'Calf state_to_motor')),
        lambda p: eat_and_forward if s1 in p[0] else (dive_and_forward if s2 in p[0] else up_and_forward)
        #, lambda s: eat_and_forward if s1 in s else (dive_and_forward if s2 in s else state_to_motor.get(s))
        , lambda p: do(partial(l.info, '--- CALF HEARD SONG, DIVING! ---'))(p) if s2 in p[0] else p,
        lambda p: do(partial(l.info, '--- CALF FOUND SQUID, EATING! ---'))(p) if s1 in p[0] else p,
        do(partial(l.debug, 'Calf network.update')),
        N.update,
        do(partial(l.debug, 'Calf percept')))

def apply_bed_stream(bed_stream, bam_path, fn, extension=0,
                     contig_prefix='', bp_threshold=17000):
    """Maps a function to all intervals of a BED stream

    Args:
      bed_stream (sequence): usually a BED-file handle to read from
      bam_path (str): path to BAM-file
      fn: function that takes a list of intervals and read depths and
        computes a summary statistic over them. See
        annotator.stages.calculate_metrics for an example.
      extension (int, optional): number of bases to extend each interval
        with (+/-), defaults to 0
      contig_prefix (str, optional): rename contigs by prefixing,
        defaults to empty string
      bp_threshold (int, optional): optimization threshold for reading
        BAM-file in chunks, defaults to 17000
    """
    # setup: connect to BAM-file
    bam = BamFile(bam_path)

    # the pipeline
    return pipe(
        bed_stream,
        filter(complement(comment_sniffer)),         # filter out comments
        map(text_type.rstrip),                       # strip invisible chars.
        map(prefix(contig_prefix)),                  # prefix to contig
        map(split(sep='\t')),                        # split lines
        map(do(validate_bed_format)),                # check correct format
        map(lambda row: bed_to_interval(*row)),      # convert to objects
        map(extend_interval(extension=extension)),   # extend intervals
        group_intervals(bp_threshold=bp_threshold),  # group by threshold
        map(process_interval_group(bam)),            # read coverage
        concat,                                      # flatten list of lists
        map(fn)                                      # map provided function
    )

def __init__(self):
    # pylint: disable=line-too-long, too-many-locals
    super().__init__(None, 'mom')

    N = Network(None, {'energy': 1.0})
    self.status = N.get_NEEDs()
    self.status_history = {'energy': []}

    M = MotorNetwork(motors, motors_to_action)

    SENSOR, RAND, AND = N.add_SENSOR_node, N.add_RAND_node, N.add_AND_node
    NOT, OR = N.add_NOT_node, N.add_OR_node

    s1, r1, r2, r3 = SENSOR(Squid), RAND(0.3), RAND(0.3), RAND(0.3)
    n3 = AND([NOT([s1]),
              OR([AND([r1, NOT([r2, r3])]),
                  AND([r3, NOT([r1, r2])])])])
    n4 = AND([NOT([s1]), NOT([r1, r2, r3])])
    n2 = NOT([s1, n3, n4])

    state_to_motor = {
        frozenset([s1]): sing_eat_and_forward,
        frozenset([n2]): forward,
        frozenset([n3]): dive_and_forward,
        frozenset([n4]): up_and_forward
    }
    l.info('state_to_motor: %s', state_to_motor)
    l.info('motors_to_action: %s', motors_to_action)

    # compose applies the functions from right to left
    self.program = compose(
        do(partial(l.debug, 'Mom mnetwork.update')),
        M.update,
        do(partial(l.debug, 'Mom state_to_motor')),
        lambda p: state_to_motor.get(p[0]),
        do(partial(l.debug, N)),
        do(partial(l.debug, 'Mom filter interesting states')),
        lambda p: (p[0] & {s1, n2, n3, n4}, p[1]),
        do(partial(l.debug, 'Mom network.update')),
        N.update,
        do(partial(l.debug, 'Mom percept')))

        date=on_date.strftime("%Y%m%d"),
        zipcode=zipcode)


api_query_args = getargspec(request_url).args


def get_json_or_raise(response):
    """Recover the json payload or raise an exception

    :rtype: dict
    :param response: request.Response
    :return: json payload
    """
    if response.status_code != 200:
        raise APIRequestFailed(response.status_code)
    data = response.json()
    error = data.get('response', {}).get('error')
    if error:
        raise APIRequestFailed(error)
    else:
        return data


query_api = compose(
    get_json_or_raise,
    do(compose(log.info,
               "Received response with status {}".format,
               attribute('status_code'))),
    requests.get,
    do(compose(log.info, "Sending request to {}".format)),
    request_url)

def md_to_latex(text):
    return _.pipe(
        text,
        larc.shell.shell_pipe('pandoc -t latex'),
        _.do(print),
    )

return params["k_plus"] * left(sup) + params["k_minus"] * (calc_j0( params, eta, cupric) + theta["new"] * calc_j1(params, eta, cupric)) @memoize def new_value(): """Calculate and cache the new value """ return expression1() / (1 + params["dt"] * expression2()) return (dict(new=new_value(), old=theta["old"]), abs(new_value() - theta["new"])) GET_MASK = rcompose( lambda x: fipy.CellVariable(mesh=x), do(lambda x: x.setValue(1, where=x.mesh.x < x.mesh.dx)), ) def get_eqn(mesh, diff): """Generate a generic 1D diffusion equation with flux from the left Args: mesh: the mesh diff: the diffusion coefficient Returns: a tuple of the flux and the equation """ flux = fipy.CellVariable(mesh, value=0.) eqn = fipy.TransientTerm() == fipy.DiffusionTerm(
def main(files):
    list(map(z.comp(convert_and_update, z.do(print)), files))

print(f"inc: x = {x}") return x+1 @dataclass class Log: """Stateful class.""" _log: list = field(default_factory=list) def append(self, obj): """Change state of class.""" print(f"Log.append: obj = {obj}") self._log.append(obj) log = Log() inc = compose(inc, do(log.append)) inc(1) inc(11) print(f"log = {log}") #%% # # Pipe. # from toolz.curried import pipe, flip double = lambda i: 2 * i
def do(self, func, *args, **kwargs):
    return self.pipe(do(func), *args, **kwargs)

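# Minimal sketch of how such a `do` method fits a fluent pipeline, assuming a
# hypothetical `Pipe` wrapper (not from the source) whose `pipe` applies a
# function to the wrapped value. Curried `do(func)` runs func for its side
# effect and passes the value through unchanged.
from toolz.curried import do

class Pipe:
    def __init__(self, value):
        self.value = value

    def pipe(self, func, *args, **kwargs):
        return Pipe(func(self.value, *args, **kwargs))

    def do(self, func, *args, **kwargs):
        return self.pipe(do(func), *args, **kwargs)

result = Pipe([1, 2, 3]).do(print).pipe(sum)  # prints [1, 2, 3]
assert result.value == 6
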
def main4(user_id):
    return (NoneMonad(user_id)
            .bind(get_user)
            .bind(do(send_message2(msg="Hello user.")))
            .bind(log))

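# Minimal sketch of why `do` matters here, assuming a hypothetical NoneMonad
# that short-circuits on None (not the source's implementation): without `do`,
# the next bind would receive send_message2's return value instead of the user.
class NoneMonad:
    def __init__(self, value):
        self.value = value

    def bind(self, func):
        if self.value is None:
            return self
        return NoneMonad(func(self.value))
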
import logging

from .api import WAIT, query_api  # NOQA
from .schema import WeatherUndergroundAPIResponse
from .processing import process_response, extract_observations, process_metadata  # NOQA
from .models import WeatherUndergroundObservation, WeatherUndergroundObservationSchema  # NOQA

log = logging.getLogger(__name__)

get_observations = compose(                # extract observations from api
    extract_observations,                  # get observations from payload
    get(0),                                # drop the deserialization errors
    WeatherUndergroundAPIResponse().load,  # deserialize api response
    query_api                              # query the api
)

collect_data = compose(                        # create observation models from api response
    do(compose(log.info, "Created {} observations".format, len)),
    process_response,                          # create observations models
    fapply(map),                               # merge metadata into each observation
    juxt(process_metadata, get_observations)   # query params as metadata
)


def collect_many(api_key, on_dates, zipcodes, t):
    """Collect data over many dates and zipcodes

    :param api_key: str weather underground api key
    :param on_dates: list of dates
    :param zipcodes: list of zipcodes
    :param t: float delay between api calls
    :return: list of observations
    """