def test_dont_filter_out_non_recently_converged(self):
    """
    If a group was converged in the past but not recently, it will be
    cleaned from the ``recently_converged`` map, and it will be converged.
    """
    # g1: converged a while ago; divergent -> removed and converged
    # g2: converged recently; not divergent -> not converged
    # g3: converged a while ago; not divergent -> removed and not converged
    eff = self._converge_all_groups(['00_g1'])
    sequence = [
        (ReadReference(ref=self.currently_converging), lambda i: pset([])),
        (Log('converge-all-groups',
             dict(group_infos=[self.group_infos[0]],
                  currently_converging=[])),
         noop),
        (ReadReference(ref=self.recently_converged),
         lambda i: pmap({'g1': 4, 'g2': 10, 'g3': 0})),
        (Func(time.time), lambda i: 20),
        (ModifyReference(self.recently_converged,
                         match_func("literally anything",
                                    pmap({'g2': 10}))),
         noop),
        parallel_sequence([[self._expect_group_converged('00', 'g1')]])
    ]
    self.assertEqual(perform_sequence(sequence, eff), ['converged g1!'])
def read_schedule_json(obj):
    # reconstruct schedule information from json
    agents = pvector(obj['agents'])
    costs = pmap(obj['costs'])
    times = pset(map(as_timerange, obj['times']))
    forward = pmap({a: pmap({as_timerange(t): int(t['mid'])
                             for t in obj['meetings'][a] if t['mid'] != -1})
                    for a in agents})
    mids = pset([mid for ts in forward.values() for mid in ts.values()])

    # remove the mid 0, which marks an empty meeting (for unavailable times)
    if 0 in mids:
        mids = mids.remove(0)

    # update meetings and their requirements
    requirements = pmap({int(mid): pmap({r['type']: read_jsonable_requirement(r)
                                         for r in rs.values()})
                         for mid, rs in obj['requirements'].iteritems()})

    schedule = Schedule(agents=agents, times=times, forward=forward,
                        requirements=requirements, costs=costs)

    new_unsatisfied = schedule.unsatisfied
    for mid, rs in schedule.unsatisfied.iteritems():
        for rtype in rs:
            r = schedule.requirements[mid][rtype]
            if r.satisfied(schedule):
                new_unsatisfied = _mark_satisfied(new_unsatisfied, r)
            elif not r.satisfiable(schedule):
                raise RequirementException(r)
    schedule.unsatisfied = new_unsatisfied

    return schedule
def merge_results(results):
    """
    Given a list of dictionary results from episodes, merge them into a
    single dictionary mapping each key to the list of per-episode values.

    Example:
        [{episode_id: 1, steps: 22}, {episode_id: 2, steps: 30}]
        -> {episode_id: [1, 2], steps: [22, 30]}
    """
    seed_dictionary = pmap({key: v() for key, _ in results[0].items()})
    return pmap(reduce(
        lambda result1, y: {key: value.append(y[key])
                            for key, value in result1.items()},
        [seed_dictionary] + results))
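# Illustrative usage sketch (not from the original corpus): exercising
# merge_results above. It assumes that `v` in the function body is
# pyrsistent's vector constructor and that `reduce` comes from functools,
# which is what the implementation implies.
def example_merge_results_usage():
    from pyrsistent import pmap, v
    results = [{'episode_id': 1, 'steps': 22},
               {'episode_id': 2, 'steps': 30}]
    merged = merge_results(results)
    # Each key now maps to a persistent vector of per-episode values.
    assert merged == pmap({'episode_id': v(1, 2), 'steps': v(22, 30)})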
def __init__(self, agents=pvector([]), times=pset([]), forward=pmap({}),
             costs=pmap({}), requirements=pmap({}), backward=None,
             unsatisfied=None):
    self.cache = {}

    #### schedule bounds
    self.agents = agents  # vector of valid agents
    self.times = times    # set of valid times

    #### the schedule itself
    self.forward = forward  # agents -> times -> meeting ids
    # mids -> meeting (time, agents)
    if backward is None:
        self.backward = _backward_from_forward(self.forward)
    else:
        self.backward = backward

    #### schedule constraints
    self.requirements = requirements  # mids -> requirement type -> requirement
    # mids -> requirement type
    if unsatisfied is None:
        self.unsatisfied = pmap({mid: pset(self.requirements[mid].keys())
                                 for mid in self.requirements.keys()})
    else:
        self.unsatisfied = unsatisfied

    self.costs = costs  # map from agents to meeting time cost functions
def test_filters_clb_types(self):
    """
    Only one CLB step is returned per CLB.
    """
    steps = pbag([
        AddNodesToCLB(
            lb_id='5',
            address_configs=s(('1.1.1.1',
                               CLBDescription(lb_id='5', port=80)))),
        RemoveNodesFromCLB(lb_id='5', node_ids=s('1')),
        # Unoptimizable step
        CreateServer(server_config=pmap({})),
    ])
    # The returned steps could be a pbag of either of the two lists below,
    # depending on how `one_clb_step` iterates over the steps. Since it is a
    # pbag, the order of elements is not guaranteed.
    list1 = [
        AddNodesToCLB(
            lb_id='5',
            address_configs=s(
                ('1.1.1.1', CLBDescription(lb_id='5', port=80)))),
        CreateServer(server_config=pmap({}))
    ]
    list2 = [
        RemoveNodesFromCLB(lb_id='5', node_ids=s('1')),
        CreateServer(server_config=pmap({}))
    ]
    self.assertEqual(
        matches(MatchesAny(Equals(pbag(list1)), Equals(pbag(list2)))),
        optimize_steps(steps)
    )
def test_returns_new_pmap_given_pmap(self):
    """
    If a PMap is passed in, a new PMap is returned, and even the new value
    that was passed in gets frozen.
    """
    self.assertEquals(
        set_in(pmap({1: 2}), (1,), {1: 3}),
        pmap({1: pmap({1: 3})}))
def setUp(self):
    self.format = mock.MagicMock(return_value=lambda x: str(x))
    self.label_1 = "a"
    self.label_2 = "b"
    self.true_positive = 8
    self.true_negative = 8
    self.false_positive = 8
    self.false_negative = 8

    self.confusion_table = ConfusionTable(
        self.label_1,
        self.true_positive,
        self.true_negative,
        self.false_positive,
        self.false_negative,
        self.format
    )

    self.predictions = pmap({
        self.label_1: pmap({
            self.label_1: self.true_positive,
            self.label_2: self.false_positive,
        }),
        self.label_2: pmap({
            self.label_1: self.false_negative,
            self.label_2: self.true_negative
        })
    })
def parse_file(excel_file):
    excel = pd.ExcelFile(excel_file)

    df = excel.parse('Schedule', index_col=0)
    df.columns = clean_up(df.columns)
    times, agents = parse_schedule(df)

    df = excel.parse('Meetings', index_col=None)
    df.columns = clean_up(df.columns)
    del df['area']
    df.name = clean_up(df.name)
    meetings = parse_student_meetings(df, 3)

    offset = meetings[-1].mid + 1
    df = excel.parse('Lab Meetings', index_col=None)
    df.columns = clean_up(df.columns)
    meetings += parse_lab_meetings(df, offset=offset)

    df = excel.parse('Schedule Preferences')
    df.columns = clean_up(df.columns)
    costs = parse_costs(df)

    final_meetings = {}
    for requirement in meetings:
        old = final_meetings.get(requirement.mid, pset())
        final_meetings[requirement.mid] = old.add(requirement)

    return Schedule(list(agents), pmap(), pmap(times), costs,
                    pmap(final_meetings), pmap())
def test_returns_new_pmap_given_dict(self):
    """
    If a dictionary is passed in, a new PMap is returned and the old
    dictionary is unaffected.
    """
    a = {1: 2}
    self.assertEquals(set_in(a, (1,), {1: 3}), pmap({1: pmap({1: 3})}))
    self.assertEquals(a, {1: 2})
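# Hedged sketch (an assumption, not the project's actual helper): a minimal
# set_in with the behaviour the two tests above rely on -- the value at the
# given path is replaced by a frozen copy of the new value, and a fresh PMap
# is returned without mutating the input mapping.
def _set_in_sketch(mapping, path, value):
    from pyrsistent import freeze, pmap
    result = pmap(mapping)
    key = path[0]
    if len(path) == 1:
        return result.set(key, freeze(value))
    return result.set(key, _set_in_sketch(result.get(key, pmap()),
                                          path[1:], value))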
def test_hash_parameters(self):
    self.assertEqual(
        {
            http.MediaRange(type="a", parameters=pmap({"a": "b"})),
            http.MediaRange(type="a", parameters=pmap({"a": "b"})),
        },
        {http.MediaRange(type="a", parameters=pmap({"a": "b"}))},
    )
def __init__(self, nodes=None, this_node_uuid=uuid4()):
    self._configured_datasets = pmap()
    self._configured_containers = pmap()
    self._leases = LeasesModel()
    if nodes is None:
        nodes = []
    self._nodes = nodes
    self._this_node_uuid = this_node_uuid
    self.synchronize_state()
def test_equal_with_different_bucket_sizes():
    x = pmap({'a': 1, 'b': 2}, 50)
    y = pmap({'a': 1, 'b': 2}, 10)

    assert x == y
    assert not (x != y)
    assert y == x
    assert not (y != x)
def test_set_with_relocation():
    x = pmap({'a': 1000}, pre_size=1)
    x = x.set('b', 3000)
    x = x.set('c', 4000)
    x = x.set('d', 5000)
    x = x.set('d', 6000)

    assert len(x) == 4
    assert x == pmap({'a': 1000, 'b': 3000, 'c': 4000, 'd': 6000})
def test_equal_with_different_insertion_order():
    x = pmap([(i, i) for i in range(50)], 10)
    y = pmap([(i, i) for i in range(49, -1, -1)], 10)

    assert x == y
    assert not (x != y)
    assert y == x
    assert not (y != x)
def __get_basic_average_metrics(self):
    return pmap({
        "average": pmap({
            "accuracy": self.average_accuracy,
            "precision": self.average_precision,
            "recall": self.average_recall,
            "specificity": self.average_specificity,
            "f1score": self.average_f1score
        })
    })
def test_evolver_update_with_relocation():
    x = pmap({'a': 1000}, pre_size=1)
    e = x.evolver()
    e['b'] = 3000
    e['c'] = 4000
    e['d'] = 5000
    e['d'] = 6000

    assert len(e) == 4
    assert e.persistent() == pmap({'a': 1000, 'b': 3000, 'c': 4000, 'd': 6000})
def log_metrics(self, log_level='basic'):
    avg_metrics_dict = pmap({
        "basic": self.__get_basic_average_metrics,
        "all": self.__get_all_average_metrics
    })[log_level]()

    metrics_dict = pmap({
        "basic": self.__get_basic_metrics_for_each_class,
        "all": self.__get_all_metrics_for_each_class
    })[log_level]()

    return (self.__format_function(avg_metrics_dict),
            self.__format_function(metrics_dict))
def __get_basic_metrics_for_class(confusion_table):
    return pmap({
        str(confusion_table.get_class_name()): pmap({
            "Accuracy": confusion_table.accuracy,
            "Precision": confusion_table.precision,
            "Recall": confusion_table.recall,
            "Specificity": confusion_table.specificity,
            "F1score": confusion_table.f1score
        })
    })
def test_same_hash_when_content_the_same_but_underlying_vector_size_differs():
    x = pmap(dict((x, x) for x in range(1000)))
    y = pmap({10: 10, 200: 200, 700: 700})

    for z in x:
        if z not in y:
            x = x.remove(z)

    assert x == y
    assert hash(x) == hash(y)
def compute_check_result_for_job(client, job):
    kwargs = m(
        name="check_tron_job.{}".format(job['name']),
        source="tron",
    )
    if 'realert_every' not in kwargs:
        kwargs = kwargs.set('realert_every', guess_realert_every(job))
    kwargs = kwargs.set('check_every', f"{_run_interval}s")

    # We want to prevent a monitoring config from setting the check_every
    # attribute, since one config should not dictate how often this script runs
    sensu_kwargs = (
        pmap(job['monitoring']).discard(PRECIOUS_JOB_ATTR)
                               .discard('check_every')
    )
    kwargs = kwargs.update(sensu_kwargs)

    kwargs_list = []
    if job["status"] == "disabled":
        kwargs = kwargs.set(
            'output',
            "OK: {} is disabled and won't be checked.".format(job['name'])
        )
        kwargs = kwargs.set('status', 0)
        kwargs_list.append(kwargs)
    else:
        # The job is not disabled, therefore we have to look at its run history
        url_index = client.index()
        tron_id = get_object_type_from_identifier(url_index, job["name"])
        job_content = pmap(
            client.job(
                tron_id.url,
                include_action_runs=True,
            )
        )

        if job['monitoring'].get(PRECIOUS_JOB_ATTR, False):
            dated_runs = sort_runs_by_interval(job_content, interval='day')
        else:
            dated_runs = {'': job_content['runs']}

        for date, runs in dated_runs.items():
            results = compute_check_result_for_job_runs(
                job=job,
                job_content=job_content.set('runs', runs),
                client=client,
            )
            dated_kwargs = kwargs.update(results)
            if date:  # if empty date, leave job name alone
                dated_kwargs = dated_kwargs.set(
                    'name', f"{kwargs['name']}-{date}"
                )
            kwargs_list.append(dated_kwargs)

    return [dict(kws) for kws in kwargs_list]
def __str__(self):
    """
    Returns ConfusionTable instance as a Grid
    """
    table_values = pmap({
        "true_positive": pmap({str(self.__class_name): self.__true_positive}),
        "true_negative": pmap({str(self.__class_name): self.__true_negative}),
        "false_positive": pmap({str(self.__class_name): self.__false_positive}),
        "false_negative": pmap({str(self.__class_name): self.__false_negative})
    })
    return self.__format_function(table_values).__str__()
def test_mixed_optimization(self):
    """
    Mixes of optimizable and unoptimizable steps still get optimized
    correctly.
    """
    steps = pbag([
        # CLB adds
        AddNodesToCLB(
            lb_id='5',
            address_configs=s(('1.1.1.1',
                               CLBDescription(lb_id='5', port=80)))),
        AddNodesToCLB(
            lb_id='5',
            address_configs=s(('1.1.1.2',
                               CLBDescription(lb_id='5', port=80)))),
        AddNodesToCLB(
            lb_id='6',
            address_configs=s(('1.1.1.1',
                               CLBDescription(lb_id='6', port=80)))),
        AddNodesToCLB(
            lb_id='6',
            address_configs=s(('1.1.1.2',
                               CLBDescription(lb_id='6', port=80)))),
        RemoveNodesFromCLB(lb_id='5', node_ids=s('1')),
        RemoveNodesFromCLB(lb_id='5', node_ids=s('2')),
        RemoveNodesFromCLB(lb_id='6', node_ids=s('3')),
        RemoveNodesFromCLB(lb_id='6', node_ids=s('4')),

        # Unoptimizable steps
        CreateServer(server_config=pmap({})),
    ])

    self.assertEqual(
        optimize_steps(steps),
        pbag([
            # Optimized CLB adds
            AddNodesToCLB(
                lb_id='5',
                address_configs=s(('1.1.1.1',
                                   CLBDescription(lb_id='5', port=80)),
                                  ('1.1.1.2',
                                   CLBDescription(lb_id='5', port=80)))),
            AddNodesToCLB(
                lb_id='6',
                address_configs=s(('1.1.1.1',
                                   CLBDescription(lb_id='6', port=80)),
                                  ('1.1.1.2',
                                   CLBDescription(lb_id='6', port=80)))),
            RemoveNodesFromCLB(lb_id='5', node_ids=s('1', '2')),
            RemoveNodesFromCLB(lb_id='6', node_ids=s('3', '4')),

            # Unoptimizable steps
            CreateServer(server_config=pmap({}))
        ]))
def test_transform_empty_path(self):
    """
    If ``transform`` is supplied with an empty path, the operation is
    performed on the root object.
    """
    proxy = _TransformProxy(pmap({'a': 1}))
    proxy.transform([], lambda o: o.set('a', 2))
    self.assertEqual(
        pmap({'a': 2}),
        proxy.commit(),
    )
def test_create_server(self):
    """
    :obj:`CreateServer.as_request` produces a request for creating a server.
    """
    create = CreateServer(
        launch_config=pmap({'name': 'myserver', 'flavorRef': '1'}))
    self.assertEqual(
        create.as_request(),
        Request(
            service=ServiceType.CLOUD_SERVERS,
            method='POST',
            path='servers',
            data=pmap({'name': 'myserver', 'flavorRef': '1'})))
def test_generator_returns_confusion_table_correctly_with_two_classes(self):
    predictions = pmap({
        self.label_1: pmap({
            self.label_1: self.true_positive,
            self.label_2: self.false_positive,
        }),
        self.label_2: pmap({
            self.label_1: self.false_negative,
            self.label_2: self.true_negative
        })
    })
    confusion_table = generate_confusion_table(predictions, self.label_1,
                                               format_dict_as_grid)
    self.failUnlessEqual(self.ct.__str__(), confusion_table.__str__())
def test_from_dict(self):
    """
    L{WrittenMessage.from_dict} converts a dictionary that has been
    deserialized from a log into a L{WrittenMessage} object.
    """
    log_entry = pmap(
        {"timestamp": 1, "task_uuid": "unique", "task_level": [1],
         "foo": "bar"}
    )
    parsed = WrittenMessage.from_dict(log_entry)
    self.assertEqual(parsed.timestamp, 1)
    self.assertEqual(parsed.task_uuid, "unique")
    self.assertEqual(parsed.task_level, TaskLevel(level=[1]))
    self.assertEqual(parsed.contents, pmap({"foo": "bar"}))
def server(id, state, created=0, image_id='image', flavor_id='flavor',
           json=None, metadata=pmap(), **kwargs):
    """Convenience for creating a :obj:`NovaServer`."""
    json = pmap(json) or pmap({'id': id, 'status': state.name})
    if state is ServerState.UNKNOWN_TO_OTTER:
        json = json.set('status', 'blargho')
    elif state is ServerState.DELETED:
        json = json.set('status', 'ACTIVE')
        json = json.set('OS-EXT-STS:task_state', 'deleting')
    if metadata:
        json = json.set('metadata', pmap(metadata))
    return NovaServer(id=id, state=state, created=created,
                      image_id=image_id, flavor_id=flavor_id,
                      json=json, **kwargs)
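# Hedged usage sketch for the server() convenience above (ServerState and a
# readable NovaServer ``json`` attribute are assumed from the surrounding
# test module, not confirmed here): a DELETED server gets its Nova JSON
# rewritten the way the convergence code expects to see it.
def example_server_helper_usage():
    deleted = server('abc', ServerState.DELETED)
    assert deleted.json['status'] == 'ACTIVE'
    assert deleted.json['OS-EXT-STS:task_state'] == 'deleting'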
def test_converge_give_me_multiple_servers(self):
    """
    Multiple servers are added at a time if there are not enough servers to
    meet the desired capacity.
    """
    self.assertEqual(
        converge(
            DesiredGroupState(server_config={}, capacity=2),
            set(),
            set(),
            0),
        pbag([
            CreateServer(server_config=pmap()),
            CreateServer(server_config=pmap())]))
def run_multiple_inserts_in_pmap():
    import time
    from pyrsistent import pmap

    COUNT = 100000

    def test_range():
        prime = 317
        return range(0, prime * COUNT, prime)

    elements = {x: x for x in test_range()}

    # Construct directly from an ordinary dict
    start = time.time()
    m1 = pmap(elements)
    print("Done initializing, time=%s s, count=%s" % (time.time() - start, COUNT))

    start = time.time()
    m2 = pmap()
    for x in test_range():
        m2 = m2.set(x, x)
    print("Done setting, time=%s s, count=%s" % (time.time() - start, COUNT))
    assert m1 == m2

    start = time.time()
    m3 = pmap()
    e3 = m3.evolver()
    for x in test_range():
        e3[x] = x
    m3 = e3.persistent()
    print("Done evolving, time=%s s, count=%s" % (time.time() - start, COUNT))
    assert m3 == m2

    start = time.time()
    m4 = pmap()
    m4 = m4.update(elements)
    m4 = m4.update(elements)
    print("Done updating, time=%s s, count=%s" % (time.time() - start, COUNT))
    assert m4 == m3

    start = time.time()
    m5 = pmap()
    m5 = m5.update_with(lambda l, r: r, elements)
    m5 = m5.update_with(lambda l, r: r, elements)
    print("Done updating with, time=%s s, count=%s" % (time.time() - start, COUNT))
    assert m5 == m4
def test_converge_give_me_multiple_servers(self):
    """
    Multiple servers are added at a time if there are not enough servers to
    meet the desired capacity.
    """
    self.assertEqual(
        converge(
            DesiredGroupState(launch_config={}, desired=2),
            [],
            {},
            0),
        Convergence(
            steps=pbag([
                CreateServer(launch_config=pmap()),
                CreateServer(launch_config=pmap())])))
def __init__(
    self,
    typ: t.Type[iface.IType],
    attrs: t.Sequence[str] = pvector([]),
    deserialize_overrides: PMap[str, str] = pmap({}),
    unknown: str = 'ignore',
) -> None:
    """
    :param deserialize_overrides: source_field_name => struct_field_name mapping
    """
    super().__init__(unknown)
    self.typ = typ
    self.attrs = attrs
    self.deserialize_overrides = deserialize_overrides
    # struct_field_name => source_field_name
    self.serialize_overrides = pmap(
        {v: k for k, v in self.deserialize_overrides.items()})
def __setstate__(self, d):
    dat = d['data']
    object.__setattr__(
        self, 'data',
        ps.pmap({
            k: (imm_array(u) if v is None else iquant(u, v))
            for (k, (u, v)) in six.iteritems(dat)
        }))
    object.__setattr__(self, '_row_count', None)
def test_initialization_with_two_elements():
    map = pmap({'a': 2, 'b': 3})
    assert len(map) == 2
    assert map['a'] == 2
    assert map['b'] == 3

    map2 = map.remove('a')
    assert 'a' not in map2
    assert map2['b'] == 3
class _Directory(object):
    """
    A directory.
    """

    _name = attr.ib()
    _parent = attr.ib(repr=False)
    _children = attr.ib(default=pmap())

    @classmethod
    def root(cls):
        root = cls(name="", parent=None)
        root._parent = root
        return root

    def __getitem__(self, name):
        return self._children.get(
            name, _DirectoryChild(name=name, parent=self),
        )

    def __setitem__(self, name, node):
        self._children = self._children.set(name, node)

    def __delitem__(self, name):
        self._children = self._children.remove(name)

    def create_directory(self, path, with_parents):
        raise exceptions.FileExists(path)

    def list_directory(self, path):
        return pset(self._children)

    def remove_empty_directory(self, path):
        if self._children:
            raise exceptions.DirectoryNotEmpty(path)
        del self._parent[self._name]

    def create_file(self, path):
        raise exceptions.FileExists(path)

    def open_file(self, path, mode):
        raise exceptions.IsADirectory(path)

    def remove_file(self, path):
        raise exceptions._UnlinkNonFileError(path)

    def link(self, source, to, fs, state):
        raise exceptions.FileExists(to)

    def readlink(self, path):
        raise exceptions.NotASymlink(path)

    def stat(self, path):
        return os.stat_result((stat.S_IFDIR, ) + (0, ) * 9)

    lstat = stat
def __add__(self, other_pde):
    assert self.dim == other_pde.dim
    res = dict(self.mi_to_coeff)
    for k, v in other_pde.mi_to_coeff.items():
        if k in res:
            res[k] += v
        else:
            res[k] = v
    return DifferentialOperator(self.dim, pmap(res))
def laplacian(diff_op):
    dim = diff_op.dim
    empty = [pmap()] * len(diff_op.eqs)
    res = LinearPDESystemOperator(dim, *empty)
    for j in range(dim):
        mi = [0] * diff_op.total_dims
        mi[j] = 2
        res = res + diff(diff_op, tuple(mi))
    return res
def __init__(self, initial=None):
    # type: (Optional[Mapping[KT, VT]]) -> None
    self.__parent = None  # type: Optional[WeakReference[Storage[KT, VT]]]
    self.__storages = WeakSet({self})  # type: MutableSet[Storage[KT, VT]]
    self.__data = cast(
        "PMapEvolver[WeakReference[KT], VT]",
        pmap().evolver()
    )  # type: PMapEvolver[WeakReference[KT], VT]
    if initial is not None:
        self.__initialize(initial)
def _typed_pmap_converter(
    init_val: typing.Mapping[
        str,
        typing.Callable[["TypeChecker", typing.Any], bool],
    ],
) -> typing.Mapping[str, typing.Callable[["TypeChecker", typing.Any], bool]]:
    return typing.cast(
        typing.Mapping[
            str,
            typing.Callable[["TypeChecker", typing.Any], bool],
        ],
        pmap(init_val),
    )
def test_lt_parameters_same_type_and_subtype(self):
    self.assertLess(
        http.MediaRange(type="bar", subtype="foo"),
        http.MediaRange(
            type="bar",
            subtype="foo",
            parameters=pmap({"a": "b"}),
        ),
    )
def _load(res):
    flnm = HCPRetinotopyDataset.retinotopy_files[res]
    flnm = pseudo_path.local_path(flnm)
    logging.info(
        'HCPRetinotopyDataset: Loading subjects from file %s...' % flnm)
    with h5py.File(flnm, 'r') as f:
        sids = np.array(f['subjectids'][0], dtype='int')
    return pyr.pmap({sid: ii for (ii, sid) in enumerate(sids)})
def test_pmap_field_optional():
    """
    If the ``optional`` argument is true, ``None`` is an acceptable
    alternative to a map.
    """
    class Record(PRecord):
        value = pmap_field(int, int, optional=True)

    assert (Record(value={1: 2}).value, Record(value=None).value) == \
        (pmap({1: 2}), None)
def test_no_clb_steps(self):
    """
    Returns the same steps when no CLB steps are passed.
    """
    steps = [
        CreateServer(server_config=pmap({"name": "server"})),
        DeleteServer(server_id="abc")
    ]
    self.assertEqual(list(one_clb_step(steps)), steps)
def reducer(action, state=pmap({})):
    if action.type == initial_action_type:
        new_state = pmap({'count': 0})
        return new_state
    if isinstance(action, INC_COUNTER):
        new_state = state.set('count', state['count'] + 1)
        return new_state
    if isinstance(action, DEC_COUNTER):
        new_state = state.set('count', state['count'] - 1)
        return new_state
    if isinstance(action, SET_COUNTER):
        new_state = state.set('count', action.payload)
        return new_state
    if isinstance(action, ADD_COUNTER):
        new_state = state.set('count', state['count'] + action.payload)
        return new_state
    return state
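# Illustrative sketch (assumes actions expose a ``type`` attribute, that
# ``initial_action.type == initial_action_type``, and that ``inc_action`` is
# an INC_COUNTER instance): the reducer above returns a brand-new PMap on
# every handled action and never mutates the state it was given.
def example_reducer_usage(initial_action, inc_action):
    state0 = reducer(initial_action)      # -> pmap({'count': 0})
    state1 = reducer(inc_action, state0)  # -> pmap({'count': 1})
    assert state0['count'] == 0           # previous state is untouched
    return state1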
def _make(cls, internal=pmap()):
    # type: (Type[_DS], PMap[KT, VT]) -> _DS
    """
    Make new state by directly setting the internal state.

    :param internal: Internal state.
    :return: State.
    """
    return super(DictState, cls)._make(internal)
def test_normal_use(self):
    """Tests normal usage."""
    stack_config = pmap({'stack_name': 'baz', 'foo': 'bar'})
    new_stack_config = pmap({'stack_name': 'baz_foo', 'foo': 'bar'})

    self.create = CreateStack(stack_config)
    self.seq = [
        (Func(uuid4), lambda _: 'foo'),
        (create_stack(thaw(new_stack_config)).intent,
         lambda _: (StubResponse(200, {}), {'stack': {}})),
        (Log('request-create-stack', ANY), lambda _: None)
    ]

    reason = 'Waiting for stack to create'
    result = perform_sequence(self.seq, self.create.as_effect())
    self.assertEqual(
        result, (StepResult.RETRY, [ErrorReason.String(reason)]))
def create_directory(self, path):
    if self.exists(path=path):
        raise exceptions.FileExists(path)

    parent = path.parent()
    if not self.exists(path=parent):
        raise exceptions.FileNotFound(parent)

    self._tree = self._tree.set(path, pmap())
def diff(diff_op, mi):
    eqs = []
    for eq in diff_op.eqs:
        res = {}
        for deriv_ident, v in eq.items():
            new_mi = add_mi(deriv_ident.mi, mi)
            res[DerivativeIdentifier(new_mi, deriv_ident.vec_idx)] = v
        eqs.append(pmap(res))
    return LinearPDESystemOperator(diff_op.dim, *eqs)
def test_as_dict(self):
    """
    L{WrittenMessage.as_dict} returns the dictionary that will be
    serialized to the log.
    """
    log_entry = pmap(
        {"timestamp": 1, "task_uuid": "unique", "task_level": [1],
         "foo": "bar"}
    )
    self.assertEqual(WrittenMessage.from_dict(log_entry).as_dict(), log_entry)
async def get_balances_ftx(
    client: ntypes.CLIENT,
    symbols_resp: NoobitResponseSymbols,
    # prevent unintentional passing of following args
    *,
    logger: typing.Optional[typing.Callable] = None,
    auth=FtxAuth(),
    base_url: pydantic.AnyHttpUrl = endpoints.FTX_ENDPOINTS.private.url,
    endpoint: str = endpoints.FTX_ENDPOINTS.private.endpoints.balances,
) -> Result[NoobitResponseBalances, pydantic.ValidationError]:

    asset_from_exchange = lambda x: {v: k for k, v in symbols_resp.assets.items()}[x]

    req_url = "/".join([base_url, "wallet", "balances"])
    method = "GET"
    headers = auth.headers(method, "/api/wallet/balances")

    valid_ftx_req = Ok(FtxPrivateRequest())

    result_content = await get_result_content_from_req(
        client, method, req_url, valid_ftx_req.value, headers
    )
    if isinstance(result_content, Err):
        return result_content

    valid_result_content = _validate_data(
        FtxResponseBalances, pmap({"balances": result_content.value})
    )
    if valid_result_content.is_err():
        return valid_result_content

    parsed_result_balances = parse_result(valid_result_content.value)

    valid_parsed_response_data = _validate_data(
        NoobitResponseBalances,
        pmap(
            {
                "balances": parsed_result_balances,
                "rawJson": result_content.value,
                "exchange": "FTX",
            }
        ),
    )
    return valid_parsed_response_data
def __init__(self, log, dispatcher, num_buckets, partitioner_factory,
             build_timeout, interval, limited_retry_iterations, step_limits,
             converge_all_groups=converge_all_groups):
    """
    :param log: a bound log
    :param dispatcher: The dispatcher to use to perform effects.
    :param int num_buckets: the number of logical `buckets` which are shared
        between all Otter nodes running this service. The buckets will be
        partitioned up between nodes to determine which nodes should work on
        which groups.
    :param partitioner_factory: Callable of (all_buckets, log, callback)
        which should create an :obj:`Partitioner` to distribute the buckets.
    :param number build_timeout: number of seconds to wait for servers to be
        in building before it's timed out and deleted
    :param interval: Interval between convergence steps, per group.
    :param callable converge_all_groups: like :func:`converge_all_groups`,
        to be used for test injection only
    :param int limited_retry_iterations: number of iterations to wait for
        LIMITED_RETRY steps
    :param dict step_limits: Mapping of step name to number of executions
        allowed in a convergence cycle
    """
    MultiService.__init__(self)
    self.log = log.bind(otter_service='converger')
    self._dispatcher = dispatcher
    self._buckets = range(num_buckets)
    self.partitioner = partitioner_factory(
        buckets=self._buckets, log=self.log,
        got_buckets=self.buckets_acquired)
    self.partitioner.setServiceParent(self)
    self.build_timeout = build_timeout
    self._converge_all_groups = converge_all_groups
    self.interval = interval
    self.limited_retry_iterations = limited_retry_iterations
    self.step_limits = get_step_limits_from_conf(step_limits)

    # ephemeral mutable state
    self.currently_converging = Reference(pset())
    self.recently_converged = Reference(pmap())
    # Groups we're waiting on temporarily, and may give up on.
    self.waiting = Reference(pmap())  # {group_id: num_iterations_waited}
def get_setting(settings: Settings, guild_id: Guild_id, key: str,
                f=None, default=None) -> Optional[str]:
    value = settings.get(guild_id, pmap()).get(key)
    if value is None:
        return default
    if f is not None:
        return f(value)
    return value
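# Hedged usage sketch for get_setting above: ``settings`` is assumed to be a
# PMap of guild id -> PMap of key -> raw string value; ``f`` converts the raw
# value when the key is present, and ``default`` is returned when it is
# missing.
def example_get_setting_usage():
    from pyrsistent import pmap
    settings = pmap({1234: pmap({'prefix': '!', 'timeout': '30'})})
    assert get_setting(settings, 1234, 'prefix') == '!'
    assert get_setting(settings, 1234, 'timeout', f=int) == 30
    assert get_setting(settings, 9999, 'prefix', default='?') == '?'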
def get_component(
    *, ecdb: EntityComponentDatabase[ComponentTemplate], entity: Entity,
    component_type: Type[ComponentTemplate]
) -> Optional[ComponentTemplate]:
    entities = ecdb._entities  # pylint: disable=protected-access

    entity_components = entities.get(entity, pyrsistent.pmap())
    component = entity_components.get(component_type)

    return component
def test_dict_comprehension():
    "Test that {a:b for a, b in ...} is translated to a pmap."
    subject = funcs.dict_comprehension
    print(subject.__source__)
    actual = subject({'a': 'b', 'c': 'd'})
    assert isinstance(actual, PMap)
    assert actual == pmap({'b': 'a', 'e': 'f'})
def filters(fs):
    '''
    parser.filters is a dictionary of filter functions for the various
    entries in the given parser's command line arguments. See also
    help(CommandLineParser).
    '''
    if not fs:
        return pyr.m()
    if not is_map(fs):
        raise ValueError('filters must be a mapping of entries to functions.')
    return pyr.pmap(fs)
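# Hedged usage sketch for filters() above (assumes the project's ``is_map``
# accepts plain dicts): the result is an immutable pyrsistent map of entry
# name -> filter callable, and falsy input yields an empty map.
def example_filters_usage():
    fs = filters({'threshold': float, 'name': str.strip})
    assert fs['threshold']('0.5') == 0.5
    assert len(filters(None)) == 0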
def _(action, state):
    # { 'id': id, 'state': "done|active", 'todo': "" }
    next_seq = state['seq'] + 1
    return state.transform(
        ['seq'], next_seq,
        ['todos', next_seq], pmap({
            'id': next_seq,
            'todo': action.payload,
            'state': 'active'
        }))
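# Illustrative note (a sketch, assuming ``action.payload`` carries the todo
# text): PMap.transform consumes (path, value) pairs, so the handler above
# bumps the sequence counter and inserts the new todo entry in one immutable
# update, leaving the previous state object intact.
def example_transform_update():
    from pyrsistent import freeze
    state = freeze({'seq': 0, 'todos': {}})
    new_state = state.transform(
        ['seq'], 1,
        ['todos', 1], freeze({'id': 1, 'todo': 'write docs',
                              'state': 'active'}))
    assert new_state['seq'] == 1
    assert new_state['todos'][1]['todo'] == 'write docs'
    assert state['seq'] == 0  # original state unchanged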
def test_movable_from(self):
    square = v(0, 3)
    reachable = {v(0, 0), v(0, 7)}
    capturable = {v(0, 0), v(7, 7)}
    piece = Piece(reachable=reachable, capturable=capturable)
    board = core.Board(pieces=pmap({square: core.WHITE.piece(piece)}))
    self.assertEqual(
        sorted(board.movable_from(square=square)),
        [v(0, 0), v(0, 7), v(7, 7)],
    )
def calc_contrast_energies(contrast_filter,
                           divisive_normalization_function,
                           divisive_normalization_parameters,
                           cpd_sensitivities):
    '''
    calc_contrast_energies is a calculator that performs divisive
    normalization on the filtered contrast images and yields a nested map of
    contrast energy arrays; the contrast_energies map has keys that are
    persistent maps of divisive normalization parameter values and values
    that are maps from spatial frequency (in cycles per degree) to the 3D
    contrast energy arrays.

    Required afferent parameters:
      * contrast_filter
      * divisive_normalization_function, divisive_normalization_parameters
      * cpd_sensitivities

    Output efferent values:
      @ contrast_energies Will be a nested map whose first level of keys are
        persistent-maps of the divisive normalization parameters and whose
        second level of keys are a set of frequencies; the values at the
        second level are the stacks of contrast energy images for the
        particular divisive normalization parameters and frequencies
        specified in the keys.
    '''
    # First, calculate the contrast energies at each frequency for all images,
    # then combine them; since this reuses images internally when the
    # parameters are the same, it shouldn't be too inefficient:
    divnfn = divisive_normalization_function
    params = divisive_normalization_parameters
    all_cpds = np.unique(
        [k.to(units.cycle / units.deg).m if pimms.is_quantity(k) else k
         for s in cpd_sensitivities
         for k in s.iterkeys()])
    all_cpds = all_cpds * (units.cycles / units.degree)
    rsps = {cpd: vw.contrast_energy
            for cpd in all_cpds
            for vw in [ImageArrayContrastView(contrast_filter, cpd,
                                              divnfn, params)]}
    # flip this around...
    flip = {}
    for (k0, v0) in rsps.iteritems():
        for (k1, v1) in v0.iteritems():
            if k1 not in flip:
                flip[k1] = {}
            flip[k1][k0] = v1
    rsps = pyr.pmap({k: pyr.pmap(v) for (k, v) in flip.iteritems()})
    return {'contrast_energies': rsps}
def typing(e, extenv):
    visitor = Visitor(extenv)
    try:
        unify(types.Unit, visitor.visit(pmap(), e))
    except UnifyError:
        raise ValueError("top level does not have type unit")
    for name, t in extenv.items():
        extenv[name] = deref_typ(t)
    deref_term(e)
def _combine_details(detailses):
    """
    Take a sequence of details dictionaries and combine them into one.
    """
    # XXX: Only necessary because testtools's `gather_details` is perversely
    # mutatey.
    result = {}
    for details in detailses:
        gather_details(details, result)
    return pmap(result)
def _when_i_apply_a_new_state_changing_domain_event_to_the_aggregate(context):
    context.event = Event.generate('EventHappened')
    _key = context.expected_state_key = 'hello'
    _value = context.expected_state_value = 'world'
    apply_map = pmap({
        'EventHappened': lambda agg, *a, **kw: agg.set_state(_key, _value)
    })
    context.aggregate = context.aggregate.apply_event(context.event,
                                                      apply_map=apply_map)