def _compile_ql_sess_state(self, ctx: CompileContext,
                           ql: qlast.SetSessionState):
    """Compile a ``SET`` session-state statement.

    Processes each item of the statement: module alias declarations are
    validated against the schema, CONFIG settings are compiled and
    statically evaluated, and the resulting immutable maps are merged
    into the current transaction's state.

    Returns a dbstate.SessionStateQuery describing the applied changes.
    """
    current_tx = ctx.state.current_tx()
    schema = current_tx.get_schema()
    aliases = {}
    config_vals = {}

    for item in ql.items:
        if isinstance(item, qlast.SessionSettingModuleDecl):
            # Validate that the aliased module actually exists.
            try:
                schema.get(item.module)
            except errors.InvalidReferenceError:
                raise errors.UnknownModuleError(
                    f'module {item.module!r} does not exist') from None
            aliases[item.alias] = item.module

        elif isinstance(item, qlast.SessionSettingConfigDecl):
            name = item.alias

            # The setting must be declared in the config registry.
            try:
                desc = config.configs[name]
            except KeyError:
                raise errors.ConfigurationError(
                    f'invalid SET expression: '
                    f'unknown CONFIG setting {name!r}')

            # SET values must be statically evaluable constants.
            try:
                val_ir = ql_compiler.compile_ast_fragment_to_ir(
                    item.expr, schema=schema)
                val = ireval.evaluate_to_python_val(val_ir.expr,
                                                   schema=schema)
            except ireval.StaticEvaluationError:
                raise RuntimeError('invalid SET expression')
            else:
                # Enforce the Python type declared for this setting.
                if not isinstance(val, desc.type):
                    dispname = val_ir.stype.get_displayname(schema)
                    raise errors.ConfigurationError(
                        f'expected a {desc.type.__name__} value, '
                        f'got {dispname!r}')
                else:
                    config_vals[name] = val

        else:
            raise RuntimeError(
                f'unsupported SET command type {type(item)!r}')

    aliases = immutables.Map(aliases)
    config_vals = immutables.Map(config_vals)

    # Merge into the transaction state only when there is something to set.
    if aliases:
        ctx.state.current_tx().update_modaliases(
            ctx.state.current_tx().get_modaliases().update(aliases))
    if config_vals:
        ctx.state.current_tx().update_config(
            ctx.state.current_tx().get_config().update(config_vals))

    return dbstate.SessionStateQuery(sess_set_modaliases=aliases,
                                     sess_set_config=config_vals)
def test_custom_encoder_default():
    """CustomEncoder.deep_represent converts immutable containers to plain
    JSON-friendly equivalents: Map -> dict, set-of-Maps -> list-of-dicts.
    """
    # Removed a commented-out call to replace_json_default_encoder: dead code.
    encoder = bugdex.serializing.CustomEncoder()
    assert encoder.deep_represent(immutables.Map(a=1)) == dict(a=1)
    assert encoder.deep_represent({immutables.Map(a=1)}) == [dict(a=1)]
def test_pkb_apply_transaction_on_coinbase():
    """A coinbase transaction credits each output's public key balance."""
    pk_a = SECP256k1PublicKey(b'0' * 64)
    pk_b = SECP256k1PublicKey(b'1' * 64)
    out_a = Output(40, pk_a)
    out_b = Output(34, pk_b)

    coinbase_input = Input(
        construct_reference_to_thin_air(),
        CoinbaseData(0, b'coinbase of the first block'),
    )
    tx = Transaction(inputs=[coinbase_input], outputs=[out_a, out_b])

    result = pkb_apply_transaction(
        immutables.Map(), immutables.Map(), tx, is_coinbase=True)

    assert pk_a in result
    assert pk_b in result
    assert result[pk_a].value == 40
    assert result[pk_b].value == 34
def prepend(self, arg) -> 'BRelation':
    """Shift every index of this sequence up by one and insert ``arg``
    (as a singleton range map) at index 1."""
    source = self.map
    shifted = immutables.Map()
    for index in range(1, self._size().intValue() + 1):
        key = BInteger(index)
        shifted = shifted.set(key.succ(), source.get(key))
    shifted = shifted.set(BInteger(1), immutables.Map({arg: arg}))
    return BRelation(shifted)
def tail(self) -> 'BRelation':
    """Drop the first element of the sequence: entry i becomes entry i-1
    for i in 2..size."""
    shifted = immutables.Map()
    for index in range(2, self._size().intValue() + 1):
        key = BInteger(index)
        value = self.functionCall(key)
        shifted = shifted.set(key.pred(), immutables.Map({value: value}))
    return BRelation(shifted)
def reverse(self) -> 'BRelation':
    """Return this sequence with its elements in reverse order."""
    length = self.card()
    reversed_map = immutables.Map()
    for index in range(1, length.intValue() + 1):
        position = BInteger(index)
        # Element at mirrored position size - i + 1 lands at position i.
        element = self.functionCall(length.minus(position).succ())
        reversed_map = reversed_map.set(
            position, immutables.Map({element: element}))
    return BRelation(reversed_map)
def test_pkb_apply_transaction_on_non_coinbase_transaction():
    """Spending one referenced output debits the spender and credits the
    receiver; unrelated balances are untouched."""
    pk_0 = SECP256k1PublicKey(b'\x00' * 64)
    pk_1 = SECP256k1PublicKey(b'\x01' * 64)
    pk_2 = SECP256k1PublicKey(b'\x02' * 64)

    out_0 = Output(40, pk_0)
    out_1 = Output(34, pk_1)
    out_3 = Output(66, pk_1)
    final_output = Output(30, pk_2)

    prev_hash = b'a' * 32
    utxo = immutables.Map({
        OutputReference(prev_hash, 0): out_0,
        OutputReference(prev_hash, 1): out_1,
        OutputReference(prev_hash, 2): out_3,
    })
    balances = immutables.Map({
        pk_0: PKBalance(0, []),
        pk_1: PKBalance(100, [
            OutputReference(prev_hash, 1),
            OutputReference(prev_hash, 2),
        ]),
    })
    tx = Transaction(
        inputs=[Input(OutputReference(prev_hash, 1),
                      SECP256k1Signature(b'y' * 64))],
        outputs=[final_output],
    )

    result = pkb_apply_transaction(utxo, balances, tx, is_coinbase=False)

    # pk_0 is not referenced by this transaction and keeps its balance.
    assert result[pk_0].value == 0
    assert result[pk_0].output_references == []
    # pk_1 spent the 34-valued output at index 1, keeping index 2.
    assert result[pk_1].value == 100 - 34
    assert result[pk_1].output_references == [OutputReference(prev_hash, 2)]
    # pk_2 received the transaction's single output.
    assert result[pk_2].value == 30
    assert result[pk_2].output_references == [OutputReference(tx.hash(), 0)]
def empty(cls):
    """Construct a pristine state: every index map empty, no active chain."""
    fresh = immutables.Map
    return cls(
        block_by_hash=fresh(),
        unspent_transaction_outs_by_hash=fresh(),
        block_by_height_by_hash=fresh(),
        heads=fresh(),
        current_chain_hash=None,
        public_key_balances_by_hash=fresh(),
    )
def test_log_with_gap(self):
    """Appending at index 2 replaces the tail, dropping the entry at 3."""
    log_with_gap = immutables.Map({
        1: model.Entry(1, "hello"),
        3: model.Entry(1, 1),
    })
    appended = log.append(
        log_with_gap, model.Index(1, 1), model.Entry(1, "world"))
    expected = immutables.Map({
        1: model.Entry(1, "hello"),
        2: model.Entry(1, "world"),
    })
    self.assertEqual(appended, expected)
def projection2(arg1: 'BSet', arg2: 'BSet') -> 'BRelation':
    """Relate every pair (e1, e2) from arg1 x arg2 to its second
    component e2."""
    left_elements = arg1.getSet()
    right_elements = arg2.getSet()
    pairs = immutables.Map()
    for right in right_elements:
        # All pairs ending in `right` share the same singleton range.
        right_singleton = immutables.Map({right: right})
        for left in left_elements:
            pairs = pairs.set(BTuple(left, right), right_singleton)
    return BRelation(pairs)
class Request:
    """
    The Request is an object passed in through the Dialogue Manager and
    contains all the information provided by the application client for the
    dialogue handler to act on. Note: the Request object is read-only since it
    represents the client state, which should not be mutated.

    Attributes:
        domain (str): Domain of the current query.
        intent (str): Intent of the current query.
        entities (tuple): Entities in the current query (converted via
            tuple_elems_to_immutable_map).
        history (tuple): Previous and current responder objects
            (de-serialized) up to the current conversation.
        text (str): The query text.
        frame (immutables.Map): Stored data across multiple dialogue turns.
        params (Params): An object that modifies how MindMeld processes the
            current turn.
        context (immutables.Map): Front-end client state that is passed to
            the application from the client in the request.
        confidences (immutables.Map): Keys ``domains``, ``intents``,
            ``entities`` and ``roles`` containing confidence probabilities
            across all labels for each classifier.
        nbest_transcripts_text (tuple): Alternate n-best transcripts from an
            ASR system.
        nbest_transcripts_entities (tuple): Lists of extracted entities for
            each of the n-best transcripts.
        nbest_aligned_entities (tuple): Lists of aligned entities for each
            of the n-best transcripts.
        form (dict): Form data for the current turn.
    """

    domain = attr.ib(default=None)
    intent = attr.ib(default=None)
    entities = attr.ib(default=attr.Factory(tuple),
                       converter=tuple_elems_to_immutable_map)
    history = attr.ib(default=attr.Factory(tuple),
                      converter=tuple_elems_to_immutable_map)
    text = attr.ib(default=None)
    # immutables.Map defaults are safe to share: the container is immutable.
    frame = attr.ib(default=immutables.Map(), converter=immutables.Map)
    params = attr.ib(default=FrozenParams())
    context = attr.ib(default=immutables.Map(), converter=immutables.Map)
    confidences = attr.ib(default=immutables.Map(), converter=immutables.Map)
    nbest_transcripts_text = attr.ib(default=attr.Factory(tuple),
                                     converter=tuple_elems_to_immutable_map)
    nbest_transcripts_entities = attr.ib(
        default=attr.Factory(tuple), converter=tuple_elems_to_immutable_map)
    nbest_aligned_entities = attr.ib(default=attr.Factory(tuple),
                                     converter=tuple_elems_to_immutable_map)
    form = attr.ib(default=attr.Factory(dict))

    def to_dict(self):
        """Return a plain-dict view of the core request fields."""
        return {
            "text": self.text,
            "domain": self.domain,
            "intent": self.intent,
            "context": dict(self.context),
            "params": self.params.to_dict(),
            "frame": dict(self.frame),
        }
def fnc(self) -> 'BRelation':
    """Convert this relation into a function mapping each domain element
    to the BSet of all values it relates to."""
    relation = self.map
    grouped = immutables.Map()
    for element in self.domain().getSet():
        image = BSet(relation[element])
        grouped = grouped.set(element, immutables.Map({image: image}))
    return BRelation(grouped)
def test_context_can_be_temporarily_overridden(logger, spp_handler, log_stream):
    """override_context applies only inside the with-block; the default
    context (and its INFO threshold) governs everything outside it."""
    spp_handler.set_context(
        immutables.Map(
            log_correlation_id="default_correlation_id",
            log_correlation_type="AUTO",
            log_level=logging.INFO,
        )
    )
    logger.info("my info log message")
    logger.debug("a debug message")
    with spp_handler.override_context(
        immutables.Map(
            log_correlation_id="override_correlation_id",
            log_correlation_type="AUTO",
            log_level=logging.DEBUG,
        )
    ):
        logger.debug("my overridden debug")
    log_messages = parse_log_lines(log_stream.getvalue())
    # Only two records: the first debug was filtered by the INFO context.
    assert len(log_messages) == 2
    assert log_messages[0]["log_correlation_id"] == "default_correlation_id"
    assert log_messages[0]["description"] == "my info log message"
    assert log_messages[1]["log_correlation_id"] == "override_correlation_id"
    assert log_messages[1]["description"] == "my overridden debug"


def test_format_log_level(spp_handler):
    """format_log_level accepts both names and numeric levels."""
    assert spp_handler.format_log_level("INFO") == "INFO"
    assert spp_handler.format_log_level(20) == "INFO"


def test_log_level_int(spp_handler):
    """log_level_int accepts both names and numeric levels."""
    assert spp_handler.log_level_int("INFO") == 20
    assert spp_handler.log_level_int(20) == 20


def test_context_log_level_is_always_string(spp_handler):
    """set_context normalizes numeric log levels to their string names."""
    spp_handler.set_context(
        immutables.Map(
            log_correlation_id=str(uuid4()),
            log_correlation_type="AUTO",
            log_level=logging.WARNING,
        )
    )
    assert spp_handler.context["log_level"] == "WARNING"
    spp_handler.set_context(
        immutables.Map(
            log_correlation_id=str(uuid4()),
            log_correlation_type="AUTO",
            log_level="INFO",
        )
    )
    assert spp_handler.context["log_level"] == "INFO"
def fromSet(_set: 'BSet') -> 'BRelation':
    """Build a BRelation from a set of pairs, grouping second components
    under their first component."""
    grouped = immutables.Map()
    for pair in _set:
        key = pair.projection1()
        value = pair.projection2()
        existing = grouped.get(key, None)
        if existing is None:
            bucket = immutables.Map({value: value})
        else:
            bucket = existing.set(value, value)
        grouped = grouped.set(key, bucket)
    return BRelation(grouped)
def __init__(self, db: Database, *, user):
    """Create a per-user session view over ``db``.

    Session config starts empty and the default (None) module alias
    points at the 'default' module.
    """
    self._db = db
    self._user = user

    self._config = immutables.Map()
    self._modaliases = immutables.Map({None: 'default'})

    # Whenever we are in a transaction that had executed a
    # DDL command, we use this cache for compiled queries.
    self._eql_to_compiled = lru.LRUMapping(
        maxsize=defines._MAX_QUERIES_CACHE)

    self._new_tx_state()
def __init__(self, *args):
    """Construct a relation.

    - no arguments: the empty relation;
    - a single immutables.Map argument: adopt it as the backing map;
    - otherwise: treat each argument as a pair and group second
      components (as frozensets) under their first component.
    """
    if len(args) == 0:
        self.map = immutables.Map()
    # isinstance instead of `type(...) ==`: also accepts Map subclasses
    # and is the idiomatic type check.
    elif len(args) == 1 and isinstance(args[0], immutables.Map):
        self.map = args[0]
    else:
        self.map = immutables.Map()
        for e in args:
            key = e.projection1()
            value = e.projection2()
            _set = self.map.get(key)
            if not _set:
                _set = frozenset()
            _set = _set.union({value})
            self.map = self.map.set(key, _set)
def test_context_can_be_temporarily_overridden(spp_logger, log_stream):
    """override_context applies only inside the with-block; the default
    context (and its INFO threshold) governs everything outside it."""
    spp_logger.set_context(
        immutables.Map(
            log_correlation_id="default_correlation_id",
            log_correlation_type="AUTO",
            log_level=logging.INFO,
        )
    )
    spp_logger.info("my info log message")
    spp_logger.debug("a debug message")
    with spp_logger.override_context(
        immutables.Map(
            log_correlation_id="override_correlation_id",
            log_correlation_type="AUTO",
            log_level=logging.DEBUG,
        )
    ):
        spp_logger.debug("my overridden debug")
    log_messages = parse_log_lines(log_stream.getvalue())
    # Only two records: the first debug was filtered by the INFO context.
    assert len(log_messages) == 2
    assert log_messages[0]["log_correlation_id"] == "default_correlation_id"
    assert log_messages[0]["description"] == "my info log message"
    assert log_messages[1]["log_correlation_id"] == "override_correlation_id"
    assert log_messages[1]["description"] == "my overridden debug"


def test_setLevel_disabled(spp_logger):
    """setLevel is deliberately unsupported; the level lives in the context."""
    with pytest.raises(LogLevelException) as err:
        spp_logger.setLevel(logging.WARNING)
    assert str(err.value) == (
        "SPPLogger does not support setting log level this way. "
        + "Please set the log level using the 'log_level' attribute "
        + "on your context"
    )


def test_log_extra_attribute(spp_logger, log_stream):
    """Keys passed via ``extra`` appear on the emitted record."""
    spp_logger.info("my info log message", extra={"foobar": "barfoo"})
    log_messages = parse_log_lines(log_stream.getvalue())
    assert len(log_messages) == 1
    assert log_messages[0]["foobar"] == "barfoo"


def test_log_extra_attribute_cannot_override_context(spp_logger, log_stream):
    """``extra`` must not clobber context-provided fields."""
    spp_logger.info("my info log message",
                    extra={"log_correlation_type": "ERROR"})
    log_messages = parse_log_lines(log_stream.getvalue())
    assert len(log_messages) == 1
    assert log_messages[0]["log_correlation_type"] == "AUTO"
def _update_obj(self, obj_id, updates):
    """Apply a field->value ``updates`` mapping to the object's data
    record and return a new schema instance; ``self`` is not mutated.

    A ``None`` value removes the field.  Updating ``'name'`` also
    refreshes the name indexes via ``_update_obj_name``.
    """
    if not updates:
        return self

    try:
        data = self._id_to_data[obj_id]
    except KeyError:
        # Object had no data record yet; start from an empty map.
        data = immu.Map()

    # These stay None unless the 'name' field changes, in which case
    # _replace receives the recomputed indexes.
    name_to_id = None
    shortname_to_id = None
    globalname_to_id = None
    with data.mutate() as mm:
        for field, value in updates.items():
            if field == 'name':
                name_to_id, shortname_to_id, globalname_to_id = (
                    self._update_obj_name(
                        obj_id,
                        self._id_to_type[obj_id],
                        mm.get('name'),
                        value))
            if value is None:
                # None means "delete the field" (missing is fine).
                mm.pop(field, None)
            else:
                mm[field] = value
        new_data = mm.finish()

    id_to_data = self._id_to_data.set(obj_id, new_data)
    scls = self._id_to_type[obj_id]
    # Recompute back-references from the old vs. new data records.
    refs_to = self._update_refs_to(scls, data, new_data)
    return self._replace(name_to_id=name_to_id,
                         shortname_to_id=shortname_to_id,
                         globalname_to_id=globalname_to_id,
                         id_to_data=id_to_data,
                         refs_to=refs_to)
class FrozenParams(Params):
    """
    An immutable version of the Params object.

    Attributes:
        allowed_intents (list, str): A list of intents that you can set to
            force the language processor to choose from.
        target_dialogue_state (str): The name of the dialogue handler that
            you want to reach in the next turn.
        time_zone (str): The name of an IANA time zone, such as
            'America/Los_Angeles', or 'Asia/Kolkata'.
        language (str): The language code representing ISO 639-1/2
            language codes.
        locale (str, optional): The locale representing the ISO 639-1/2
            language code and ISO3166 alpha 2 country code separated by an
            underscore character.
        timestamp (long): A unix time stamp for the request accurate to the
            nearest second.
        dynamic_resource (dict): A dictionary containing data used to
            influence the language classifiers by adding resource data for
            the given turn.
    """

    allowed_intents = attr.ib(default=attr.Factory(tuple), converter=tuple)
    target_dialogue_state = attr.ib(default=None)
    time_zone = attr.ib(default=None)
    timestamp = attr.ib(default=0)
    language = attr.ib(default=None)
    locale = attr.ib(default=None)
    # immutables.Map default is safe to share: the container is immutable.
    dynamic_resource = attr.ib(default=immutables.Map(),
                               converter=immutables.Map)
def parallelProduct(self, arg: 'BRelation') -> 'BRelation':
    """Parallel product: for every a->b in self and c->d in arg, relate
    the pair (a, c) to the pair (b, d)."""
    left_map = self.map
    right_map = arg.map
    product = immutables.Map()
    for left_key in left_map.keys():
        for right_key in right_map.keys():
            combined = set()
            for left_value in left_map[left_key]:
                for right_value in right_map[right_key]:
                    combined = combined.union(
                        [BTuple(left_value, right_value)])
            product = product.set(
                BTuple(left_key, right_key), combined)
    return BRelation(product)
def construct_block_for_mining_genesis(
    non_coinbase_transactions: List[Transaction],
    miner_public_key: SECP256k1PublicKey,
    current_timestamp: int,
    random_data: bytes,
    nonce: int,
) -> Block:
    """Assemble a candidate genesis block (height 0) for mining.

    Prepends a coinbase transaction paying ``miner_public_key``, then
    builds the minable summary and proof-of-work evidence over an empty
    coin state.
    """
    coinstate = CoinState.empty()
    current_height = 0
    # Genesis has no history, so the set of unspent outputs starts empty.
    unspent_transaction_outs: immutables.Map[OutputReference, Output] = immutables.Map()
    coinbase_transaction = construct_coinbase_transaction(
        current_height, non_coinbase_transactions, unspent_transaction_outs,
        random_data, miner_public_key)
    # Coinbase always comes first in a block's transaction list.
    transactions = [coinbase_transaction] + non_coinbase_transactions
    summary = construct_minable_summary(
        coinstate, transactions, current_timestamp, nonce)
    evidence = construct_pow_evidence(
        coinstate, summary, current_height, transactions)
    return Block(BlockHeader(summary, evidence), transactions)
def test_uto_apply_transaction_on_coinbase():
    """A coinbase transaction simply adds its outputs to the UTXO set."""
    pk = SECP256k1PublicKey(b'x' * 64)
    first_output = Output(40, pk)
    second_output = Output(34, pk)

    tx = Transaction(
        inputs=[Input(
            construct_reference_to_thin_air(),
            CoinbaseData(0, b'coinbase of the first block'),
        )],
        outputs=[first_output, second_output],
    )

    result = uto_apply_transaction(immutables.Map(), tx, is_coinbase=True)

    ref_0 = OutputReference(tx.hash(), 0)
    ref_1 = OutputReference(tx.hash(), 1)
    assert ref_0 in result
    assert ref_1 in result
    assert result[ref_0] == first_output
    assert result[ref_1] == second_output
def test_server_config_02(self):
    """CONFIG_ADD/CONFIG_REM on the set-valued 'ports' setting at
    INSTANCE scope, including a JSON round-trip of the storage."""
    storage = immutables.Map()

    op = ops.Operation(ops.OpCode.CONFIG_ADD, config.ConfigScope.INSTANCE,
                       'ports', make_port_value(database='f1'))
    storage1 = op.apply(testspec1, storage)

    op = ops.Operation(ops.OpCode.CONFIG_ADD, config.ConfigScope.INSTANCE,
                       'ports', make_port_value(database='f2'))
    storage2 = op.apply(testspec1, storage1)

    # Both added ports are visible through config.lookup.
    self.assertEqual(
        config.lookup('ports', storage2, spec=testspec1),
        {
            Port.from_pyvalue(make_port_value(database='f1')),
            Port.from_pyvalue(make_port_value(database='f2')),
        })

    # Storage serializes to JSON and round-trips losslessly.
    j = ops.to_json(testspec1, storage2)
    storage3 = ops.from_json(testspec1, j)
    self.assertEqual(storage3, storage2)

    op = ops.Operation(ops.OpCode.CONFIG_REM, config.ConfigScope.INSTANCE,
                       'ports', make_port_value(database='f1'))
    storage3 = op.apply(testspec1, storage2)
    self.assertEqual(config.lookup('ports', storage3, spec=testspec1), {
        Port.from_pyvalue(make_port_value(database='f2')),
    })

    # Removing an already-removed value is a no-op.
    op = ops.Operation(ops.OpCode.CONFIG_REM, config.ConfigScope.INSTANCE,
                       'ports', make_port_value(database='f1'))
    storage4 = op.apply(testspec1, storage3)
    self.assertEqual(storage3, storage4)
def test_uto_apply_transaction_on_non_coinbase_transaction():
    """Spending removes the consumed reference from the UTXO set and adds
    the transaction's new outputs."""
    pk = SECP256k1PublicKey(b'x' * 64)
    out_0 = Output(40, pk)
    out_1 = Output(34, pk)
    out_2 = Output(30, pk)

    prev_hash = b'a' * 32
    utxo = immutables.Map({
        OutputReference(prev_hash, 0): out_0,
        OutputReference(prev_hash, 1): out_1,
    })
    tx = Transaction(
        inputs=[Input(OutputReference(prev_hash, 1),
                      SECP256k1Signature(b'y' * 64))],
        outputs=[out_2],
    )

    result = uto_apply_transaction(utxo, tx, is_coinbase=False)

    assert OutputReference(prev_hash, 0) in result
    assert OutputReference(prev_hash, 1) not in result  # spent
    assert OutputReference(tx.hash(), 0) in result
    assert result[OutputReference(prev_hash, 0)] == out_0
    assert result[OutputReference(tx.hash(), 0)] == out_2
def construct(arguments: terms.Arguments) -> t.Optional[terms.Term]:
    """Build a Mapping term from alternating key/value arguments.

    Returns None when the argument count is odd, since the arguments
    cannot be paired up.
    """
    if len(arguments) % 2 != 0:
        return None
    # Pair consecutive arguments: even indices are keys, odd are values.
    # dict(zip(...)) replaces the manual loop; later duplicate keys win,
    # exactly as with repeated dict assignment.
    entries: t.Dict[terms.Term, terms.Term] = dict(
        zip(arguments[::2], arguments[1::2]))
    return Mapping(immutables.Map(entries))
def __init__(
    self,
    service: str,
    component: str,
    environment: str,
    deployment: str,
    user: str = None,
    timezone: str = "UTC",
    context: immutables.Map = None,
    log_level: int = INFO,
    stream: IO = sys.stdout,
) -> None:
    """Create a stream log handler tagged with deployment metadata.

    If ``context`` is omitted, a fresh one is generated with a random
    correlation id and the supplied ``log_level``.
    """
    self.service = service
    self.component = component
    self.environment = environment
    self.deployment = deployment
    self.user = user
    self.timezone = timezone
    super().__init__(stream=stream)
    if context is None:
        context = immutables.Map(
            log_correlation_id=str(uuid4()), log_level=log_level
        )
    self._context = context
    # NOTE(review): a caller-supplied context may carry log_level as an int
    # or as a string name; this stores it verbatim — confirm downstream
    # consumers normalize it.
    self.level = self._context.get("log_level", INFO)
def _add(
    self,
    id: uuid.UUID,
    scls: so.Object,
    data: Mapping[str, Any],
) -> Schema:
    """Insert a new object into the schema and return the new Schema.

    Raises SchemaError when the name is already taken and
    UnknownModuleError when a qualified object names a missing module.
    """
    name = data['name']

    if name in self._name_to_id:
        raise errors.SchemaError(
            f'{type(scls).__name__} {name!r} is already present '
            f'in the schema {self!r}')

    # Freeze the data record before storing it.
    data = immu.Map(data)

    name_to_id, shortname_to_id, globalname_to_id = self._update_obj_name(
        id, scls, None, name)

    updates = dict(
        id_to_data=self._id_to_data.set(id, data),
        id_to_type=self._id_to_type.set(id, scls),
        name_to_id=name_to_id,
        shortname_to_id=shortname_to_id,
        globalname_to_id=globalname_to_id,
        refs_to=self._update_refs_to(scls, None, data),
    )

    # Module check happens after the index updates are computed but
    # before they are applied, so a failure leaves self untouched.
    if (isinstance(scls, so.QualifiedObject)
            and not self.has_module(name.module)):
        raise errors.UnknownModuleError(
            f'module {name.module!r} is not in this schema')

    return self._replace(**updates)  # type: ignore
def _get_inh_map(self, schema, context):
    """Map each altered property name to whether its new value comes
    from inheritance (op.source == 'inheritance')."""
    # Dict comprehension replaces the loop-and-assign; later ops for the
    # same property win, exactly as with repeated dict assignment.
    return immu.Map({
        op.property: op.source == 'inheritance'
        for op in self.get_subcommands(type=sd.AlterObjectProperty)
    })
def test_server_config_04(self):
    """CONFIG_SET at SESSION scope: scalar and set values, with value-type
    validation and storage immutability."""
    storage = immutables.Map()

    op = ops.Operation(ops.OpCode.CONFIG_SET, config.ConfigScope.SESSION,
                       'int', 11)
    storage1 = op.apply(testspec1, storage)
    self.assertEqual(config.lookup('int', storage1, spec=testspec1), 11)

    # A string is rejected for an int-typed setting.
    op = ops.Operation(ops.OpCode.CONFIG_SET, config.ConfigScope.SESSION,
                       'int', '42')
    with self.assertRaisesRegex(errors.ConfigurationError,
                                "invalid value type for the 'int'"):
        op.apply(testspec1, storage1)

    op = ops.Operation(ops.OpCode.CONFIG_SET, config.ConfigScope.SESSION,
                       'int', 42)
    storage2 = op.apply(testspec1, storage1)

    op = ops.Operation(ops.OpCode.CONFIG_SET, config.ConfigScope.SESSION,
                       'ints', {42})
    storage2 = op.apply(testspec1, storage2)

    op = ops.Operation(ops.OpCode.CONFIG_SET, config.ConfigScope.SESSION,
                       'ints', {42, 43})
    storage2 = op.apply(testspec1, storage2)

    # Earlier storages are unaffected (immutability); the latest SET wins.
    self.assertEqual(config.lookup('int', storage1, spec=testspec1), 11)
    self.assertEqual(config.lookup('int', storage2, spec=testspec1), 42)
    self.assertEqual(config.lookup('ints', storage2, spec=testspec1),
                     {42, 43})
def test_server_config_02(self):
    """OpLevel-based variant: CONFIG_ADD/CONFIG_REM on 'ports' at SYSTEM
    level, reading results directly from storage instead of config.lookup."""
    storage = immutables.Map()

    op = ops.Operation(ops.OpCode.CONFIG_ADD, ops.OpLevel.SYSTEM,
                       'ports', make_port_value(database='f1'))
    storage1 = op.apply(testspec1, storage)

    op = ops.Operation(ops.OpCode.CONFIG_ADD, ops.OpLevel.SYSTEM,
                       'ports', make_port_value(database='f2'))
    storage2 = op.apply(testspec1, storage1)

    # Both added ports are present in the stored set.
    self.assertEqual(
        storage2['ports'],
        {
            Port.from_pyvalue(make_port_value(database='f1')),
            Port.from_pyvalue(make_port_value(database='f2')),
        })

    # Storage serializes to JSON and round-trips losslessly.
    j = ops.to_json(testspec1, storage2)
    storage3 = ops.from_json(testspec1, j)
    self.assertEqual(storage3, storage2)

    op = ops.Operation(ops.OpCode.CONFIG_REM, ops.OpLevel.SYSTEM,
                       'ports', make_port_value(database='f1'))
    storage3 = op.apply(testspec1, storage2)
    self.assertEqual(storage3['ports'], {
        Port.from_pyvalue(make_port_value(database='f2')),
    })

    # Removing an already-removed value is a no-op.
    op = ops.Operation(ops.OpCode.CONFIG_REM, ops.OpLevel.SYSTEM,
                       'ports', make_port_value(database='f1'))
    storage4 = op.apply(testspec1, storage3)
    self.assertEqual(storage3, storage4)