def impersonate(request):
    # type: (http.HttpRequest) -> http.HttpResponse
    if hasattr(request, "relate_impersonate_original_user"):
        messages.add_message(request, messages.ERROR,
                _("Already impersonating someone."))
        return redirect("relate-stop_impersonating")

    if request.method == 'POST':
        form = ImpersonateForm(request.POST)
        if form.is_valid():
            impersonee = form.cleaned_data["user"]

            if may_impersonate(cast(User, request.user),
                               cast(User, impersonee)):
                request.session['impersonate_id'] = impersonee.id
                request.session['relate_impersonation_header'] = form.cleaned_data[
                        "add_impersonation_header"]

                # Because we'll likely no longer have access to this page.
                return redirect("relate-home")
            else:
                messages.add_message(request, messages.ERROR,
                        _("Impersonating that user is not allowed."))
    else:
        form = ImpersonateForm()

    return render(request, "generic-form.html", {
        "form_description": _("Impersonate user"),
        "form": form
        })
def check_mapping_str_interpolation(self,
                                    specifiers: List[ConversionSpecifier],
                                    replacements: Node) -> None:
    dict_with_only_str_literal_keys = (
        isinstance(replacements, DictExpr) and
        all(isinstance(k, StrExpr)
            for k, v in cast(DictExpr, replacements).items))
    if dict_with_only_str_literal_keys:
        mapping = {}  # type: Dict[str, Type]
        for k, v in cast(DictExpr, replacements).items:
            key_str = cast(StrExpr, k).value
            mapping[key_str] = self.accept(v)

        for specifier in specifiers:
            if specifier.key not in mapping:
                self.msg.key_not_in_mapping(specifier.key, replacements)
                return
            rep_type = mapping[specifier.key]
            expected_type = self.conversion_type(specifier.type, replacements)
            if expected_type is None:
                return
            self.chk.check_subtype(rep_type, expected_type, replacements,
                                   messages.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
                                   'expression has type',
                                   'placeholder with key \'%s\' has type' % specifier.key)
    else:
        rep_type = self.accept(replacements)
        dict_type = self.chk.named_generic_type('builtins.dict',
                                                [AnyType(), AnyType()])
        self.chk.check_subtype(rep_type, dict_type, replacements,
                               messages.FORMAT_REQUIRES_MAPPING,
                               'expression has type',
                               'expected type for mapping is')
def expand_caller_var_args(arg_types: List[Type],
                           fixed_argc: int) -> Tuple[List[Type], Type]:
    """Expand the caller argument types in a varargs call.

    fixed_argc is the maximum number of fixed arguments that the target
    function accepts.

    Return (fixed argument types, type of the rest of the arguments). Return
    (None, None) if the last (vararg) argument had an invalid type. If the
    vararg argument was not an array (nor dynamic), the last item in the
    returned tuple is None.
    """
    if isinstance(arg_types[-1], TupleType):
        return arg_types[:-1] + (cast(TupleType, arg_types[-1])).items, None
    else:
        item_type = Undefined  # type: Type
        if isinstance(arg_types[-1], AnyType):
            item_type = AnyType()
        elif isinstance(arg_types[-1], Instance) and (
                cast(Instance, arg_types[-1]).type.fullname() == 'builtins.list'):
            # List.
            item_type = (cast(Instance, arg_types[-1])).args[0]
        else:
            return None, None

        if len(arg_types) > fixed_argc:
            return arg_types[:-1], item_type
        else:
            return (arg_types[:-1] +
                    [item_type] * (fixed_argc - len(arg_types) + 1),
                    item_type)
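# Worked example for expand_caller_var_args (shown as comments, since the real
# arguments are mypy's internal Type objects rather than plain Python values):
#
#   expand_caller_var_args([int, str, Tuple[bool, float]], fixed_argc=4)
#       -> ([int, str, bool, float], None)   # tuple *arg is flattened
#   expand_caller_var_args([int, List[str]], fixed_argc=3)
#       -> ([int, str, str], str)            # list *arg is padded to fixed_argc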
def _parity_interaction(q0: ops.QubitId,
                        q1: ops.QubitId,
                        rads: float,
                        tolerance: float,
                        gate: Optional[ops.ReversibleEffect] = None):
    """Yields a ZZ interaction framed by the given operation."""
    if abs(rads) < tolerance:
        return

    h = rads * -2 / np.pi
    if gate is not None:
        g = cast(ops.Gate, gate)
        yield g.on(q0), g.on(q1)

    # If rads is within tolerance of ±pi/4, a single full CZ suffices.
    if _is_trivial_angle(rads, tolerance):
        yield ops.CZ.on(q0, q1)
    else:
        yield ops.CZ(q0, q1) ** (-2 * h)

    yield ops.Z(q0)**h
    yield ops.Z(q1)**h
    if gate is not None:
        g = cast(ops.Gate, gate.inverse())
        yield g.on(q0), g.on(q1)
def _load_bytes(f):
    # type: (Union[IO[bytes], Text]) -> bytes
    if hasattr(f, 'read') and callable(cast(IO[bytes], f).read):
        s = cast(IO[bytes], f).read()
    else:
        with open(cast(Text, f), 'rb') as readable:
            s = readable.read()
    return s
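# Usage sketch for _load_bytes above: it accepts either an open binary stream
# or a filesystem path (the path in the second comment is hypothetical).
import io

assert _load_bytes(io.BytesIO(b"hello")) == b"hello"
# _load_bytes("/tmp/example.bin") would open and read the file instead.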
def test_replace_payment_source(self, *mocks: Mock) -> None:
    user = self.example_user("hamlet")
    self.login(user.email)
    self.upgrade()
    # Try replacing with a valid card
    stripe_token = stripe_create_token(card_number='5555555555554444').id
    response = self.client_post("/json/billing/sources/change",
                                {'stripe_token': ujson.dumps(stripe_token)})
    self.assert_json_success(response)
    number_of_sources = 0
    for stripe_source in stripe_get_customer(
            Customer.objects.first().stripe_customer_id).sources:
        self.assertEqual(cast(stripe.Card, stripe_source).last4, '4444')
        number_of_sources += 1
    self.assertEqual(number_of_sources, 1)
    audit_log_entry = RealmAuditLog.objects.order_by('-id') \
        .values_list('acting_user', 'event_type').first()
    self.assertEqual(audit_log_entry, (user.id, RealmAuditLog.STRIPE_CARD_CHANGED))
    RealmAuditLog.objects.filter(acting_user=user).delete()

    # Try replacing with an invalid card
    stripe_token = stripe_create_token(card_number='4000000000009987').id
    with patch("corporate.lib.stripe.billing_logger.error") as mock_billing_logger:
        response = self.client_post("/json/billing/sources/change",
                                    {'stripe_token': ujson.dumps(stripe_token)})
    mock_billing_logger.assert_called()
    self.assertEqual(ujson.loads(response.content)['error_description'], 'card error')
    self.assert_json_error_contains(response, 'Your card was declined')
    number_of_sources = 0
    for stripe_source in stripe_get_customer(
            Customer.objects.first().stripe_customer_id).sources:
        self.assertEqual(cast(stripe.Card, stripe_source).last4, '4444')
        number_of_sources += 1
    self.assertEqual(number_of_sources, 1)
    self.assertFalse(RealmAuditLog.objects.filter(
        event_type=RealmAuditLog.STRIPE_CARD_CHANGED).exists())
def type_object_type_from_function(init_or_new: FuncBase, info: TypeInfo,
                                   fallback: Instance) -> FunctionLike:
    signature = method_type_with_fallback(init_or_new, fallback)

    # The __init__ method might come from a generic superclass
    # (init_or_new.info) with type variables that do not map
    # identically to the type variables of the class being constructed
    # (info). For example
    #
    #   class A(Generic[T]):
    #       def __init__(self, x: T) -> None: pass
    #   class B(A[List[T]], Generic[T]): pass
    #
    # We need to first map B's __init__ to the type (List[T]) -> None.
    signature = cast(FunctionLike,
                     map_type_from_supertype(signature, info, init_or_new.info))

    if init_or_new.info.fullname() == "builtins.dict":
        # Special signature!
        special_sig = "dict"
    else:
        special_sig = None

    if isinstance(signature, CallableType):
        return class_callable(signature, info, fallback, special_sig)
    else:
        # Overloaded __init__/__new__.
        items = []  # type: List[CallableType]
        for item in cast(Overloaded, signature).items():
            items.append(class_callable(item, info, fallback, special_sig))
        return Overloaded(items)
def has_no_attr(self, typ: Type, member: str, context: Context) -> Type:
    """Report a missing or non-accessible member.

    The type argument is the base type. If member corresponds to
    an operator, use the corresponding operator name in the
    messages. Return type Any.
    """
    if (isinstance(typ, Instance) and
            (cast(Instance, typ)).type.has_readable_member(member)):
        self.fail('Member "{}" is not assignable'.format(member), context)
    elif isinstance(typ, Void):
        self.check_void(typ, context)
    elif member == '__contains__':
        self.fail('Unsupported right operand type for in ({})'.format(
            self.format(typ)), context)
    elif member in op_methods.values():
        # Access to a binary operator member (e.g. _add). This case does
        # not handle indexing operations.
        for op, method in op_methods.items():
            if method == member:
                self.unsupported_left_operand(op, typ, context)
                break
    elif member == '__neg__':
        self.fail('Unsupported operand type for unary - ({})'.format(
            self.format(typ)), context)
    elif member == '__pos__':
        self.fail('Unsupported operand type for unary + ({})'.format(
            self.format(typ)), context)
    elif member == '__invert__':
        self.fail('Unsupported operand type for ~ ({})'.format(
            self.format(typ)), context)
    elif member == '__getitem__':
        # Indexed get.
        self.fail('Value of type {} is not indexable'.format(
            self.format(typ)), context)
    elif member == '__setitem__':
        # Indexed set.
        self.fail('Unsupported target for indexed assignment', context)
    elif member == '__call__':
        self.fail('{} not callable'.format(self.format(typ)), context)
    else:
        # The non-special case: a missing ordinary attribute.
        if not self.disable_type_names:
            failed = False
            if isinstance(typ, Instance) and cast(Instance, typ).type.names:
                typ = cast(Instance, typ)
                alternatives = set(typ.type.names.keys())
                matches = [m for m in COMMON_MISTAKES.get(member, [])
                           if m in alternatives]
                matches.extend(best_matches(member, alternatives)[:3])
                if matches:
                    self.fail('{} has no attribute "{}"; maybe {}?'.format(
                        self.format(typ), member, pretty_or(matches)), context)
                    failed = True
            if not failed:
                self.fail('{} has no attribute "{}"'.format(
                    self.format(typ), member), context)
        else:
            self.fail('Some element of union has no attribute "{}"'.format(
                member), context)
    return AnyType()
def format(self, typ: Type, verbose: bool = False) -> str:
    """Convert a type to a relatively short string that is suitable for
    error messages.

    Mostly behave like format_simple below, but never return an empty string.
    """
    s = self.format_simple(typ)
    if s != '':
        # If format_simple returns a non-trivial result, use that.
        return s
    elif isinstance(typ, FunctionLike):
        func = cast(FunctionLike, typ)
        if func.is_type_obj():
            # The type of a type object type can be derived from the
            # return type (this always works).
            itype = cast(Instance, func.items()[0].ret_type)
            result = self.format(itype)
            if verbose:
                # In some contexts we want to be explicit about the distinction
                # between type X and the type of type object X.
                result += ' (type object)'
            return result
        elif isinstance(func, CallableType):
            return_type = strip_quotes(self.format(func.ret_type))
            if func.is_ellipsis_args:
                return 'Callable[..., {}]'.format(return_type)
            arg_types = [strip_quotes(self.format(t)) for t in func.arg_types]
            return 'Callable[[{}], {}]'.format(", ".join(arg_types),
                                               return_type)
        else:
            # Use a simple representation for function types; proper
            # function types may result in long and difficult-to-read
            # error messages.
            return 'overloaded function'
    else:
        # Default case; we simply have to return something meaningful here.
        return 'object'
def test_interface(self):
    # type: () -> None
    """
    Class implements L{IMutableHTTPHeaders}.
    """
    headers = self.headers(rawHeaders=())
    cast(TestCase, self).assertProvides(IMutableHTTPHeaders, headers)
def format(self, typ: Type) -> str:
    """Convert a type to a relatively short string that is suitable for
    error messages.

    Mostly behave like format_simple below, but never return an empty string.
    """
    s = self.format_simple(typ)
    if s != '':
        # If format_simple returns a non-trivial result, use that.
        return s
    elif isinstance(typ, FunctionLike):
        func = cast(FunctionLike, typ)
        if func.is_type_obj():
            # The type of a type object type can be derived from the
            # return type (this always works).
            itype = cast(Instance, func.items()[0].ret_type)
            return self.format(itype)
        elif isinstance(func, Callable):
            arg_types = map(self.format, func.arg_types)
            return_type = self.format(func.ret_type)
            return 'Function[[{}] -> {}]'.format(", ".join(arg_types),
                                                 return_type)
        else:
            # Use a simple representation for function types; proper
            # function types may result in long and difficult-to-read
            # error messages.
            return 'functionlike'
    else:
        # Default case; we simply have to return something meaningful here.
        return 'object'
def test_getTextName(self, textPairs):
    # type: (Iterable[Tuple[Text, Text]]) -> None
    """
    C{getValues} returns an iterable of L{Text} values for the
    given L{Text} header name.

    This test only inserts Latin1 text into the header values, which is
    valid data.
    """
    textHeaders = tuple((name, value) for name, value in textPairs)

    textValues = defaultdict(list)  # type: Dict[Text, List[Text]]
    for name, value in textHeaders:
        textValues[normalizeHeaderName(name)].append(value)

    rawHeaders = tuple(
        (headerNameAsBytes(name), headerValueAsBytes(value))
        for name, value in textHeaders
    )

    for name, _values in textValues.items():
        cast(TestCase, self).assertEqual(
            list(self.getValues(rawHeaders, name)), _values,
            "header name: {!r}".format(name)
        )
def test_getTextNameBinaryValues(self, pairs):
    # type: (Iterable[Tuple[Text, bytes]]) -> None
    """
    C{getValues} returns an iterable of L{Text} values for the
    given L{Text} header name.

    This test only inserts binary data into the header values, which
    includes invalid data if you are a sane person, but arguably
    technically valid if you read the spec because the spec is unclear
    about header encodings, so we made sure that works also, if only
    sort of.
    """
    rawHeaders = tuple(
        (headerNameAsBytes(name), value) for name, value in pairs
    )

    binaryValues = defaultdict(list)  # type: Dict[Text, List[bytes]]
    for name, value in rawHeaders:
        binaryValues[headerNameAsText(normalizeHeaderName(name))].append(
            value
        )

    for textName, values in binaryValues.items():
        cast(TestCase, self).assertEqual(
            tuple(self.getValues(rawHeaders, textName)),
            tuple(headerValueAsText(value) for value in values),
            "header name: {!r}".format(textName)
        )
def process_doc(self, app, doctree):
    # type: (Sphinx, nodes.document) -> None
    """Process the docinfo part of the doctree as metadata.

    Keep processing minimal -- just return what docutils says.
    """
    if len(doctree) > 0 and isinstance(doctree[0], nodes.docinfo):
        md = app.env.metadata[app.env.docname]
        for node in doctree[0]:
            # nodes are multiply inherited...
            if isinstance(node, nodes.authors):
                authors = cast(List[nodes.author], node)
                md['authors'] = [author.astext() for author in authors]
            elif isinstance(node, nodes.field):
                assert len(node) == 2
                field_name = cast(nodes.field_name, node[0])
                field_body = cast(nodes.field_body, node[1])
                md[field_name.astext()] = field_body.astext()
            elif isinstance(node, nodes.TextElement):
                # other children must be TextElement
                # see: http://docutils.sourceforge.net/docs/ref/doctree.html#bibliographic-elements  # NOQA
                md[node.__class__.__name__] = node.astext()

        for name, value in md.items():
            if name in ('tocdepth',):
                try:
                    value = int(value)
                except ValueError:
                    value = 0
                md[name] = value

        doctree.pop(0)
def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
    self.line = o.line
    if (isinstance(o.rvalue, nodes.CallExpr) and
            isinstance(cast(nodes.CallExpr, o.rvalue).analyzed,
                       nodes.TypeVarExpr)):
        # Type variable definition -- not a real assignment.
        return
    if o.type:
        self.type(o.type)
    elif self.inferred:
        for lvalue in o.lvalues:
            if isinstance(lvalue, nodes.TupleExpr):
                items = lvalue.items
            elif isinstance(lvalue, nodes.ListExpr):
                items = lvalue.items
            else:
                items = [lvalue]
            for item in items:
                if hasattr(item, 'is_def') and cast(Any, item).is_def:
                    t = self.typemap.get(item)
                    if t:
                        self.type(t)
                    else:
                        self.log('  !! No inferred type on line %d' %
                                 self.line)
                        self.record_line(self.line, TYPE_ANY)
    super().visit_assignment_stmt(o)
def __init__(self, archive: BinaryIO, offset: int, length: int,
             prefix: bytes):
    archive.seek(offset)
    self.name = archive.name
    self.remaining = length
    self.sources = [cast(io.BufferedIOBase, archive)]
    if prefix:
        self.sources.insert(0, cast(io.BufferedIOBase, io.BytesIO(prefix)))
def _sendPrintJobWaitOnWriteJobFinished(self, job: WriteFileJob) -> None:
    if self._write_job_progress_message:
        self._write_job_progress_message.hide()

    self._progress_message = Message(i18n_catalog.i18nc("@info:status", "Sending data to printer"),
                                     lifetime = 0, dismissable = False, progress = -1,
                                     title = i18n_catalog.i18nc("@info:title", "Sending Data"))
    self._progress_message.addAction("Abort", i18n_catalog.i18nc("@action:button", "Cancel"),
                                     icon = None, description = "")
    self._progress_message.actionTriggered.connect(self._progressMessageActionTriggered)
    self._progress_message.show()

    parts = []

    target_printer, preferred_format, stream = self._dummy_lambdas

    # If a specific printer was selected, it should be printed with that machine.
    if target_printer:
        target_printer = self._printer_uuid_to_unique_name_mapping[target_printer]
        parts.append(self._createFormPart("name=require_printer_name",
                                          bytes(target_printer, "utf-8"), "text/plain"))

    # Add user name to the print_job
    parts.append(self._createFormPart("name=owner",
                                      bytes(self._getUserName(), "utf-8"), "text/plain"))

    file_name = CuraApplication.getInstance().getPrintInformation().jobName + "." + preferred_format["extension"]

    output = stream.getvalue()  # Either str or bytes depending on the output mode.
    if isinstance(stream, io.StringIO):
        output = cast(str, output).encode("utf-8")
    output = cast(bytes, output)

    parts.append(self._createFormPart("name=\"file\"; filename=\"%s\"" % file_name, output))

    self._latest_reply_handler = self.postFormWithParts("print_jobs/", parts,
                                                        on_finished = self._onPostPrintJobFinished,
                                                        on_progress = self._onUploadPrintJobProgress)
def get_actual_type(arg_type: Type, kind: int,
                    tuple_counter: List[int]) -> Type:
    """Return the type of an actual argument with the given kind.

    If the argument is a *arg, return the individual argument item.
    """
    if kind == nodes.ARG_STAR:
        if isinstance(arg_type, Instance):
            if arg_type.type.fullname() == 'builtins.list':
                # List *arg.
                return arg_type.args[0]
            elif arg_type.args:
                # TODO try to map type arguments to Iterable
                return arg_type.args[0]
            else:
                return AnyType()
        elif isinstance(arg_type, TupleType):
            # Get the next tuple item of a tuple *arg.
            tuplet = cast(TupleType, arg_type)
            tuple_counter[0] += 1
            return tuplet.items[tuple_counter[0] - 1]
        else:
            return AnyType()
    elif kind == nodes.ARG_STAR2:
        if isinstance(arg_type, Instance) and (
                (cast(Instance, arg_type)).type.fullname() == 'builtins.dict'):
            # Dict **arg. TODO more general (Mapping)
            return (cast(Instance, arg_type)).args[1]
        else:
            return AnyType()
    else:
        # No translation for other kinds.
        return arg_type
def event(self, event: Event):
    if self._selection_tool and self._selection_tool.event(event):
        return

    if self._active_tool and self._active_tool.event(event):
        return

    if self._camera_tool and self._camera_tool.event(event):
        return

    if self._tools and event.type == Event.KeyPressEvent:
        event = cast(KeyEvent, event)
        from UM.Scene.Selection import Selection  # Imported here to prevent a circular dependency.
        if Selection.hasSelection():
            for key, tool in self._tools.items():
                if tool.getShortcutKey() is not None and event.key == tool.getShortcutKey():
                    self.setActiveTool(tool)

    if self._active_view:
        self._active_view.event(event)

    if event.type == Event.MouseReleaseEvent:
        event = cast(MouseEvent, event)
        if MouseEvent.RightButton in event.buttons:
            self.contextMenuRequested.emit(event.x, event.y)
async def async_step_init(
        self, user_input: Optional[Dict[str, str]] = None) \
        -> Dict[str, Any]:
    """Handle the step of the form."""
    errors = {}

    if user_input is not None:
        try:
            cast(ExampleAuthProvider, self._auth_provider)\
                .async_validate_login(user_input['username'],
                                      user_input['password'])
        except InvalidAuthError:
            errors['base'] = 'invalid_auth'

        if not errors:
            user_input.pop('password')
            return await self.async_finish(user_input)

    schema = OrderedDict()  # type: Dict[str, type]
    schema['username'] = str
    schema['password'] = str

    return self.async_show_form(
        step_id='init',
        data_schema=vol.Schema(schema),
        errors=errors,
    )
def visit_callable(self, template: Callable) -> List[Constraint]:
    if isinstance(self.actual, Callable):
        cactual = cast(Callable, self.actual)
        # FIX verify argument counts
        # FIX what if one of the functions is generic
        res = []  # type: List[Constraint]
        for i in range(len(template.arg_types)):
            # Negate constraints due to function argument type contravariance.
            res.extend(negate_constraints(infer_constraints(
                template.arg_types[i], cactual.arg_types[i],
                self.direction)))
        res.extend(infer_constraints(template.ret_type, cactual.ret_type,
                                     self.direction))
        return res
    elif isinstance(self.actual, AnyType):
        # FIX what if generic
        res = self.infer_against_any(template.arg_types)
        res.extend(infer_constraints(template.ret_type, AnyType(),
                                     self.direction))
        return res
    elif isinstance(self.actual, Overloaded):
        return self.infer_against_overloaded(cast(Overloaded, self.actual),
                                             template)
    else:
        return []
def handle_renegotiate(self, packets):
    # type: (BaseConnection, Sequence[MsgPackable]) -> bool
    """The handler for connection renegotiations

    This is to deal with connection maintenance. For instance, it could
    be that a compression method fails to decode on the other end, and a
    node will need to renegotiate which methods it is using. Hence the
    name of the flag associated with it, "renegotiate".

    Args:
        packets:    A :py:class:`tuple` containing the packets received
                        in this message

    Returns:
        ``True`` if an action was taken, ``False`` if not
    """
    if packets[0] == flags.renegotiate:
        if packets[3] == flags.compression:
            encoded_methods = packets[4]
            respond = (self.compression != encoded_methods)
            self.compression = list(cast(Iterable[int], encoded_methods))
            self.__print__(
                "Compression methods changed to: %s" % repr(self.compression),
                level=2)
            if respond:
                self.send(flags.renegotiate, flags.compression,
                          cast(Tuple[int, ...],
                               intersect(compression, self.compression)))
            return True
        elif packets[3] == flags.resend:
            self.send(*self.last_sent)
            return True

    return False
def visit_call_expr(self, e: CallExpr) -> int:
    args = []  # type: List[int]
    for arg in e.args:
        args.append(self.accept(arg))
    if isinstance(e.callee, NameExpr):
        name = cast(NameExpr, e.callee)
        target = self.target_register()
        self.add(CallDirect(target, name.name, args))
    elif isinstance(e.callee, MemberExpr):
        member = cast(MemberExpr, e.callee)
        receiver = self.accept(member.expr)
        target = self.target_register()
        receiver_type = self.types[member.expr]
        assert isinstance(receiver_type, Instance)  # TODO more flexible
        typeinfo = (cast(Instance, receiver_type)).type
        self.add(CallMethod(target, receiver, member.name, typeinfo, args))
    elif isinstance(e.callee, SuperExpr):
        superexpr = cast(SuperExpr, e.callee)
        target = self.target_register()
        self.add(CallMethod(target, 0, superexpr.name,
                            superexpr.info.bases[0].type,
                            args, static=True))
    else:
        raise NotImplementedError('call target %s' % type(e.callee))
    return target
def proxies_from_env() -> Dict[str, ProxyInfo]:
    proxy_urls = {k: URL(v) for k, v in getproxies().items()
                  if k in ('http', 'https')}
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme == 'https':
            client_logger.warning(
                "HTTPS proxies %s are not supported, ignoring", proxy)
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
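# Hedged usage sketch for proxies_from_env above: getproxies() reads the
# standard *_proxy environment variables, and strip_auth_from_url splits
# inline credentials into a BasicAuth. The host and credentials are invented.
#
#   os.environ["http_proxy"] = "http://user:secret@proxy.example.com:3128"
#   proxies_from_env()
#   -> {'http': ProxyInfo(proxy=URL('http://proxy.example.com:3128'),
#                         proxy_auth=BasicAuth('user', 'secret'))}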
def make_class_constructor(self, tdef: ClassDef) -> None:
    # Do we have a non-empty __init__?
    init = cast(FuncDef, tdef.info.get_method('__init__'))
    init_argc = len(init.args) - 1
    if init.info.fullname() == 'builtins.object':
        init = None

    self.enter()
    if init:
        args = []  # type: List[int]
        for arg in init.args[1:]:
            args.append(self.add_local(arg))
    target = self.alloc_register()
    self.add(Construct(target, tdef.info))
    # Initialize data attributes to default values.
    for name, node in sorted(tdef.info.names.items()):
        if isinstance(node.node, Var):
            var = cast(Var, node.node)
            temp = self.alloc_register()
            vtype = var.type
            if is_named_instance(vtype, 'builtins.int'):
                self.add(SetRI(temp, 0))
            else:
                self.add(SetRNone(temp))
            self.add(SetAttr(target, name, temp, tdef.info))
    if init:
        self.add(CallMethod(self.alloc_register(), target, '__init__',
                            init.info, args, static=True))
    self.add(Return(target))
    self.generated[tdef.name] = FuncIcode(init_argc, self.blocks,
                                          self.register_types)
    self.leave()
def run_eden_start_with_real_daemon(
    eden_dir: pathlib.Path,
    etc_eden_dir: pathlib.Path,
    home_dir: pathlib.Path,
    systemd: bool,
) -> None:
    env = dict(os.environ)
    if systemd:
        env["EDEN_EXPERIMENTAL_SYSTEMD"] = "1"
    else:
        env.pop("EDEN_EXPERIMENTAL_SYSTEMD", None)
    command = [
        typing.cast(str, FindExe.EDEN_CLI),  # T38947910
        "--config-dir",
        str(eden_dir),
        "--etc-eden-dir",
        str(etc_eden_dir),
        "--home-dir",
        str(home_dir),
        "start",
        "--daemon-binary",
        typing.cast(str, FindExe.EDEN_DAEMON),  # T38947910
    ]
    if eden_start_needs_allow_root_option(systemd=systemd):
        command.extend(["--", "--allowRoot"])
    subprocess.check_call(command, env=env)
def merged_with(self, op: 'PauliStringPhasor') -> 'PauliStringPhasor':
    if not self.can_merge_with(op):
        raise ValueError('Cannot merge operations: {}, {}'.format(self, op))
    neg_sign = (1, -1)[op.pauli_string.negated ^ self.pauli_string.negated]
    half_turns = (cast(float, self.half_turns) +
                  cast(float, op.half_turns) * neg_sign)
    return PauliStringPhasor(self.pauli_string, half_turns=half_turns)
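# The (1, -1)[flag] idiom above maps a boolean onto a sign: indexing a tuple
# with False selects element 0 (+1) and with True selects element 1 (-1).
# Minimal standalone illustration:
assert (1, -1)[False] == 1
assert (1, -1)[True] == -1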
def send(
        self,  # type: BaseConnection
        msg_type,  # type: MsgPackable
        *args,  # type: MsgPackable
        **kargs  # type: Union[bytes, int]
):
    # type: (...) -> InternalMessage
    """Sends a message through its connection.

    Args:
        msg_type:   Message type, corresponds to the header in a
                        :py:class:`~py2p.base.InternalMessage` object
        *args:      A list of bytes-like objects, which correspond to
                        the packets to send to you
        **kargs:    There are two available keywords:
                        id: The ID this message should appear to be sent
                            from (default: your ID)
                        time: The time this message should appear to be
                            sent from (default: now in UTC)

    Returns:
        the :py:class:`~py2p.base.InternalMessage` object you just sent,
        or ``None`` if the sending was unsuccessful
    """
    # Latter is returned if key not found
    id = cast(bytes, kargs.get('id', self.server.id))
    time = cast(int, kargs.get('time') or getUTC())
    # Begin real method
    msg = InternalMessage(
        msg_type, id, args, self.compression, timestamp=time)
    return self.send_InternalMessage(msg)
def _non_local_part(q0: ops.QubitId,
                    q1: ops.QubitId,
                    x: float, y: float, z: float,
                    allow_partial_czs: bool,
                    tolerance: float = 1e-8):
    """Yields non-local operation of KAK decomposition."""
    if (allow_partial_czs
            or all(_is_trivial_angle(e, tolerance) for e in [x, y, z])):
        return [
            _parity_interaction(q0, q1, x, tolerance,
                                cast(ops.ReversibleEffect, ops.Y**-0.5)),
            _parity_interaction(q0, q1, y, tolerance,
                                cast(ops.ReversibleEffect, ops.X**0.5)),
            _parity_interaction(q0, q1, z, tolerance)
        ]

    if abs(z) >= tolerance:
        return _xx_yy_zz_interaction_via_full_czs(q0, q1, x, y, z)

    if y >= tolerance:
        return _xx_yy_interaction_via_full_czs(q0, q1, x, y)

    return _xx_interaction_via_full_czs(q0, q1, x)
async def async_step_init(
        self, user_input: Optional[Dict[str, str]] = None) \
        -> Dict[str, Any]:
    """Handle the step of the form."""
    errors = {}

    hass_http = getattr(self.hass, 'http', None)
    if hass_http is None or not hass_http.api_password:
        return self.async_abort(
            reason='no_api_password_set'
        )

    if user_input is not None:
        try:
            cast(LegacyApiPasswordAuthProvider, self._auth_provider)\
                .async_validate_login(user_input['password'])
        except InvalidAuthError:
            errors['base'] = 'invalid_auth'

        if not errors:
            return await self.async_finish({})

    return self.async_show_form(
        step_id='init',
        data_schema=vol.Schema({'password': str}),
        errors=errors,
    )
def send_message_backend(request: HttpRequest, user_profile: UserProfile,
                         message_type_name: str=REQ('type'),
                         req_to: Optional[str]=REQ('to', default=None),
                         forged_str: Optional[str]=REQ("forged", default=None,
                                                       documentation_pending=True),
                         topic_name: Optional[str]=REQ_topic(),
                         message_content: str=REQ('content'),
                         widget_content: Optional[str]=REQ(default=None,
                                                           documentation_pending=True),
                         realm_str: Optional[str]=REQ('realm_str', default=None,
                                                      documentation_pending=True),
                         local_id: Optional[str]=REQ(default=None),
                         queue_id: Optional[str]=REQ(default=None),
                         delivery_type: str=REQ('delivery_type', default='send_now',
                                                documentation_pending=True),
                         defer_until: Optional[str]=REQ('deliver_at', default=None,
                                                        documentation_pending=True),
                         tz_guess: Optional[str]=REQ('tz_guess', default=None,
                                                     documentation_pending=True),
                         ) -> HttpResponse:
    # If req_to is None, then we default to an
    # empty list of recipients.
    message_to: Union[Sequence[int], Sequence[str]] = []

    if req_to is not None:
        if message_type_name == 'stream':
            stream_indicator = extract_stream_indicator(req_to)

            # For legacy reasons check_send_message expects
            # a list of streams, instead of a single stream.
            #
            # Also, mypy can't detect that a single-item
            # list populated from a Union[int, str] is actually
            # a Union[Sequence[int], Sequence[str]].
            if isinstance(stream_indicator, int):
                message_to = [stream_indicator]
            else:
                message_to = [stream_indicator]
        else:
            message_to = extract_private_recipients(req_to)

    # Temporary hack: We're transitioning `forged` from accepting
    # `yes` to accepting `true` like all of our normal booleans.
    forged = forged_str is not None and forged_str in ["yes", "true"]

    client = request.client
    is_super_user = request.user.is_api_super_user
    if forged and not is_super_user:
        return json_error(_("User not authorized for this query"))

    realm = None
    if realm_str and realm_str != user_profile.realm.string_id:
        if not is_super_user:
            # The email gateway bot needs to be able to send messages in
            # any realm.
            return json_error(_("User not authorized for this query"))
        try:
            realm = get_realm(realm_str)
        except Realm.DoesNotExist:
            return json_error(_("Unknown organization '{}'").format(realm_str))

    if client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"]:
        # Here's how security works for mirroring:
        #
        # For private messages, the message must be (1) both sent and
        # received exclusively by users in your realm, and (2)
        # received by the forwarding user.
        #
        # For stream messages, the message must be (1) being forwarded
        # by an API superuser for your realm and (2) being sent to a
        # mirrored stream.
        #
        # The most important security checks are in
        # `create_mirrored_message_users` below, which checks the
        # same-realm constraint.
        if "sender" not in request.POST:
            return json_error(_("Missing sender"))
        if message_type_name != "private" and not is_super_user:
            return json_error(_("User not authorized for this query"))

        # For now, mirroring only works with recipient emails, not for
        # recipient user IDs.
        if not all(isinstance(to_item, str) for to_item in message_to):
            return json_error(_("Mirroring not allowed with recipient user IDs"))

        # We need this manual cast so that mypy doesn't complain about
        # create_mirrored_message_users not being able to accept a
        # Sequence[int] type parameter.
        message_to = cast(Sequence[str], message_to)

        try:
            mirror_sender = create_mirrored_message_users(request, user_profile,
                                                          message_to)
        except InvalidMirrorInput:
            return json_error(_("Invalid mirrored message"))

        if client.name == "zephyr_mirror" and not user_profile.realm.is_zephyr_mirror_realm:
            return json_error(_("Zephyr mirroring is not allowed in this organization"))
        sender = mirror_sender
    else:
        if "sender" in request.POST:
            return json_error(_("Invalid mirrored message"))
        sender = user_profile

    if (delivery_type == 'send_later' or delivery_type == 'remind') and defer_until is None:
        return json_error(_("Missing deliver_at in a request for delayed message delivery"))

    if (delivery_type == 'send_later' or delivery_type == 'remind') and defer_until is not None:
        return handle_deferred_message(sender, client, message_type_name,
                                       message_to, topic_name, message_content,
                                       delivery_type, defer_until, tz_guess,
                                       forwarder_user_profile=user_profile,
                                       realm=realm)

    ret = check_send_message(sender, client, message_type_name, message_to,
                             topic_name, message_content, forged=forged,
                             forged_timestamp=request.POST.get('time'),
                             forwarder_user_profile=user_profile, realm=realm,
                             local_id=local_id, sender_queue_id=queue_id,
                             widget_content=widget_content)
    return json_success({"id": ret})
def config(self) -> str | None:
    return cast("str | None", self.options.config)
def skip(self) -> bool:
    return cast(bool, self.options.skip)
def publication(self) -> 'OGPublication':
    pub = super().publication()
    if pub is None:
        raise Exception('No publication for this element!')
    return typing.cast('OGPublication', pub)
def service(self) -> 'OGService':
    return typing.cast('OGService', super().service())
async def run(
    self, *,
    rng: RandomState,
    historic_individuals: t.Iterable[Individual],
) -> OptimizationResult:
    config: Minimizer = self.config

    total_duration = timer(config.time_source)
    population = self._make_initial_population(rng=rng)
    budget = config.max_nevals

    all_evaluations: t.List[Individual] = []
    all_models = []

    all_evaluations.extend(historic_individuals)

    for ind in population:
        ind.prediction = 0
        ind.expected_improvement = 0.0

    await self._evaluate_all(population, gen=0, rng=rng)
    budget -= len(population)
    all_evaluations.extend(population)

    model = self._fit_next_model(
        all_evaluations, gen=0, prev_model=None, rng=rng)
    all_models.append(model)

    def find_fmin(
        individuals: t.Iterable[Individual], *,
        model: SurrogateModel,
    ) -> float:
        fmin_operator = self._make_fitness_operator(
            with_posterior=self.config.fmin_via_posterior, model=model)
        return min(fmin_operator(ind) for ind in individuals)

    fmin: float = find_fmin(all_evaluations, model=model)

    generation = 0
    while budget > 0:
        generation += 1
        population = self._resize_population(
            population, min(budget, config.popsize), model=model, rng=rng)
        relscale_bound = self._relscale_at_gen(generation)
        relscale = np.clip(model.length_scales(), None, relscale_bound)

        self.outputs.event_new_generation(
            generation,
            relscale=t.cast(t.Tuple[float], tuple(relscale)),
        )

        offspring = self._acquire(
            population, model=model, rng=rng, fmin=fmin, relscale=relscale)

        await self._evaluate_all(offspring, rng=rng, gen=generation)
        budget -= len(offspring)
        all_evaluations.extend(offspring)

        model = self._fit_next_model(
            all_evaluations, gen=generation, rng=rng, prev_model=model)
        all_models.append(model)

        population = self._select(
            parents=population, offspring=offspring, model=model)
        fmin = find_fmin(all_evaluations, model=model)

    return OptimizationResult(
        all_individuals=all_evaluations,
        all_models=all_models,
        duration=total_duration(),
    )
def model(self) -> SidebarModel:
    return cast(SidebarModel, super().model())
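# Pattern note for the accessor above: the override narrows the return type
# that the base class declares; cast() has no runtime cost and only informs
# the type checker. A minimal self-contained sketch (names invented):
from typing import cast

class _Base:
    def value(self) -> object:
        return 42

class _Derived(_Base):
    def value(self) -> int:
        # Narrow the declared return type without any runtime conversion.
        return cast(int, super().value())

assert _Derived().value() == 42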
def make_chunk_iter(
    stream: t.Union[t.Iterable[bytes], t.BinaryIO],
    separator: bytes,
    limit: t.Optional[int] = None,
    buffer_size: int = 10 * 1024,
    cap_at_buffer: bool = False,
) -> t.Iterator[bytes]:
    """Works like :func:`make_line_iter` but accepts a separator which
    divides chunks.  If you want newline based processing you should use
    :func:`make_line_iter` instead as it supports arbitrary newline markers.

    .. versionadded:: 0.8

    .. versionadded:: 0.9
        added support for iterators as input stream.

    .. versionadded:: 0.11.10
        added support for the `cap_at_buffer` parameter.

    :param stream: the stream or iterable to iterate over.
    :param separator: the separator that divides chunks.
    :param limit: the limit in bytes for the stream.  (Usually
                  content length.  Not necessary if the `stream`
                  is otherwise already limited).
    :param buffer_size: The optional buffer size.
    :param cap_at_buffer: if this is set chunks are split if they are longer
                          than the buffer size.  Internally this is
                          implemented that the buffer size might be exhausted
                          by a factor of two however.
    """
    _iter = _make_chunk_iter(stream, limit, buffer_size)

    first_item = next(_iter, b"")

    if not first_item:
        return

    _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter))
    if isinstance(first_item, str):
        separator = _to_str(separator)
        _split = re.compile(f"({re.escape(separator)})").split
        _join = "".join
    else:
        separator = _to_bytes(separator)
        _split = re.compile(b"(" + re.escape(separator) + b")").split
        _join = b"".join

    buffer: t.List[bytes] = []

    while True:
        new_data = next(_iter, b"")

        if not new_data:
            break

        chunks = _split(new_data)
        new_buf: t.List[bytes] = []
        buf_size = 0

        for item in chain(buffer, chunks):
            if item == separator:
                yield _join(new_buf)
                new_buf = []
                buf_size = 0
            else:
                buf_size += len(item)
                new_buf.append(item)

                if cap_at_buffer and buf_size >= buffer_size:
                    rv = _join(new_buf)

                    while len(rv) >= buffer_size:
                        yield rv[:buffer_size]
                        rv = rv[buffer_size:]

                    new_buf = [rv]
                    buf_size = len(rv)

        buffer = new_buf

    if buffer:
        yield _join(buffer)
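# Hedged usage sketch for make_chunk_iter above (assumes werkzeug's internal
# _make_chunk_iter is importable; values worked out from the code, not run):
#
#   data = io.BytesIO(b"a|bb|ccc")
#   list(make_chunk_iter(data, separator=b"|", limit=8))
#   -> [b"a", b"bb", b"ccc"]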
def dg(self) -> "streamlit.delta_generator.DeltaGenerator":
    """Get our DeltaGenerator."""
    return cast("streamlit.delta_generator.DeltaGenerator", self)
def __init__(self, *, body: Any=None, status: int=200,
             reason: Optional[str]=None, text: Optional[str]=None,
             headers: Optional[LooseHeaders]=None,
             content_type: Optional[str]=None,
             charset: Optional[str]=None) -> None:
    if body is not None and text is not None:
        raise ValueError("body and text are not allowed together")

    if headers is None:
        headers = CIMultiDict()
    elif not isinstance(headers, CIMultiDict):
        headers = CIMultiDict(headers)
    else:
        headers = cast('CIMultiDict[str]', headers)

    if content_type is not None and "charset" in content_type:
        raise ValueError("charset must not be in content_type "
                         "argument")

    if text is not None:
        if hdrs.CONTENT_TYPE in headers:
            if content_type or charset:
                raise ValueError("passing both Content-Type header and "
                                 "content_type or charset params "
                                 "is forbidden")
        else:
            # fast path for filling headers
            if not isinstance(text, str):
                raise TypeError("text argument must be str (%r)" %
                                type(text))
            if content_type is None:
                content_type = 'text/plain'
            if charset is None:
                charset = 'utf-8'
            headers[hdrs.CONTENT_TYPE] = (
                content_type + '; charset=' + charset)
            body = text.encode(charset)
            text = None
    else:
        if hdrs.CONTENT_TYPE in headers:
            if content_type is not None or charset is not None:
                raise ValueError("passing both Content-Type header and "
                                 "content_type or charset params "
                                 "is forbidden")
        else:
            if content_type is not None:
                if charset is not None:
                    content_type += '; charset=' + charset
                headers[hdrs.CONTENT_TYPE] = content_type

    super().__init__(status=status, reason=reason, headers=headers)

    if text is not None:
        self.text = text
    else:
        self.body = body

    self._compressed_body = None  # type: Optional[bytes]
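# Hedged usage sketch for the constructor above (aiohttp-style Response):
#
#   resp = Response(text="hello")
#   resp.headers[hdrs.CONTENT_TYPE]  -> "text/plain; charset=utf-8"
#
# Passing both body= and text= raises ValueError, as does combining an
# explicit Content-Type header with the content_type=/charset= parameters.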
def __init__(self, toolpath_object, **kwargs):
    # type: (Dict[Text, Any], **Any) -> None
    """
    kwargs:

    metadata: tool document metadata
    requirements: inherited requirements
    hints: inherited hints
    loader: schema_salad.ref_resolver.Loader used to load tool document
    avsc_names: CWL Avro schema object used to validate document
    strict: flag to determine strict validation (fail on unrecognized fields)
    """
    self.metadata = kwargs.get("metadata", {})  # type: Dict[Text,Any]
    self.names = None  # type: avro.schema.Names

    global SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY  # pylint: disable=global-statement
    if SCHEMA_FILE is None:
        get_schema("v1.0")
        SCHEMA_ANY = cast(Dict[Text, Any],
                          SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/salad#Any"])
        SCHEMA_FILE = cast(Dict[Text, Any],
                           SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#File"])
        SCHEMA_DIR = cast(Dict[Text, Any],
                          SCHEMA_CACHE["v1.0"][3].idx["https://w3id.org/cwl/cwl#Directory"])

    names = schema_salad.schema.make_avro_schema(
        [SCHEMA_FILE, SCHEMA_DIR, SCHEMA_ANY],
        schema_salad.ref_resolver.Loader({}))[0]
    if isinstance(names, avro.schema.SchemaParseException):
        raise names
    else:
        self.names = names
    self.tool = toolpath_object
    self.requirements = (kwargs.get("requirements", []) +
                         self.tool.get("requirements", []))
    self.hints = kwargs.get("hints", []) + self.tool.get("hints", [])
    self.formatgraph = None  # type: Graph
    if "loader" in kwargs:
        self.formatgraph = kwargs["loader"].graph

    self.doc_loader = kwargs["loader"]
    self.doc_schema = kwargs["avsc_names"]

    checkRequirements(self.tool, supportedProcessRequirements)
    self.validate_hints(kwargs["avsc_names"], self.tool.get("hints", []),
                        strict=kwargs.get("strict"))

    self.schemaDefs = {}  # type: Dict[Text,Dict[Text, Any]]

    sd, _ = self.get_requirement("SchemaDefRequirement")

    if sd:
        sdtypes = sd["types"]
        av = schema_salad.schema.make_valid_avro(
            sdtypes, {t["name"]: t for t in avroize_type(sdtypes)}, set())
        for i in av:
            self.schemaDefs[i["name"]] = i
        avro.schema.make_avsc_object(av, self.names)

    # Build record schema from inputs
    self.inputs_record_schema = {
        "name": "input_record_schema", "type": "record",
        "fields": []}  # type: Dict[Text, Any]
    self.outputs_record_schema = {
        "name": "outputs_record_schema", "type": "record",
        "fields": []}  # type: Dict[Text, Any]

    for key in ("inputs", "outputs"):
        for i in self.tool[key]:
            c = copy.copy(i)
            c["name"] = shortname(c["id"])
            del c["id"]

            if "type" not in c:
                raise validate.ValidationException(
                    u"Missing `type` in parameter `%s`" % c["name"])

            if "default" in c and "null" not in aslist(c["type"]):
                c["type"] = ["null"] + aslist(c["type"])
            else:
                c["type"] = c["type"]
            c["type"] = avroize_type(c["type"], c["name"])
            if key == "inputs":
                self.inputs_record_schema["fields"].append(c)
            elif key == "outputs":
                self.outputs_record_schema["fields"].append(c)

    try:
        self.inputs_record_schema = cast(
            Dict[unicode, Any],
            schema_salad.schema.make_valid_avro(self.inputs_record_schema,
                                                {}, set()))
        avro.schema.make_avsc_object(self.inputs_record_schema, self.names)
    except avro.schema.SchemaParseException as e:
        raise validate.ValidationException(
            u"Got error `%s` while processing inputs of %s:\n%s" %
            (Text(e), self.tool["id"],
             json.dumps(self.inputs_record_schema, indent=4)))

    try:
        self.outputs_record_schema = cast(
            Dict[unicode, Any],
            schema_salad.schema.make_valid_avro(self.outputs_record_schema,
                                                {}, set()))
        avro.schema.make_avsc_object(self.outputs_record_schema, self.names)
    except avro.schema.SchemaParseException as e:
        raise validate.ValidationException(
            u"Got error `%s` while processing outputs of %s:\n%s" %
            (Text(e), self.tool["id"],
             json.dumps(self.outputs_record_schema, indent=4)))
def make_line_iter(
    stream: t.Union[t.Iterable[bytes], t.BinaryIO],
    limit: t.Optional[int] = None,
    buffer_size: int = 10 * 1024,
    cap_at_buffer: bool = False,
) -> t.Iterator[bytes]:
    """Safely iterates line-based over an input stream.  If the input stream
    is not a :class:`LimitedStream` the `limit` parameter is mandatory.

    This uses the stream's :meth:`~file.read` method internally as opposed
    to the :meth:`~file.readline` method that is unsafe and can only be used
    in violation of the WSGI specification.  The same problem applies to the
    `__iter__` function of the input stream which calls :meth:`~file.readline`
    without arguments.

    If you need line-by-line processing it's strongly recommended to iterate
    over the input stream using this helper function.

    .. versionchanged:: 0.8
        This function now ensures that the limit was reached.

    .. versionadded:: 0.9
        added support for iterators as input stream.

    .. versionadded:: 0.11.10
        added support for the `cap_at_buffer` parameter.

    :param stream: the stream or iterable to iterate over.
    :param limit: the limit in bytes for the stream.  (Usually
                  content length.  Not necessary if the `stream`
                  is a :class:`LimitedStream`.)
    :param buffer_size: The optional buffer size.
    :param cap_at_buffer: if this is set chunks are split if they are longer
                          than the buffer size.  Internally this is
                          implemented that the buffer size might be exhausted
                          by a factor of two however.
    """
    _iter = _make_chunk_iter(stream, limit, buffer_size)

    first_item = next(_iter, "")

    if not first_item:
        return

    s = _make_encode_wrapper(first_item)
    empty = t.cast(bytes, s(""))
    cr = t.cast(bytes, s("\r"))
    lf = t.cast(bytes, s("\n"))
    crlf = t.cast(bytes, s("\r\n"))

    _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter))

    def _iter_basic_lines() -> t.Iterator[bytes]:
        _join = empty.join
        buffer: t.List[bytes] = []

        while True:
            new_data = next(_iter, "")

            if not new_data:
                break

            new_buf: t.List[bytes] = []
            buf_size = 0

            for item in t.cast(
                t.Iterator[bytes], chain(buffer, new_data.splitlines(True))
            ):
                new_buf.append(item)
                buf_size += len(item)

                if item and item[-1:] in crlf:
                    yield _join(new_buf)
                    new_buf = []
                elif cap_at_buffer and buf_size >= buffer_size:
                    rv = _join(new_buf)

                    while len(rv) >= buffer_size:
                        yield rv[:buffer_size]
                        rv = rv[buffer_size:]

                    new_buf = [rv]

            buffer = new_buf

        if buffer:
            yield _join(buffer)

    # This hackery is necessary to merge 'foo\r' and '\n' into one item
    # of 'foo\r\n' if we were unlucky and we hit a chunk boundary.
    previous = empty

    for item in _iter_basic_lines():
        if item == lf and previous[-1:] == cr:
            previous += item
            item = empty

        if previous:
            yield previous

        previous = item

    if previous:
        yield previous
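# Hedged usage sketch for make_line_iter above (assumes werkzeug's internal
# _make_chunk_iter is importable; values worked out from the code, not run):
#
#   body = io.BytesIO(b"first\r\nsecond\nthird")
#   list(make_line_iter(body, limit=19))
#   -> [b"first\r\n", b"second\n", b"third"]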
def encode(msg: Message) -> bytes:
    """
    Encode a 'Signing' message into bytes.

    :param msg: the message object.
    :return: the bytes.
    """
    msg = cast(SigningMessage, msg)
    message_pb = ProtobufMessage()
    dialogue_message_pb = DialogueMessage()
    signing_msg = signing_pb2.SigningMessage()

    dialogue_message_pb.message_id = msg.message_id
    dialogue_reference = msg.dialogue_reference
    dialogue_message_pb.dialogue_starter_reference = dialogue_reference[0]
    dialogue_message_pb.dialogue_responder_reference = dialogue_reference[1]
    dialogue_message_pb.target = msg.target

    performative_id = msg.performative
    if performative_id == SigningMessage.Performative.SIGN_TRANSACTION:
        performative = signing_pb2.SigningMessage.Sign_Transaction_Performative()  # type: ignore
        terms = msg.terms
        Terms.encode(performative.terms, terms)
        raw_transaction = msg.raw_transaction
        RawTransaction.encode(performative.raw_transaction, raw_transaction)
        signing_msg.sign_transaction.CopyFrom(performative)
    elif performative_id == SigningMessage.Performative.SIGN_MESSAGE:
        performative = signing_pb2.SigningMessage.Sign_Message_Performative()  # type: ignore
        terms = msg.terms
        Terms.encode(performative.terms, terms)
        raw_message = msg.raw_message
        RawMessage.encode(performative.raw_message, raw_message)
        signing_msg.sign_message.CopyFrom(performative)
    elif performative_id == SigningMessage.Performative.SIGNED_TRANSACTION:
        performative = signing_pb2.SigningMessage.Signed_Transaction_Performative()  # type: ignore
        signed_transaction = msg.signed_transaction
        SignedTransaction.encode(performative.signed_transaction,
                                 signed_transaction)
        signing_msg.signed_transaction.CopyFrom(performative)
    elif performative_id == SigningMessage.Performative.SIGNED_MESSAGE:
        performative = signing_pb2.SigningMessage.Signed_Message_Performative()  # type: ignore
        signed_message = msg.signed_message
        SignedMessage.encode(performative.signed_message, signed_message)
        signing_msg.signed_message.CopyFrom(performative)
    elif performative_id == SigningMessage.Performative.ERROR:
        performative = signing_pb2.SigningMessage.Error_Performative()  # type: ignore
        error_code = msg.error_code
        ErrorCode.encode(performative.error_code, error_code)
        signing_msg.error.CopyFrom(performative)
    else:
        raise ValueError(
            "Performative not valid: {}".format(performative_id))

    dialogue_message_pb.content = signing_msg.SerializeToString()
    message_pb.dialogue_message.CopyFrom(dialogue_message_pb)
    message_bytes = message_pb.SerializeToString()
    return message_bytes
def _init_job(self, joborder, **kwargs):
    # type: (Dict[Text, Text], **Any) -> Builder
    """
    kwargs:

    eval_timeout: javascript evaluation timeout
    use_container: do/don't use Docker when DockerRequirement hint provided
    make_fs_access: make an FsAccess() object with given basedir
    basedir: basedir for FsAccess
    docker_outdir: output directory inside docker for this job
    docker_tmpdir: tmpdir inside docker for this job
    docker_stagedir: stagedir inside docker for this job
    outdir: outdir on host for this job
    tmpdir: tmpdir on host for this job
    stagedir: stagedir on host for this job
    select_resources: callback to select compute resources
    """
    builder = Builder()
    builder.job = cast(Dict[Text, Union[Dict[Text, Any], List, Text]],
                       copy.deepcopy(joborder))

    # Validate job order
    try:
        fillInDefaults(self.tool[u"inputs"], builder.job)
        normalizeFilesDirs(builder.job)
        validate.validate_ex(self.names.get_name("input_record_schema", ""),
                             builder.job, strict=False,
                             logger=_logger_validation_warnings)
    except (validate.ValidationException, WorkflowException) as e:
        raise WorkflowException("Invalid job input record:\n" + Text(e))

    builder.files = []
    builder.bindings = CommentedSeq()
    builder.schemaDefs = self.schemaDefs
    builder.names = self.names
    builder.requirements = self.requirements
    builder.hints = self.hints
    builder.resources = {}
    builder.timeout = kwargs.get("eval_timeout")
    builder.debug = kwargs.get("debug")
    builder.mutation_manager = kwargs.get("mutation_manager")

    dockerReq, is_req = self.get_requirement("DockerRequirement")

    builder.make_fs_access = kwargs.get("make_fs_access") or StdFsAccess
    builder.fs_access = builder.make_fs_access(kwargs["basedir"])

    loadListingReq, _ = self.get_requirement(
        "http://commonwl.org/cwltool#LoadListingRequirement")
    if loadListingReq:
        builder.loadListing = loadListingReq.get("loadListing")

    if dockerReq and kwargs.get("use_container"):
        builder.outdir = builder.fs_access.realpath(
            dockerReq.get("dockerOutputDirectory") or
            kwargs.get("docker_outdir") or "/var/spool/cwl")
        builder.tmpdir = builder.fs_access.realpath(
            kwargs.get("docker_tmpdir") or "/tmp")
        builder.stagedir = builder.fs_access.realpath(
            kwargs.get("docker_stagedir") or "/var/lib/cwl")
    else:
        builder.outdir = builder.fs_access.realpath(
            kwargs.get("outdir") or tempfile.mkdtemp())
        builder.tmpdir = builder.fs_access.realpath(
            kwargs.get("tmpdir") or tempfile.mkdtemp())
        builder.stagedir = builder.fs_access.realpath(
            kwargs.get("stagedir") or tempfile.mkdtemp())

    if self.formatgraph:
        for i in self.tool["inputs"]:
            d = shortname(i["id"])
            if d in builder.job and i.get("format"):
                checkFormat(builder.job[d], builder.do_eval(i["format"]),
                            self.formatgraph)

    builder.bindings.extend(builder.bind_input(self.inputs_record_schema,
                                               builder.job))

    if self.tool.get("baseCommand"):
        for n, b in enumerate(aslist(self.tool["baseCommand"])):
            builder.bindings.append({
                "position": [-1000000, n],
                "datum": b
            })

    if self.tool.get("arguments"):
        for i, a in enumerate(self.tool["arguments"]):
            lc = self.tool["arguments"].lc.data[i]
            fn = self.tool["arguments"].lc.filename
            builder.bindings.lc.add_kv_line_col(len(builder.bindings), lc)
            if isinstance(a, dict):
                a = copy.copy(a)
                if a.get("position"):
                    a["position"] = [a["position"], i]
                else:
                    a["position"] = [0, i]
                builder.bindings.append(a)
            elif ("$(" in a) or ("${" in a):
                cm = CommentedMap((
                    ("position", [0, i]),
                    ("valueFrom", a)
                ))
                cm.lc.add_kv_line_col("valueFrom", lc)
                cm.lc.filename = fn
                builder.bindings.append(cm)
            else:
                cm = CommentedMap((
                    ("position", [0, i]),
                    ("datum", a)
                ))
                cm.lc.add_kv_line_col("datum", lc)
                cm.lc.filename = fn
                builder.bindings.append(cm)

    builder.bindings.sort(key=lambda a: a["position"])

    builder.resources = self.evalResources(builder, kwargs)

    return builder
def to_list(x: Union[Collection[Any], pd.Series]) -> List[Any]:
    if isinstance(x, ABCCollection):
        return list(x)
    elif isinstance(x, pd.Series):
        return cast(List[Any], x.to_list())
    raise NotImplementedError(f"Could not convert {type(x).__name__} into a list")
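# Usage sketch for to_list above (assumes pandas is installed):
import pandas as pd

assert to_list((1, 2, 3)) == [1, 2, 3]       # any Collection
assert to_list(pd.Series([4, 5])) == [4, 5]  # pandas Series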
async def _async_load_data(self) -> dict | None:
    """Load the data."""
    return cast(Optional[dict], await self.store.async_load())
def _load_trace_provider(provider: str) -> "TracerProvider":
    return cast("TracerProvider", _load_provider(provider))
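# Note on the pattern above: cast() accepts the target type as a string, so
# "TracerProvider" never has to be imported at runtime; cast() performs no
# conversion or checking either way. Minimal illustration (names invented):
from typing import cast

def _as_int(raw: object) -> int:
    # No runtime effect: the caller must really pass an int.
    return cast(int, raw)

assert _as_int(8080) == 8080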
def _scan_dir_of_paths_files(dir_of_paths_files: str) -> List[str]:
    fullpaths = [os.path.abspath(p.path)
                 for p in os.scandir(dir_of_paths_files)]
    return cast(List[str], natsort.natsorted(fullpaths))
import os

from typing import cast

from selfdrive.hardware.base import HardwareBase
from selfdrive.hardware.eon.hardware import Android
from selfdrive.hardware.tici.hardware import Tici
from selfdrive.hardware.pc.hardware import Pc

EON = os.path.isfile('/EON')
TICI = os.path.isfile('/TICI')
PC = not (EON or TICI)

if EON:
    HARDWARE = cast(HardwareBase, Android())
elif TICI:
    HARDWARE = cast(HardwareBase, Tici())
else:
    HARDWARE = cast(HardwareBase, Pc())
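# The casts above give HARDWARE one static type (HardwareBase) even though the
# concrete class differs per platform, so call sites type-check uniformly:
#
#   HARDWARE.reboot()   # hypothetical call; dispatches to the platform's class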
def loss_fn(
    model,
    example,
    static_metadata=None,  # pylint:disable=bad-whitespace
    num_goals=1):
    """Computes the loss function for an automaton for a random maze goal.

    Args:
      model: Module that computes an additional action matrix for the maze
        agent.
      example: Tuple of a maze and an example id, which is used to select a
        goal location.
      static_metadata: Unused static metadata.
      num_goals: How many random goals to choose.

    Returns:
      Negative of average value across all (valid) possible initial states,
      along with some metrics.
    """
    assert static_metadata is None

    graph, example_id = example
    num_valid_nodes = graph.graph_metadata.num_nodes

    # Pick random targets.
    # TODO(ddjohnson) Remove the cast once JAX fixes its types.
    targets = jax.random.randint(
        jax.random.PRNGKey(typing.cast(int, example_id)), (num_goals,), 0,
        num_valid_nodes)

    # Compute automaton outputs, ordered as [source, dest, action]
    option_actions = model(
        graph.automaton_graph,
        dynamic_metadata=graph.graph_metadata).transpose((1, 2, 0))
    static_num_nodes, _, num_options = option_actions.shape

    # Rewrite failure as staying in place, since we want to make sure the
    # agent can't do "terminate episode here" as an action. (Note: we could
    # disallow the automaton to take a fail action, but that wouldn't prevent
    # the automaton from just never accepting and waiting until the solve
    # times out.)
    success_prob = jnp.sum(option_actions, axis=1, keepdims=True)
    option_actions_rewrite = (
        option_actions +
        (1 - success_prob) * jnp.eye(static_num_nodes)[:, :, None])
    # Only do the rewrite if there's missing probability mass. If we have too
    # much mass (from numerical issues) then just re-normalize.
    option_actions = jnp.where(success_prob < 1, option_actions_rewrite,
                               option_actions / success_prob)

    # Construct the node-to-node action matrix.
    primitive_actions = graph.edges.apply_add(
        in_array=jnp.eye(4),
        out_array=jnp.zeros([static_num_nodes, static_num_nodes, 4]),
        in_dims=(0,),
        out_dims=(0, 1))

    all_actions = jnp.concatenate([primitive_actions, option_actions],
                                  axis=-1)

    # Get soft values for each target.
    soft_v_values, _, policy = jax.vmap(soft_maze_values,
                                        (None, 0))(all_actions, targets)

    # Average over possible starting positions.
    valid_mask = jnp.arange(static_num_nodes) < num_valid_nodes
    average_expected_reward = jnp.sum(
        jnp.where(valid_mask[None, :], soft_v_values,
                  jnp.zeros_like(soft_v_values))) / (
                      num_goals * num_valid_nodes)

    loss = -average_expected_reward

    # Compute additional metrics:
    # - Entropy of the automaton outputs
    valid_pair_mask = (valid_mask[:, None, None] & valid_mask[None, :, None])
    ok_mask = ((option_actions > 0) & valid_pair_mask)
    p_log_p = jnp.where(ok_mask, option_actions * jnp.log(option_actions), 0)
    option_entropy = -jnp.sum(p_log_p) / (4 * num_valid_nodes)

    # - Probability of taking an automaton action
    prob_of_choosing_any_option = (
        jnp.sum(jnp.where(valid_mask[None, :, None], policy[:, :, 4:], 0)) /
        (num_goals * num_valid_nodes))

    # - How distinct the automaton actions are, measured as the mutual
    #   information between destination and automaton action, conditioned on
    #   the source, and assuming we choose automaton actions uniformly at
    #   random.
    joint_action_and_destination = option_actions / num_options
    marginal_destinations = jnp.sum(joint_action_and_destination, -1)
    mutual_info = joint_action_and_destination * (
        jnp.log(joint_action_and_destination) + jnp.log(num_options) -
        jnp.log(marginal_destinations[:, :, None]))
    mutual_info = jnp.where(
        (joint_action_and_destination > 0) & valid_pair_mask, mutual_info, 0)
    mutual_info = jnp.sum(mutual_info) / num_valid_nodes

    return loss, {
        "option_entropy_of_dest": option_entropy,
        "prob_of_choosing_any_option": prob_of_choosing_any_option,
        "mutual_info_option_and_dest": mutual_info,
    }
def get_vm(self, at_header: BlockHeaderAPI = None) -> PausingVMAPI:
    vm = cast(PausingVMAPI, super().get_vm(at_header))
    if self._first_vm is None:
        self._first_vm = vm
    return vm
def _load_meter_provider(provider: str) -> "MeterProvider":
    return cast("MeterProvider", _load_provider(provider))
def extrapolate_effect(self: TSelf, factor: float) -> TSelf:
    if self.is_parameterized():
        raise ValueError("Parameterized. Don't know how to extrapolate.")
    return self._with_exponent(
        exponent=cast(float, self._exponent) * factor)
def _generate_restapi(self, resource, template):
    # type: (models.RestAPI, Dict[str, Any]) -> None
    # typechecker happiness
    swagger_doc = cast(Dict, resource.swagger_doc)
    template['data'].setdefault(
        'template_file', {}).setdefault(
            'chalice_api_swagger', {})['template'] = json.dumps(swagger_doc)

    template['resource'].setdefault('aws_api_gateway_rest_api', {})[
        resource.resource_name] = {
            'body': '${data.template_file.chalice_api_swagger.rendered}',
            # Terraform will diff explicitly configured attributes
            # to the current state of the resource. Attributes configured
            # via swagger on the REST api need to be duplicated here, else
            # terraform will set them back to empty.
            'name': swagger_doc['info']['title'],
            'binary_media_types': swagger_doc[
                'x-amazon-apigateway-binary-media-types'],
            'endpoint_configuration': {'types': [resource.endpoint_type]}
        }

    if 'x-amazon-apigateway-policy' in swagger_doc:
        template['resource'][
            'aws_api_gateway_rest_api'][
                resource.resource_name]['policy'] = swagger_doc[
                    'x-amazon-apigateway-policy']
    if resource.minimum_compression.isdigit():
        template['resource'][
            'aws_api_gateway_rest_api'][
                resource.resource_name][
                    'minimum_compression_size'] = int(
                        resource.minimum_compression)

    template['resource'].setdefault('aws_api_gateway_deployment', {})[
        resource.resource_name] = {
            'stage_name': resource.api_gateway_stage,
            # Ensure that the deployment gets redeployed if we update
            # the swagger description for the api by using its checksum
            # in the stage description.
            'stage_description': (
                "${md5(data.template_file.chalice_api_swagger.rendered)}"),
            'rest_api_id': '${aws_api_gateway_rest_api.%s.id}' % (
                resource.resource_name),
        }

    template['resource'].setdefault('aws_lambda_permission', {})[
        resource.resource_name + '_invoke'] = {
            'function_name': resource.lambda_function.function_name,
            'action': 'lambda:InvokeFunction',
            'principal': 'apigateway.amazonaws.com',
            'source_arn': "${aws_api_gateway_rest_api.%s.execution_arn}/*" % (
                resource.resource_name)
        }

    template.setdefault('output', {})[
        'EndpointURL'] = {
            'value': '${aws_api_gateway_deployment.%s.invoke_url}' % (
                resource.resource_name)
        }

    for auth in resource.authorizers:
        template['resource']['aws_lambda_permission'][
            auth.resource_name + '_invoke'] = {
                'function_name': auth.function_name,
                'action': 'lambda:InvokeFunction',
                'principal': 'apigateway.amazonaws.com',
                'source_arn': (
                    "${aws_api_gateway_rest_api.%s.execution_arn}" % (
                        resource.resource_name) + "/*")
            }
def predict(
    self,
    X: ArrayLike,
    ensemble: bool = False,
    alpha: Optional[Union[float, Iterable[float]]] = None,
) -> Union[NDArray, Tuple[NDArray, NDArray]]:
    """
    Predict target on new samples with confidence intervals.

    Residuals from the training set and predictions from the model clones
    are central to the computation.
    Prediction intervals for a given ``alpha`` are deduced from either

    - quantiles of residuals (naive and base methods),
    - quantiles of (predictions +/- residuals) (plus method),
    - quantiles of (max/min(predictions) +/- residuals) (minmax method).

    Parameters
    ----------
    X : ArrayLike of shape (n_samples, n_features)
        Test data.

    ensemble: bool
        Boolean determining whether the predictions are ensembled or not.
        If ``False``, predictions are those of the model trained on the
        whole training set.
        If ``True``, predictions from perturbed models are aggregated by
        the aggregation function specified in the ``agg_function``
        attribute.
        If ``cv`` is ``"prefit"``, ``ensemble`` is ignored.
        By default ``False``.

    alpha: Optional[Union[float, Iterable[float]]]
        Can be a float, a list of floats, or an ``ArrayLike`` of floats.
        Between 0 and 1, it represents the uncertainty of the confidence
        interval. A lower ``alpha`` produces a larger (more conservative)
        prediction interval. ``alpha`` is the complement of the target
        coverage level. By default ``None``.

    Returns
    -------
    Union[NDArray, Tuple[NDArray, NDArray]]
        - NDArray of shape (n_samples,) if ``alpha`` is ``None``.
        - Tuple[NDArray, NDArray] of shapes (n_samples,) and
          (n_samples, 2, n_alpha) if ``alpha`` is not ``None``.
            - [:, 0, :]: lower bound of the prediction interval.
            - [:, 1, :]: upper bound of the prediction interval.
    """
    # Checks
    check_is_fitted(self, self.fit_attributes)
    self._check_ensemble(ensemble)
    alpha = cast(Optional[NDArray], check_alpha(alpha))

    y_pred = self.single_estimator_.predict(X)
    n = len(self.conformity_scores_)

    if alpha is None:
        return np.array(y_pred)

    alpha_np = cast(NDArray, alpha)
    check_alpha_and_n_samples(alpha_np, n)
    if self.method in ["naive", "base"] or self.cv == "prefit":
        quantile = np_quantile(
            self.conformity_scores_, 1 - alpha_np, method="higher"
        )
        y_pred_low = y_pred[:, np.newaxis] - quantile
        y_pred_up = y_pred[:, np.newaxis] + quantile
    else:
        y_pred_multi = np.column_stack(
            [e.predict(X) for e in self.estimators_]
        )

        # At this point, y_pred_multi is of shape
        # (n_samples_test, n_estimators_). If ``method`` is "plus":
        # - if ``cv`` is not a ``Subsample``, we enforce y_pred_multi to
        #   be of shape (n_samples_test, n_samples_train), thanks to the
        #   fold identifiers;
        # - if ``cv`` is a ``Subsample``, the method
        #   ``aggregate_with_mask`` resizes it appropriately, thanks to
        #   the shape of ``k_``.
        y_pred_multi = self.aggregate_with_mask(y_pred_multi, self.k_)

        if self.method == "plus":
            lower_bounds = y_pred_multi - self.conformity_scores_
            upper_bounds = y_pred_multi + self.conformity_scores_

        if self.method == "minmax":
            lower_bounds = np.min(y_pred_multi, axis=1, keepdims=True)
            upper_bounds = np.max(y_pred_multi, axis=1, keepdims=True)
            lower_bounds = lower_bounds - self.conformity_scores_
            upper_bounds = upper_bounds + self.conformity_scores_

        y_pred_low = np.column_stack(
            [
                np_quantile(
                    ma.masked_invalid(lower_bounds),
                    _alpha,
                    axis=1,
                    method="lower",
                )
                for _alpha in alpha_np
            ]
        ).data
        y_pred_up = np.column_stack(
            [
                np_quantile(
                    ma.masked_invalid(upper_bounds),
                    1 - _alpha,
                    axis=1,
                    method="higher",
                )
                for _alpha in alpha_np
            ]
        ).data
        if ensemble:
            y_pred = aggregate_all(self.agg_function, y_pred_multi)
    return y_pred, np.stack([y_pred_low, y_pred_up], axis=1)
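# A minimal usage sketch, assuming the surrounding class is MAPIE's
# MapieRegressor (the import path follows MAPIE's public API; the data and
# hyperparameters are invented for illustration).
import numpy as np
from sklearn.linear_model import LinearRegression
from mapie.regression import MapieRegressor

rng = np.random.default_rng(0)
X = rng.normal(size=(100, 1))
y = 2.0 * X[:, 0] + rng.normal(scale=0.5, size=100)

mapie = MapieRegressor(LinearRegression(), method="plus", cv=5)
mapie.fit(X, y)
# With alpha given, predict returns (point predictions, intervals);
# y_pis has shape (n_samples, 2, n_alpha) = (100, 2, 1) here.
y_pred, y_pis = mapie.predict(X, alpha=0.1)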
def _get_base_path(self) -> str: return cast(str, urlsplit(self.location).path)
def getOglLinks(self, xmlOglLinks: NodeList, oglClasses: OglObjects) -> OglLinks:
    """
    Extract the OglLinks that connect the OglClasses.

    Args:
        xmlOglLinks:    A DOM node list of links
        oglClasses:     The OglClasses

    Returns:
        The OglLinks list
    """
    oglLinks: OglLinks = cast(OglLinks, [])
    for xmlLink in xmlOglLinks:
        # source and destination anchor positions
        xmlLink: Element = cast(Element, xmlLink)
        sx = PyutUtils.secureFloat(xmlLink.getAttribute(PyutXmlConstants.ATTR_LINK_SOURCE_ANCHOR_X))
        sy = PyutUtils.secureFloat(xmlLink.getAttribute(PyutXmlConstants.ATTR_LINK_SOURCE_ANCHOR_Y))
        dx = PyutUtils.secureFloat(xmlLink.getAttribute(PyutXmlConstants.ATTR_LINK_DESTINATION_ANCHOR_X))
        dy = PyutUtils.secureFloat(xmlLink.getAttribute(PyutXmlConstants.ATTR_LINK_DESTINATION_ANCHOR_Y))

        spline: bool = PyutUtils.secureBoolean(xmlLink.getAttribute(PyutXmlConstants.ATTR_SPLINE))

        # get the associated PyutLink
        srcId, dstId, assocPyutLink = self._getPyutLink(xmlLink)

        try:
            src: OglClass = oglClasses[srcId]
            dst: OglClass = oglClasses[dstId]
        except KeyError as ke:
            self.logger.error(f'Developer Error -- srcId: {srcId} - dstId: {dstId} error: {ke}')
            continue

        linkType: LinkType = assocPyutLink.getType()
        pyutLink: PyutLink = PyutLink(name=assocPyutLink.getName(),
                                      linkType=linkType,
                                      cardSrc=assocPyutLink.sourceCardinality,
                                      cardDest=assocPyutLink.destinationCardinality,
                                      source=src.getPyutObject(), destination=dst.getPyutObject())

        oglLinkFactory = getOglLinkFactory()
        oglLink = oglLinkFactory.getOglLink(src, pyutLink, dst, linkType)
        src.addLink(oglLink)
        dst.addLink(oglLink)
        oglLinks.append(oglLink)
        oglLink.SetSpline(spline)

        # put the anchors at the right positions
        srcAnchor = oglLink.GetSource()
        dstAnchor = oglLink.GetDestination()
        srcAnchor.SetPosition(sx, sy)
        dstAnchor.SetPosition(dx, dy)

        # add the control points to the line
        line = srcAnchor.GetLines()[0]  # only 1 line per anchor in pyut
        parent = line.GetSource().GetParent()
        selfLink = parent is line.GetDestination().GetParent()

        controlPoints: ControlPoints = self._generateControlPoints(xmlLink)
        for controlPoint in controlPoints:
            line.AddControl(controlPoint)
            if selfLink:
                x, y = controlPoint.GetPosition()
                controlPoint.SetParent(parent)
                controlPoint.SetPosition(x, y)

        if isinstance(oglLink, OglAssociation):
            self.__furtherCustomizeAssociationLink(xmlLink, oglLink)
        self._reconstituteLinkDataModel(oglLink)

    return oglLinks
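# A self-contained sketch of the anchor-attribute extraction above, using
# xml.dom.minidom. The attribute names stand in for the PyutXmlConstants
# values (which are not shown in this snippet) and the XML fragment is
# invented for illustration.
from xml.dom.minidom import parseString

doc = parseString(
    '<OglLink sourceAnchorX="100.0" sourceAnchorY="150.0" '
    'destinationAnchorX="300.0" destinationAnchorY="150.0" spline="False"/>'
)
xmlLink = doc.documentElement
sx = float(xmlLink.getAttribute('sourceAnchorX'))
sy = float(xmlLink.getAttribute('sourceAnchorY'))
spline = xmlLink.getAttribute('spline') == 'True'
print(sx, sy, spline)  # 100.0 150.0 False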
def matrix(self) -> np.ndarray:
    if self.is_parameterized():
        raise ValueError("Parameterized. Don't have a known matrix.")
    e = cast(float, self._exponent)
    # Sum the phased eigencomponents over an explicit list; passing a bare
    # generator to np.sum is deprecated in numpy.
    return np.sum([1j**(half_turns * e * 2) * component
                   for half_turns, component in self._eigen_components()],
                  axis=0)
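# A hedged sketch of the eigencomponent reconstruction above, using the Z
# gate as an example. The components (0, |0><0|) and (1, |1><1|) are assumed,
# matching the phase convention 1j**(2 * half_turns * exponent).
import numpy as np

eigen_components = [(0.0, np.diag([1.0, 0.0])),   # +1 eigenspace
                    (1.0, np.diag([0.0, 1.0]))]   # -1 eigenspace

def z_power(e: float) -> np.ndarray:
    return np.sum([1j**(half_turns * e * 2) * component
                   for half_turns, component in eigen_components], axis=0)

print(z_power(1.0))  # diag(1, -1): the Z matrix
print(z_power(0.5))  # diag(1, 1j): the S gate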
def _on_register(self, message: TacMessage) -> None:
    """
    Handle a register message.

    If the registration is invalid (the agent name is not whitelisted, or
    the address or the name is already registered), answer with an error
    message and do not register the agent.

    :param message: the register TacMessage.
    :return: None
    """
    parameters = cast(Parameters, self.context.parameters)
    agent_name = message.agent_name
    if len(parameters.whitelist) != 0 and agent_name not in parameters.whitelist:
        self.context.logger.warning(
            "[{}]: Agent name not in whitelist: '{}'".format(
                self.context.agent_name, agent_name))
        tac_msg = TacMessage(
            performative=TacMessage.Performative.TAC_ERROR,
            error_code=TacMessage.ErrorCode.AGENT_NAME_NOT_IN_WHITELIST,
        )
        self.context.outbox.put_message(
            to=message.counterparty,
            sender=self.context.agent_address,
            protocol_id=TacMessage.protocol_id,
            message=TacSerializer().encode(tac_msg),
        )
        return

    game = cast(Game, self.context.game)
    if message.counterparty in game.registration.agent_addr_to_name:
        self.context.logger.warning(
            "[{}]: Agent already registered: '{}'".format(
                self.context.agent_name,
                game.registration.agent_addr_to_name[message.counterparty],
            ))
        tac_msg = TacMessage(
            performative=TacMessage.Performative.TAC_ERROR,
            error_code=TacMessage.ErrorCode.AGENT_ADDR_ALREADY_REGISTERED,
        )
        self.context.outbox.put_message(
            to=message.counterparty,
            sender=self.context.agent_address,
            protocol_id=TacMessage.protocol_id,
            message=TacSerializer().encode(tac_msg),
        )
        return

    if agent_name in game.registration.agent_addr_to_name.values():
        self.context.logger.warning(
            "[{}]: Agent with this name already registered: '{}'".format(
                self.context.agent_name, agent_name))
        tac_msg = TacMessage(
            performative=TacMessage.Performative.TAC_ERROR,
            error_code=TacMessage.ErrorCode.AGENT_NAME_ALREADY_REGISTERED,
        )
        self.context.outbox.put_message(
            to=message.counterparty,
            sender=self.context.agent_address,
            protocol_id=TacMessage.protocol_id,
            message=TacSerializer().encode(tac_msg),
        )
        return

    game.registration.register_agent(message.counterparty, agent_name)
    self.context.logger.info("[{}]: Agent registered: '{}'".format(
        self.context.agent_name, agent_name))
def fit( self, X: ArrayLike, y: ArrayLike, sample_weight: Optional[ArrayLike] = None, ) -> MapieRegressor: """ Fit estimator and compute residuals used for prediction intervals. Fit the base estimator under the ``single_estimator_`` attribute. Fit all cross-validated estimator clones and rearrange them into a list, the ``estimators_`` attribute. Out-of-fold residuals are stored under the ``conformity_scores_`` attribute. Parameters ---------- X : ArrayLike of shape (n_samples, n_features) Training data. y : ArrayLike of shape (n_samples,) Training labels. sample_weight : Optional[ArrayLike] of shape (n_samples,) Sample weights for fitting the out-of-fold models. If None, then samples are equally weighted. If some weights are null, their corresponding observations are removed before the fitting process and hence have no residuals. If weights are non-uniform, residuals are still uniformly weighted. By default ``None``. Returns ------- MapieRegressor The model itself. """ # Checks self._check_parameters() cv = check_cv(self.cv) estimator = self._check_estimator(self.estimator) agg_function = self._check_agg_function(self.agg_function) X, y = indexable(X, y) y = _check_y(y) self.n_features_in_ = check_n_features_in(X, cv, estimator) sample_weight, X, y = check_null_weight(sample_weight, X, y) y = cast(NDArray, y) n_samples = _num_samples(y) # Initialization self.estimators_: List[RegressorMixin] = [] # Work if cv == "prefit": self.single_estimator_ = estimator y_pred = self.single_estimator_.predict(X) self.k_ = np.full( shape=(n_samples, 1), fill_value=np.nan, dtype="float" ) else: cv = cast(BaseCrossValidator, cv) self.k_ = np.full( shape=(n_samples, cv.get_n_splits(X, y)), fill_value=np.nan, dtype=float, ) pred_matrix = np.full( shape=(n_samples, cv.get_n_splits(X, y)), fill_value=np.nan, dtype=float, ) self.single_estimator_ = fit_estimator( clone(estimator), X, y, sample_weight ) if self.method == "naive": y_pred = self.single_estimator_.predict(X) else: outputs = Parallel(n_jobs=self.n_jobs, verbose=self.verbose)( delayed(self._fit_and_predict_oof_model)( clone(estimator), X, y, train_index, val_index, sample_weight, ) for train_index, val_index in cv.split(X) ) self.estimators_, predictions, val_indices = map( list, zip(*outputs) ) for i, val_ind in enumerate(val_indices): pred_matrix[val_ind, i] = np.array(predictions[i]) self.k_[val_ind, i] = 1 check_nan_in_aposteriori_prediction(pred_matrix) y_pred = aggregate_all(agg_function, pred_matrix) self.conformity_scores_ = np.abs(y - y_pred) return self
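# A toy sketch of the out-of-fold bookkeeping in the fit method above: each
# clone predicts only on its validation fold, so pred_matrix holds exactly one
# non-NaN entry per sample and k_ marks the fold that produced it. The data,
# the fold structure, and the mean aggregation are invented for illustration.
import numpy as np
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import KFold

X = np.arange(12, dtype=float).reshape(-1, 1)
y = 3.0 * X[:, 0] + 1.0
cv = KFold(n_splits=3)

pred_matrix = np.full((len(y), cv.get_n_splits()), np.nan)
k = np.full_like(pred_matrix, np.nan)
for i, (train, val) in enumerate(cv.split(X)):
    est = LinearRegression().fit(X[train], y[train])
    pred_matrix[val, i] = est.predict(X[val])
    k[val, i] = 1

y_oof = np.nanmean(pred_matrix, axis=1)   # one out-of-fold prediction per sample
conformity_scores = np.abs(y - y_oof)     # out-of-fold residuals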