def get_story_update_epic_body(payload: WildValue, action: WildValue) -> str:
    kwargs = {
        "story_name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
    }

    epic_id = action["changes"]["epic_id"]
    new_id = epic_id.get("new").tame(check_none_or(check_int))
    old_id = epic_id.get("old").tame(check_none_or(check_int))

    for ref in payload["references"]:
        if ref["id"].tame(check_string_or_int) == new_id:
            kwargs["new_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=ref["name"].tame(check_string),
            )

        if ref["id"].tame(check_string_or_int) == old_id:
            kwargs["old_epic_name_template"] = EPIC_NAME_TEMPLATE.format(
                name=ref["name"].tame(check_string),
            )

    if new_id and old_id:
        return STORY_EPIC_CHANGED_TEMPLATE.format(**kwargs)
    elif new_id:
        kwargs["epic_name_template"] = kwargs["new_epic_name_template"]
        kwargs["operation"] = "added to"
    else:
        kwargs["epic_name_template"] = kwargs["old_epic_name_template"]
        kwargs["operation"] = "removed from"

    return STORY_ADDED_REMOVED_EPIC_TEMPLATE.format(**kwargs)
def test_check_none_or(self):
    # type: () -> None
    x = 5  # type: Any
    self.assertEqual(check_none_or(check_int)('x', x), None)

    x = None
    self.assertEqual(check_none_or(check_int)('x', x), None)

    x = 'x'
    self.assertEqual(check_none_or(check_int)('x', x), 'x is not an integer')
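For context on the semantics the test above exercises: in this older, string-returning validator style, check_none_or wraps another validator and treats None as valid, deferring to the wrapped validator otherwise. The following is only a minimal sketch of such a wrapper, assuming the (var_name, val) -> Optional[str] convention shown in the test; it is not necessarily Zulip's exact implementation.

from typing import Any, Callable, Optional

# Old-style validator: takes (var_name, val) and returns None on success
# or an error string on failure. (Assumed convention, based on the test above.)
Validator = Callable[[str, Any], Optional[str]]

def check_none_or(sub_validator: Validator) -> Validator:
    def f(var_name: str, val: Any) -> Optional[str]:
        # Accept None outright; otherwise defer to the wrapped validator.
        if val is None:
            return None
        return sub_validator(var_name, val)
    return f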
def remote_server_post_analytics(request: HttpRequest,
                                 entity: Union[UserProfile, RemoteZulipServer],
                                 realm_counts: List[Dict[str, Any]]=REQ(
                                     validator=check_list(check_dict_only([
                                         ('property', check_string),
                                         ('realm', check_int),
                                         ('id', check_int),
                                         ('end_time', check_float),
                                         ('subgroup', check_none_or(check_string)),
                                         ('value', check_int),
                                     ]))),
                                 installation_counts: List[Dict[str, Any]]=REQ(
                                     validator=check_list(check_dict_only([
                                         ('property', check_string),
                                         ('id', check_int),
                                         ('end_time', check_float),
                                         ('subgroup', check_none_or(check_string)),
                                         ('value', check_int),
                                     ])))) -> HttpResponse:
    validate_entity(entity)
    server = cast(RemoteZulipServer, entity)

    validate_count_stats(server, RemoteRealmCount, realm_counts)
    validate_count_stats(server, RemoteInstallationCount, installation_counts)

    BATCH_SIZE = 1000
    while len(realm_counts) > 0:
        batch = realm_counts[0:BATCH_SIZE]
        realm_counts = realm_counts[BATCH_SIZE:]

        objects_to_create = []
        for item in batch:
            objects_to_create.append(RemoteRealmCount(
                property=item['property'],
                realm_id=item['realm'],
                remote_id=item['id'],
                server=server,
                end_time=datetime.datetime.fromtimestamp(item['end_time'], tz=timezone_utc),
                subgroup=item['subgroup'],
                value=item['value']))
        RemoteRealmCount.objects.bulk_create(objects_to_create)

    while len(installation_counts) > 0:
        batch = installation_counts[0:BATCH_SIZE]
        installation_counts = installation_counts[BATCH_SIZE:]

        objects_to_create = []
        for item in batch:
            objects_to_create.append(RemoteInstallationCount(
                property=item['property'],
                remote_id=item['id'],
                server=server,
                end_time=datetime.datetime.fromtimestamp(item['end_time'], tz=timezone_utc),
                subgroup=item['subgroup'],
                value=item['value']))
        RemoteInstallationCount.objects.bulk_create(objects_to_create)

    return json_success()
def generate_multiuse_invite_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    invite_expires_in_days: Optional[int] = REQ(
        json_validator=check_none_or(check_int),
        default=settings.INVITATION_LINK_VALIDITY_DAYS),
    invite_as: int = REQ(json_validator=check_int,
                         default=PreregistrationUser.INVITE_AS["MEMBER"]),
    stream_ids: Sequence[int] = REQ(json_validator=check_list(check_int), default=[]),
) -> HttpResponse:
    check_if_owner_required(invite_as, user_profile)

    streams = []
    for stream_id in stream_ids:
        try:
            (stream, sub) = access_stream_by_id(user_profile, stream_id)
        except JsonableError:
            raise JsonableError(
                _("Invalid stream id {}. No invites were sent.").format(stream_id))
        streams.append(stream)

    invite_link = do_create_multiuse_invite_link(user_profile, invite_as,
                                                 invite_expires_in_days, streams)
    return json_success(request, data={"invite_link": invite_link})
def get_opened_or_update_pull_request_body(helper: Helper) -> str:
    payload = helper.payload
    include_title = helper.include_title
    pull_request = payload["pull_request"]
    action = payload["action"].tame(check_string)
    if action == "synchronize":
        action = "updated"
    assignee = None
    if pull_request.get("assignee"):
        assignee = pull_request["assignee"]["login"].tame(check_string)
    description = None
    changes = payload.get("changes", {})
    if "body" in changes or action == "opened":
        description = pull_request["body"].tame(check_none_or(check_string))

    return get_pull_request_event_message(
        get_sender_name(payload),
        action,
        pull_request["html_url"].tame(check_string),
        target_branch=pull_request["head"]["ref"].tame(check_string),
        base_branch=pull_request["base"]["ref"].tame(check_string),
        message=description,
        assignee=assignee,
        number=pull_request["number"].tame(check_int),
        title=pull_request["title"].tame(check_string) if include_title else None,
    )
def test_create_bot(self):
    bot_created_checker = check_dict([
        ('type', equals('realm_bot')),
        ('op', equals('add')),
        ('bot', check_dict([
            ('email', check_string),
            ('full_name', check_string),
            ('api_key', check_string),
            ('default_sending_stream', check_none_or(check_string)),
            ('default_events_register_stream', check_none_or(check_string)),
            ('default_all_public_streams', check_bool),
            ('avatar_url', check_string),
        ])),
    ])
    action = lambda: self.create_bot('*****@*****.**')
    events = self.do_test(action)
    error = bot_created_checker('events[1]', events[1])
    self.assert_on_error(error)
def get_authors_and_committer_info(payload: WildValue) -> str:
    body = ""

    author_names = set()
    committer_names = set()

    for commit in payload["all_commit_details"]:
        author_name = commit["author_name"].tame(check_string)
        author_username = commit["author_login"].tame(check_none_or(check_string))
        if author_username is not None:
            author_names.add(f"{author_name} ({author_username})")
        else:
            author_names.add(author_name)

        if commit["committer_email"].tame(check_none_or(check_string)) is not None:
            committer_name = commit["committer_name"].tame(check_string)
            committer_username = commit["committer_login"].tame(check_none_or(check_string))
            if committer_username is not None:
                committer_names.add(f"{committer_name} ({committer_username})")
            else:
                committer_names.add(committer_name)

    author_names_list = list(author_names)
    author_names_list.sort()
    committer_names_list = list(committer_names)
    committer_names_list.sort()

    authors = ", ".join(author_names_list)
    committers = ", ".join(committer_names_list)

    # Add the authors' information to the body.
    if len(author_names_list) > 1:
        body += f"- **Authors:** {authors}"
    else:
        body += f"- **Author:** {authors}"

    # Add information about the committers if it was provided.
    if len(committer_names) > 0:
        if len(committer_names) > 1:
            body += f"\n- **Committers:** {committers}"
        else:
            body += f"\n- **Committer:** {committers}"

    return body
def invite_users_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    invitee_emails_raw: str = REQ("invitee_emails"),
    invite_expires_in_minutes: Optional[int] = REQ(
        json_validator=check_none_or(check_int),
        default=settings.INVITATION_LINK_VALIDITY_MINUTES,
    ),
    invite_as: int = REQ(json_validator=check_int, default=PreregistrationUser.INVITE_AS["MEMBER"]),
    stream_ids: List[int] = REQ(json_validator=check_list(check_int)),
) -> HttpResponse:
    if not user_profile.can_invite_others_to_realm():
        # Guest users case will not be handled here as it will
        # be handled by the decorator above.
        raise JsonableError(_("Insufficient permission"))
    if invite_as not in PreregistrationUser.INVITE_AS.values():
        raise JsonableError(_("Must be invited as a valid type of user"))
    check_if_owner_required(invite_as, user_profile)
    if (
        invite_as
        in [
            PreregistrationUser.INVITE_AS["REALM_ADMIN"],
            PreregistrationUser.INVITE_AS["MODERATOR"],
        ]
        and not user_profile.is_realm_admin
    ):
        raise JsonableError(_("Must be an organization administrator"))
    if not invitee_emails_raw:
        raise JsonableError(_("You must specify at least one email address."))
    if not stream_ids:
        raise JsonableError(_("You must specify at least one stream for invitees to join."))

    invitee_emails = get_invitee_emails_set(invitee_emails_raw)

    streams: List[Stream] = []
    for stream_id in stream_ids:
        try:
            (stream, sub) = access_stream_by_id(user_profile, stream_id)
        except JsonableError:
            raise JsonableError(
                _("Stream does not exist with id: {}. No invites were sent.").format(stream_id)
            )
        streams.append(stream)

    do_invite_users(
        user_profile,
        invitee_emails,
        streams,
        invite_expires_in_minutes=invite_expires_in_minutes,
        invite_as=invite_as,
    )
    return json_success(request)
def get_update_name_body(payload: WildValue, action: WildValue, entity: str) -> str:
    name = action["changes"]["name"]
    kwargs = {
        "entity": entity,
        "new": name["new"].tame(check_string),
        "old": name["old"].tame(check_string),
        "name_template": get_name_template(entity).format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
    }

    return NAME_CHANGED_TEMPLATE.format(**kwargs)
def get_comment_added_body(payload: WildValue, action: WildValue, entity: str) -> str:
    actions = payload["actions"]
    kwargs = {"entity": entity}
    for action in actions:
        if action["id"] == payload["primary_id"]:
            kwargs["text"] = action["text"].tame(check_string)
        elif action["entity_type"] == entity:
            name_template = get_name_template(entity).format(
                name=action["name"].tame(check_string),
                app_url=action.get("app_url").tame(check_none_or(check_string)),
            )
            kwargs["name_template"] = name_template

    return COMMENT_ADDED_TEMPLATE.format(**kwargs)
def get_issue_body(helper: Helper) -> str:
    payload = helper.payload
    include_title = helper.include_title
    action = payload["action"].tame(check_string)
    issue = payload["issue"]
    assignee = issue["assignee"]
    return get_issue_event_message(
        get_sender_name(payload),
        action,
        issue["html_url"].tame(check_string),
        issue["number"].tame(check_int),
        issue["body"].tame(check_none_or(check_string)),
        assignee=assignee["login"].tame(check_string) if assignee else None,
        title=issue["title"].tame(check_string) if include_title else None,
    )
def get_update_archived_body(payload: WildValue, action: WildValue, entity: str) -> str:
    archived = action["changes"]["archived"]
    if archived["new"]:
        operation = "archived"
    else:
        operation = "unarchived"

    kwargs = {
        "entity": entity,
        "name_template": get_name_template(entity).format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
        "operation": operation,
    }

    return ARCHIVED_TEMPLATE.format(**kwargs)
def get_pr_opened_or_modified_body(
    payload: WildValue, action: str, include_title: Optional[str]
) -> str:
    pr = payload["pullRequest"]
    description = pr.get("description").tame(check_none_or(check_string))
    assignees_string = get_assignees_string(pr)
    if assignees_string:
        # Then use the custom message template for this particular integration so that we can
        # specify the reviewers at the end of the message (but before the description/message).
        parameters = {
            "user_name": get_user_name(payload),
            "action": action,
            "url": pr["links"]["self"][0]["href"].tame(check_string),
            "number": pr["id"].tame(check_int),
            "source": pr["fromRef"]["displayId"].tame(check_string),
            "destination": pr["toRef"]["displayId"].tame(check_string),
            "message": description,
            "assignees": assignees_string,
            "title": pr["title"].tame(check_string) if include_title else None,
        }
        if include_title:
            body = PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS_WITH_TITLE.format(
                **parameters,
            )
        else:
            body = PULL_REQUEST_OPENED_OR_MODIFIED_TEMPLATE_WITH_REVIEWERS.format(**parameters)

        punctuation = ":" if description else "."
        body = f"{body}{punctuation}"
        if description:
            body += "\n" + CONTENT_MESSAGE_TEMPLATE.format(message=description)
        return body

    return get_pull_request_event_message(
        user_name=get_user_name(payload),
        action=action,
        url=pr["links"]["self"][0]["href"].tame(check_string),
        number=pr["id"].tame(check_int),
        target_branch=pr["fromRef"]["displayId"].tame(check_string),
        base_branch=pr["toRef"]["displayId"].tame(check_string),
        message=description,
        assignee=assignees_string if assignees_string else None,
        title=pr["title"].tame(check_string) if include_title else None,
    )
def get_update_description_body(payload: WildValue, action: WildValue, entity: str) -> str:
    desc = action["changes"]["description"]

    kwargs = {
        "entity": entity,
        "new": desc["new"].tame(check_string),
        "old": desc["old"].tame(check_string),
        "name_template": get_name_template(entity).format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
    }

    if kwargs["new"] and kwargs["old"]:
        body = DESC_CHANGED_TEMPLATE.format(**kwargs)
    elif kwargs["new"]:
        body = NEW_DESC_ADDED_TEMPLATE.format(**kwargs)
    else:
        body = DESC_REMOVED_TEMPLATE.format(**kwargs)

    return body
def get_story_update_state_body(payload: WildValue, action: WildValue) -> str:
    workflow_state_id = action["changes"]["workflow_state_id"]
    references = payload["references"]

    state = {}
    for ref in references:
        if ref["id"].tame(check_string_or_int) == workflow_state_id["new"].tame(check_int):
            state["new"] = ref["name"].tame(check_string)
        if ref["id"].tame(check_string_or_int) == workflow_state_id["old"].tame(check_int):
            state["old"] = ref["name"].tame(check_string)

    kwargs = {
        "entity": "story",
        "new": state["new"],
        "old": state["old"],
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action.get("app_url").tame(check_none_or(check_string)),
        ),
    }

    return STATE_CHANGED_TEMPLATE.format(**kwargs)
def remote_server_post_analytics(
        request: HttpRequest,
        entity: Union[UserProfile, RemoteZulipServer],
        realm_counts: List[Dict[str, Any]] = REQ(validator=check_list(
            check_dict_only([
                ('property', check_string),
                ('realm', check_int),
                ('id', check_int),
                ('end_time', check_float),
                ('subgroup', check_none_or(check_string)),
                ('value', check_int),
            ]))),
        installation_counts: List[Dict[str, Any]] = REQ(validator=check_list(
            check_dict_only([
                ('property', check_string),
                ('id', check_int),
                ('end_time', check_float),
                ('subgroup', check_none_or(check_string)),
                ('value', check_int),
            ]))),
        realmauditlog_rows: Optional[List[Dict[str, Any]]] = REQ(
            validator=check_list(
                check_dict_only([
                    ('id', check_int),
                    ('realm', check_int),
                    ('event_time', check_float),
                    ('backfilled', check_bool),
                    ('extra_data', check_none_or(check_string)),
                    ('event_type', check_int),
                ])),
            default=None)
) -> HttpResponse:
    server = validate_entity(entity)

    validate_incoming_table_data(server, RemoteRealmCount, realm_counts, True)
    validate_incoming_table_data(server, RemoteInstallationCount, installation_counts, True)
    if realmauditlog_rows is not None:
        validate_incoming_table_data(server, RemoteRealmAuditLog, realmauditlog_rows)

    row_objects = [
        RemoteRealmCount(
            property=row['property'],
            realm_id=row['realm'],
            remote_id=row['id'],
            server=server,
            end_time=datetime.datetime.fromtimestamp(row['end_time'], tz=timezone_utc),
            subgroup=row['subgroup'],
            value=row['value'])
        for row in realm_counts
    ]
    batch_create_table_data(server, RemoteRealmCount, row_objects)

    row_objects = [
        RemoteInstallationCount(
            property=row['property'],
            remote_id=row['id'],
            server=server,
            end_time=datetime.datetime.fromtimestamp(row['end_time'], tz=timezone_utc),
            subgroup=row['subgroup'],
            value=row['value'])
        for row in installation_counts
    ]
    batch_create_table_data(server, RemoteInstallationCount, row_objects)

    if realmauditlog_rows is not None:
        row_objects = [
            RemoteRealmAuditLog(
                realm_id=row['realm'],
                remote_id=row['id'],
                server=server,
                event_time=datetime.datetime.fromtimestamp(row['event_time'], tz=timezone_utc),
                backfilled=row['backfilled'],
                extra_data=row['extra_data'],
                event_type=row['event_type'])
            for row in realmauditlog_rows
        ]
        batch_create_table_data(server, RemoteRealmAuditLog, row_objects)

    return json_success()
def update_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    full_name: Optional[str] = REQ(default=None, validator=check_string),
    role: Optional[int] = REQ(default=None, validator=check_int_in(
        UserProfile.ROLE_TYPES,
    )),
    profile_data: Optional[List[Dict[str, Optional[Union[int, str, List[int]]]]]] = REQ(
        default=None,
        validator=check_list(
            check_dict(
                [('id', check_int)],
                value_validator=check_none_or(
                    check_union([
                        check_int,
                        check_string,
                        check_list(check_int),
                    ])),
            ),
        ),
    ),
) -> HttpResponse:
    target = access_user_by_id(user_profile, user_id, allow_deactivated=True, allow_bots=True)

    if role is not None and target.role != role:
        if target.role == UserProfile.ROLE_REALM_OWNER and check_last_owner(user_profile):
            return json_error(
                _('The owner permission cannot be removed from the only organization owner.'))
        if UserProfile.ROLE_REALM_OWNER in [role, target.role] and not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        do_change_user_role(target, role)

    if (full_name is not None and target.full_name != full_name and
            full_name.strip() != ""):
        # We don't respect `name_changes_disabled` here because the request
        # is on behalf of the administrator.
        check_change_full_name(target, full_name, user_profile)

    if profile_data is not None:
        clean_profile_data = []
        for entry in profile_data:
            assert isinstance(entry["id"], int)
            if entry["value"] is None or not entry["value"]:
                field_id = entry["id"]
                check_remove_custom_profile_field_value(target, field_id)
            else:
                clean_profile_data.append({
                    "id": entry["id"],
                    "value": entry["value"],
                })
        validate_user_custom_profile_data(target.realm.id, clean_profile_data)
        do_update_user_custom_profile_data_if_changed(target, clean_profile_data)

    return json_success()
def remote_server_post_analytics(
    request: HttpRequest,
    entity: Union[UserProfile, RemoteZulipServer],
    realm_counts: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict_only([
                ("property", check_string),
                ("realm", check_int),
                ("id", check_int),
                ("end_time", check_float),
                ("subgroup", check_none_or(check_string)),
                ("value", check_int),
            ])
        )
    ),
    installation_counts: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict_only([
                ("property", check_string),
                ("id", check_int),
                ("end_time", check_float),
                ("subgroup", check_none_or(check_string)),
                ("value", check_int),
            ])
        )
    ),
    realmauditlog_rows: Optional[List[Dict[str, Any]]] = REQ(
        json_validator=check_list(
            check_dict_only([
                ("id", check_int),
                ("realm", check_int),
                ("event_time", check_float),
                ("backfilled", check_bool),
                ("extra_data", check_none_or(check_string)),
                ("event_type", check_int),
            ])
        ),
        default=None,
    ),
) -> HttpResponse:
    server = validate_entity(entity)

    validate_incoming_table_data(server, RemoteRealmCount, realm_counts, True)
    validate_incoming_table_data(server, RemoteInstallationCount, installation_counts, True)
    if realmauditlog_rows is not None:
        validate_incoming_table_data(server, RemoteRealmAuditLog, realmauditlog_rows)

    row_objects = [
        RemoteRealmCount(
            property=row["property"],
            realm_id=row["realm"],
            remote_id=row["id"],
            server=server,
            end_time=datetime.datetime.fromtimestamp(row["end_time"], tz=datetime.timezone.utc),
            subgroup=row["subgroup"],
            value=row["value"],
        )
        for row in realm_counts
    ]
    batch_create_table_data(server, RemoteRealmCount, row_objects)

    row_objects = [
        RemoteInstallationCount(
            property=row["property"],
            remote_id=row["id"],
            server=server,
            end_time=datetime.datetime.fromtimestamp(row["end_time"], tz=datetime.timezone.utc),
            subgroup=row["subgroup"],
            value=row["value"],
        )
        for row in installation_counts
    ]
    batch_create_table_data(server, RemoteInstallationCount, row_objects)

    if realmauditlog_rows is not None:
        row_objects = [
            RemoteRealmAuditLog(
                realm_id=row["realm"],
                remote_id=row["id"],
                server=server,
                event_time=datetime.datetime.fromtimestamp(
                    row["event_time"], tz=datetime.timezone.utc
                ),
                backfilled=row["backfilled"],
                extra_data=row["extra_data"],
                event_type=row["event_type"],
            )
            for row in realmauditlog_rows
        ]
        batch_create_table_data(server, RemoteRealmAuditLog, row_objects)

    return json_success()
                              for_admin=True)
    if target.is_bot:
        assert target.bot_type is not None
        check_bot_creation_policy(user_profile, target.bot_type)
    do_reactivate_user(target, acting_user=user_profile)
    return json_success()


check_profile_data: Validator[List[Dict[str, Optional[Union[int, str, List[int]]]]]] = check_list(
    check_dict_only([
        ("id", check_int),
        (
            "value",
            check_none_or(
                check_union([check_int, check_string, check_list(check_int)]),
            ),
        ),
    ]),
)


@has_request_variables
def update_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    full_name: Optional[str] = REQ(default=None, json_validator=check_string),
    role: Optional[int] = REQ(
        default=None,
        json_validator=check_int_in(
            UserProfile.ROLE_TYPES,
        ),
    ),
    check_int,
    check_int_in,
    check_list,
    check_none_or,
    check_string,
    check_union,
    check_url,
    equals,
)
from zerver.models import Realm, Stream, UserProfile

# These fields are used for "stream" events, and are included in the
# larger "subscription" events that also contain personal settings.
basic_stream_fields = [
    ("description", check_string),
    ("first_message_id", check_none_or(check_int)),
    ("history_public_to_subscribers", check_bool),
    ("invite_only", check_bool),
    ("is_announcement_only", check_bool),
    ("is_web_public", check_bool),
    ("message_retention_days", equals(None)),
    ("name", check_string),
    ("rendered_description", check_string),
    ("stream_id", check_int),
    ("stream_post_policy", check_int),
    ("date_created", check_int),
]

subscription_fields: Sequence[Tuple[str, Validator[object]]] = [
    *basic_stream_fields,
    ("audible_notifications", check_none_or(check_bool)),
def get_story_update_batch_body(payload: WildValue, action: WildValue) -> Optional[str]:
    # When the user selects one or more stories with the checkbox, they can perform
    # a batch update on multiple stories while changing multiple attributes at the
    # same time.
    changes = action["changes"]
    kwargs = {
        "name_template": STORY_NAME_TEMPLATE.format(
            name=action["name"].tame(check_string),
            app_url=action["app_url"].tame(check_string),
        ),
        "workflow_state_template": "",
    }

    templates = []
    last_change = "other"

    move_sub_templates = []
    if "epic_id" in changes:
        last_change = "epic"
        epic_id = changes["epic_id"]
        old_reference = get_reference_by_id(
            payload, epic_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, epic_id.get("new").tame(check_none_or(check_int))
        )
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Epic",
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )
    if "project_id" in changes:
        last_change = "project"
        project_id = changes["project_id"]
        old_reference = get_reference_by_id(
            payload, project_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, project_id.get("new").tame(check_none_or(check_int))
        )
        move_sub_templates.append(
            STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                entity_type="Project",
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )
    if len(move_sub_templates) > 0:
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="was moved",
                sub_templates=", ".join(move_sub_templates),
            )
        )

    if "story_type" in changes:
        last_change = "type"
        story_type = changes["story_type"]
        templates.append(
            STORY_UPDATE_BATCH_CHANGED_TEMPLATE.format(
                operation="{} changed".format("was" if len(templates) == 0 else "and"),
                sub_templates=STORY_UPDATE_BATCH_CHANGED_SUB_TEMPLATE.format(
                    entity_type="type",
                    old=story_type.get("old").tame(check_none_or(check_string)),
                    new=story_type.get("new").tame(check_none_or(check_string)),
                ),
            )
        )

    if "label_ids" in changes:
        label_ids = changes["label_ids"]
        # If this is a payload for when no label is added, ignore it
        if "adds" in label_ids:
            label_ids_added = label_ids["adds"].tame(check_list(check_int))
            last_change = "label"
            labels = get_story_joined_label_list(payload, action, label_ids_added)
            templates.append(
                STORY_UPDATE_BATCH_ADD_REMOVE_TEMPLATE.format(
                    operation="{} added".format("was" if len(templates) == 0 else "and"),
                    entity="the new label{plural} {labels}".format(
                        plural="s" if len(label_ids) > 1 else "", labels=labels
                    ),
                )
            )

    if "workflow_state_id" in changes:
        last_change = "state"
        workflow_state_id = changes["workflow_state_id"]
        old_reference = get_reference_by_id(
            payload, workflow_state_id.get("old").tame(check_none_or(check_int))
        )
        new_reference = get_reference_by_id(
            payload, workflow_state_id.get("new").tame(check_none_or(check_int))
        )
        kwargs.update(
            workflow_state_template=TRAILING_WORKFLOW_STATE_CHANGE_TEMPLATE.format(
                old=None if old_reference is None else old_reference["name"].tame(check_string),
                new=None if new_reference is None else new_reference["name"].tame(check_string),
            )
        )

    # Use the default template for a state change if it is the only change.
    if len(templates) <= 1 or (len(templates) == 0 and last_change == "state"):
        event: str = "{}_{}".format("story_update", last_change)
        alternative_body_func = EVENT_BODY_FUNCTION_MAPPER.get(event)
        # If last_change is not one of "epic", "project", "type", "label" and "state",
        # we should ignore the action as there is no way for us to render the changes.
        if alternative_body_func is None:
            return None
        return alternative_body_func(payload, action)

    kwargs.update(templates=", ".join(templates))

    return STORY_UPDATE_BATCH_TEMPLATE.format(**kwargs)
                              allow_deactivated=True,
                              allow_bots=True)
    if target.is_bot:
        assert target.bot_type is not None
        check_bot_creation_policy(user_profile, target.bot_type)
    do_reactivate_user(target, acting_user=user_profile)
    return json_success()


check_profile_data: Validator[List[Dict[str, Optional[Union[int, str, List[int]]]]]] = check_list(
    check_dict(
        [('id', check_int)],
        value_validator=check_none_or(
            check_union([
                check_int,
                check_string,
                check_list(check_int),
            ])),
    ),
)


@has_request_variables
def update_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    full_name: Optional[str] = REQ(default=None, validator=check_string),
    role: Optional[int] = REQ(default=None, validator=check_int_in(
        UserProfile.ROLE_TYPES,
    )),
    profile_data: Optional[List[Dict[str, Optional[Union[