def read_fixture_obj(filename):
    """Return the parsed JSON fixture for ``filename``, memoized in ``OBJ_CACHE``.

    The first call parses the fixture text with ``orjson``; subsequent calls
    return the same cached object (no copy is made).

    Args:
        filename: Name of the fixture file, as accepted by ``read_fixture_str``.

    Returns:
        The deserialized Python object for the fixture.
    """
    # Fix: use the idiomatic `not in` membership test (was `not filename in ...`).
    if filename not in OBJ_CACHE:
        OBJ_CACHE[filename] = orjson.loads(read_fixture_str(filename))
    return OBJ_CACHE[filename]
def _process_component_request(request: HttpRequest, component_request: ComponentRequest) -> Dict:
    """
    Process a `ComponentRequest`:
        1. construct a Component view
        2. set all of the properties on the view from the data
        3. execute the type
            - update the properties based on the payload for "syncInput"
            - create/update the Django Model based on the payload for "dbInput"
            - call the method specified for "callMethod"
        4. validate any fields specified in a Django form
        5. construct a `dict` that will get returned in a `JsonResponse` later on

    Args:
        param request: HttpRequest for the function-based view.
        param: component_request: Component request to process.

    Returns:
        `dict` with the following structure:
        {
            "id": component_id,
            "dom": html, // re-rendered version of the component after actions in the payload are completed
            "data": {}, // updated data after actions in the payload are completed
            "errors": {}, // form validation errors
            "return": {}, // optional return value from an executed action
            "parent": {}, // optional representation of the parent component
        }

    Raises:
        UnicornViewError: if an action in the queue has an unknown ``action_type``.
        RenderNotModified: when the re-rendered DOM hash matches the client's hash
            and there is nothing else (return value, calls) to send back.
    """
    component = UnicornView.create(
        component_id=component_request.id,
        component_name=component_request.name,
        request=request,
    )

    # Get a deepcopy of the data passed in to determine what fields are updated later
    original_data = copy.deepcopy(component_request.data)

    # Set component properties based on request data
    for (property_name, property_value) in component_request.data.items():
        set_property_from_data(component, property_name, property_value)
    component.hydrate()

    validate_all_fields = False
    is_reset_called = False
    is_refresh_called = False
    return_data = None
    partials = []

    for action in component_request.action_queue:
        if action.partial:
            partials.append(action.partial)

        if action.action_type == "syncInput":
            sync_input.handle(component_request, component, action.payload)
        elif action.action_type == "dbInput":
            db_input.handle(component, action.payload)
        elif action.action_type == "callMethod":
            (
                component,
                _is_refresh_called,
                _is_reset_called,
                _validate_all_fields,
                return_data,
            ) = call_method.handle(component_request, component, action.payload)

            # Bitwise-or on booleans: a flag stays True once any action sets it.
            is_refresh_called = is_refresh_called | _is_refresh_called
            is_reset_called = is_reset_called | _is_reset_called
            validate_all_fields = validate_all_fields | _validate_all_fields
        else:
            raise UnicornViewError(
                f"Unknown action_type '{action.action_type}'")

    component.complete()

    # Re-load frontend context variables to deal with non-serializable properties
    component_request.data = orjson.loads(
        component.get_frontend_context_variables())

    # Send back all available data for reset or refresh actions
    updated_data = component_request.data

    if not is_reset_called:
        if not is_refresh_called:
            # Normal path: only send back fields whose value actually changed
            # compared to the request's original data.
            updated_data = {}

            for key, value in original_data.items():
                if value != component_request.data.get(key):
                    updated_data[key] = component_request.data.get(key)

    if validate_all_fields:
        component.validate()
    else:
        component.validate(model_names=list(updated_data.keys()))

    rendered_component = component.render()
    component.rendered(rendered_component)

    cache = caches[get_cache_alias()]

    # Caching is best-effort: a failure to cache must not fail the request.
    try:
        cache.set(component.component_cache_key,
                  get_cacheable_component(component))
    except UnicornCacheError as e:
        logger.warning(e)

    partial_doms = []

    # Only honor partials when every action in the queue specified one.
    if partials and all(partials):
        soup = BeautifulSoup(rendered_component, features="html.parser")

        for partial in partials:
            partial_found = False
            only_id = False
            only_key = False

            # Target resolution order: explicit "target", then "key", then "id".
            target = partial.get("target")

            if not target:
                target = partial.get("key")

                if target:
                    only_key = True

            if not target:
                target = partial.get("id")

                if target:
                    only_id = True

            assert target, "Partial target is required"

            # First try to match by unicorn:key (skipped when target came from "id").
            if not only_id:
                for element in soup.find_all():
                    if ("unicorn:key" in element.attrs
                            and element.attrs["unicorn:key"] == target):
                        partial_doms.append({
                            "key": target,
                            "dom": str(element)
                        })
                        partial_found = True
                        break

            # Fall back to matching by DOM id (skipped when target came from "key").
            if not partial_found and not only_key:
                for element in soup.find_all():
                    if "id" in element.attrs and element.attrs["id"] == target:
                        partial_doms.append({
                            "id": target,
                            "dom": str(element)
                        })
                        partial_found = True
                        break

    res = {
        "id": component_request.id,
        "data": updated_data,
        "errors": component.errors,
        "calls": component.calls,
        "checksum": generate_checksum(orjson.dumps(component_request.data)),
    }

    if partial_doms:
        res.update({"partials": partial_doms})
    else:
        hash = generate_checksum(rendered_component)

        # Short-circuit: nothing changed and nothing to deliver -> 304-style path.
        if (component_request.hash == hash
                and (not return_data or not return_data.value)
                and not component.calls):
            raise RenderNotModified()

        res.update({
            "dom": rendered_component,
            "hash": hash,
        })

    if return_data:
        res.update({
            "return": return_data.get_data(),
        })

        if return_data.redirect:
            res.update({
                "redirect": return_data.redirect,
            })

        if return_data.poll:
            res.update({
                "poll": return_data.poll,
            })

    parent_component = component.parent

    if parent_component:
        # TODO: Should parent_component.hydrate() be called?
        parent_frontend_context_variables = loads(
            parent_component.get_frontend_context_variables())
        parent_checksum = generate_checksum(
            dumps(parent_frontend_context_variables))

        parent = {
            "id": parent_component.component_id,
            "checksum": parent_checksum,
        }

        if not partial_doms:
            parent_dom = parent_component.render()
            component.parent_rendered(parent_dom)

            # Best-effort caching of the parent, mirroring the child above.
            try:
                cache.set(
                    parent_component.component_cache_key,
                    get_cacheable_component(parent_component),
                )
            except UnicornCacheError as e:
                logger.warning(e)

            parent.update({
                "dom": parent_dom,
                "data": parent_frontend_context_variables,
                "errors": parent_component.errors,
            })

        res.update({"parent": parent})

    return res
async def beat_listener(mpsc):
    """Relay every message received on *mpsc* to socket.io as a "message" event."""
    print("listener launched")
    async for _chan, raw in mpsc.iter():
        payload = json.loads(raw)
        await sio.emit("message", payload)
def list_data(self, request, formatter):
    """
    Returns a list of requested object objects

    Builds a query dict from the request (JSON body, POST form, or GET
    params), applies limit/start paging, optional field restriction,
    ExtJS sort/group params, favorites filtering and extra-query hooks,
    then formats each matching record with *formatter*.
    """
    # Todo: Fix
    if request.method == "POST":
        if self.site.is_json(request.META.get("CONTENT_TYPE")):
            q = orjson.loads(request.body)
        else:
            # Collapse single-valued form fields to scalars, keep lists otherwise.
            q = {
                str(k): v[0] if len(v) == 1 else v
                for k, v in request.POST.lists()
            }
    else:
        q = {
            str(k): v[0] if len(v) == 1 else v
            for k, v in request.GET.lists()
        }
    # Apply row limit if necessary
    limit = q.get(self.limit_param, self.unlimited_row_limit)
    if limit:
        try:
            limit = max(int(limit), 0)
        except ValueError:
            return HttpResponse(400, "Invalid %s param" % self.limit_param)
    # NOTE(review): unreachable — max(..., 0) above guarantees limit >= 0.
    if limit and limit < 0:
        return HttpResponse(400, "Invalid %s param" % self.limit_param)
    # page = q.get(self.page_param)
    start = q.get(self.start_param) or 0
    if start:
        try:
            start = max(int(start), 0)
        except ValueError:
            return HttpResponse(400, "Invalid %s param" % self.start_param)
    # NOTE(review): unreachable for the same reason — start is clamped to >= 0.
    elif start and start < 0:
        return HttpResponse(400, "Invalid %s param" % self.start_param)
    query = q.get(self.query_param)
    only = q.get(self.only_param)
    if only:
        only = only.split(",")
    ordering = []
    # ExtJS-style sort spec: list of {"property": ..., "direction": ...}.
    if request.is_extjs and self.sort_param in q:
        for r in self.deserialize(q[self.sort_param]):
            if r["direction"] == "DESC":
                ordering += ["-%s" % r["property"]]
            else:
                ordering += [r["property"]]
    grouping = None
    if request.is_extjs and self.group_param in q:
        r = self.deserialize(q[self.group_param])
        if r["direction"] == "DESC":
            grouping = "-%s" % r["property"]
        else:
            grouping = r["property"]
    fs = None
    fav_items = None
    # Favorites flag is popped so it does not leak into the ORM filter below.
    if self.fav_status in q:
        fs = q.pop(self.fav_status) == "true"
    xaa, ordering = self.extra_query(q, ordering)
    q = self.cleaned_query(q)
    # A None key carries raw SQL "where" fragments (optionally with params).
    if None in q:
        w = []
        p = []
        for x in q.pop(None):
            if type(x) in (list, tuple):
                w += [x[0]]
                p += x[1]
            else:
                w += [x]
        xa = {"where": w}
        if p:
            xa["params"] = p
        if xaa:
            # data = self.queryset(request, query).filter(**q).extra(**xaa)
            xa.update(xaa)
        data = self.queryset(request, query).filter(**q).extra(**xa)
    elif xaa:
        # ExtraQuery
        data = self.queryset(request, query).filter(**q).extra(**xaa)
    else:
        data = self.queryset(request, query).filter(**q)
    # Favorites filter
    if fs is not None:
        fav_items = self.get_favorite_items(request.user)
        if fs:
            data = data.filter(id__in=fav_items)
        elif isinstance(data, QuerySet):
            # Model
            data = data.exclude(id__in=fav_items)
        else:
            # Doc
            data = data.filter(id__nin=fav_items)
    # Store unpaged/unordered queryset
    unpaged_data = data
    # Select related records when fetching for models
    if hasattr(data, "_as_sql"):
        # For Models only
        data = data.select_related()
    # Apply sorting
    ordering = ordering or self.default_ordering
    if ordering:
        data = data.order_by(*ordering)
    if grouping:
        ordering.insert(0, grouping)
    # Apply row limit if necessary
    if self.row_limit:
        # Fetch one row beyond the limit so overflow can be detected below.
        limit = min(limit or self.row_limit, self.row_limit + 1)
    # Apply paging
    if limit:
        data = data[start:start + limit]
    # Fetch and format data
    out = [formatter(o, fields=only) for o in data]
    if self.row_limit and len(out) > self.row_limit + 1:
        return self.response(
            "System records limit exceeded (%d records)" % self.row_limit,
            status=self.TOO_LARGE)
    # Set favorites
    if not only and formatter == self.instance_to_dict:
        if fav_items is None:
            fav_items = self.get_favorite_items(request.user)
        for r in out:
            r[self.fav_status] = r[self.pk] in fav_items
    # Bulk update result. Enrich with proper fields
    out = self.clean_list_data(out)
    # if request.is_extjs:
    ld = len(out)
    # Only run the (potentially expensive) count() when the page is full
    # or offset, i.e. when the total may exceed what was fetched.
    if limit and (ld == limit or start > 0):
        total = unpaged_data.count()
    else:
        total = ld
    out = {"total": total, "success": True, "data": out}
    return self.response(out, status=self.OK)
def loads(string: str, **kwargs: Any) -> Any:
    """Deserialize a JSON document with orjson.

    ``**kwargs`` is accepted for signature compatibility but deliberately
    ignored — ``orjson.loads`` takes no options.
    """
    result = orjson.loads(string)
    return result
def test_error_handling(self) -> None:
    """Verify both worker kinds log failures and write failed events to the error file.

    Part 1: a plain QueueProcessingWorker — only the single bad event lands
    in the .errors file; the remaining events are still processed.
    Part 2: a LoopQueueProcessingWorker — the whole batch of 4 events is
    written to the .errors file and processing stops at the failure.
    """
    processed = []

    @queue_processors.assign_queue("unreliable_worker", is_test_queue=True)
    class UnreliableWorker(queue_processors.QueueProcessingWorker):
        def consume(self, data: Mapping[str, Any]) -> None:
            if data["type"] == "unexpected behaviour":
                raise Exception("Worker task not performing as expected!")
            # Closure: appends to the enclosing test's `processed` list.
            processed.append(data["type"])

    fake_client = self.FakeClient()
    for msg in ["good", "fine", "unexpected behaviour", "back to normal"]:
        fake_client.enqueue("unreliable_worker", {"type": msg})

    fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_worker.errors")
    # Remove any stale error file from a previous run.
    try:
        os.remove(fn)
    except OSError:  # nocoverage # error handling for the directory not existing
        pass

    with simulated_queue_client(lambda: fake_client):
        worker = UnreliableWorker()
        worker.setup()
        with self.assertLogs(level="ERROR") as m:
            worker.start()
        self.assertEqual(
            m.records[0].message,
            "Problem handling data on queue unreliable_worker")
        self.assertIn(m.records[0].stack_info, m.output[0])

    # The failing event is skipped; all others were consumed.
    self.assertEqual(processed, ["good", "fine", "back to normal"])
    with open(fn) as f:
        line = f.readline().strip()
    # Error-file line format: "<timestamp>\t<json list of events>".
    events = orjson.loads(line.split("\t")[1])
    self.assert_length(events, 1)
    event = events[0]
    self.assertEqual(event["type"], "unexpected behaviour")

    # Rebind `processed` for part 2; the new list is what the loop worker sees.
    processed = []

    @queue_processors.assign_queue("unreliable_loopworker", is_test_queue=True)
    class UnreliableLoopWorker(queue_processors.LoopQueueProcessingWorker):
        def consume_batch(self, events: List[Dict[str, Any]]) -> None:
            for event in events:
                if event["type"] == "unexpected behaviour":
                    raise Exception(
                        "Worker task not performing as expected!")
                processed.append(event["type"])

    for msg in ["good", "fine", "unexpected behaviour", "back to normal"]:
        fake_client.enqueue("unreliable_loopworker", {"type": msg})

    fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_loopworker.errors")
    try:
        os.remove(fn)
    except OSError:  # nocoverage # error handling for the directory not existing
        pass

    with simulated_queue_client(lambda: fake_client):
        loopworker = UnreliableLoopWorker()
        loopworker.setup()
        with self.assertLogs(level="ERROR") as m:
            loopworker.start()
        self.assertEqual(
            m.records[0].message,
            "Problem handling data on queue unreliable_loopworker")
        self.assertIn(m.records[0].stack_info, m.output[0])

    # Batch processing stops at the failing event.
    self.assertEqual(processed, ["good", "fine"])
    with open(fn) as f:
        line = f.readline().strip()
    # For loop workers the entire batch is recorded, not just the bad event.
    events = orjson.loads(line.split("\t")[1])
    self.assert_length(events, 4)
    self.assertEqual(
        [event["type"] for event in events],
        ["good", "fine", "unexpected behaviour", "back to normal"],
    )
async def put_member(request: Request, service_id: int, version: int,
                     auth: PodApiRequestAuth = Depends(PodApiRequestAuth)):
    '''
    Update the membership of the service to the specified version.

    :param service_id: service_id of the service
    :param version: version of the service schema
    :raises: HTTPException 404 (unknown service/version), 409 (already on
        this version, or attempted downgrade), ValueError (inconsistent
        local service/schema state)
    '''

    _LOGGER.debug(f'Put Member API called from {request.client.host}')
    await auth.authenticate()

    server: PodServer = config.server
    account: Account = server.account

    # Authorization: handled by PodApiRequestsAuth, which checks the
    # cert / JWT was for an account and its account ID matches that
    # of the pod

    await account.load_memberships()
    member: Member = account.memberships.get(service_id)

    if not member:
        raise HTTPException(
            status_code=404,
            detail=f'Not a member of service with ID {service_id}')

    current_version = member.schema.version
    # Upgrades only: same version is a conflict, lower version is a downgrade.
    if current_version == version:
        raise HTTPException(
            status_code=409,
            detail=(f'Already a member of service {service_id} with version '
                    f'{version}'))

    if current_version > version:
        raise HTTPException(
            status_code=409,
            detail=('Can not downgrade membership from version '
                    f'{current_version} to {version}'))

    # Get the latest list of services from the directory server
    await server.get_registered_services()

    network: Network = account.network
    service_summary = network.service_summaries.get(service_id)
    if not service_summary:
        raise HTTPException(
            status_code=404,
            detail=(f'Service {service_id} not available in network '
                    f'{network.network}'))

    if service_summary['version'] != version:
        raise HTTPException(
            status_code=404,
            detail=(f'Version {version} for service {service_id} not known in '
                    'the network'))

    service: Service = network.services.get(service_id)
    if not service:
        raise ValueError(f'Service {service_id} not found in the membership')

    if not service.schema:
        raise ValueError(f'Schema for service {service_id} not loaded')

    # Download and install the schema only if the locally loaded one is stale.
    if service.schema.version != version:
        # NOTE(review): `dowload_schema` looks like a typo for
        # `download_schema` — verify against the Service class definition.
        text = service.dowload_schema(save=False)
        contract_data = orjson.loads(text)
        contract_version = contract_data['version']
        if contract_data['version'] != version:
            raise HTTPException(
                status_code=404,
                detail=(
                    f'Service {service_id} only has version {contract_version}'
                    ' available'))

        service.save_schema(contract_data)

        # Load the newly downloaded and saved schema.
        member.load_schema()

    # We create a new instance of the Service as to make sure
    # we fully initialize all the applicable data structures
    new_service = Service.get_service(network)
    network.services[service_id] = new_service

    # BUG: any additional workers also need to join the service
    member.upgrade()
def from_json(string):
    """Parse *string* as JSON and return the resulting Python object."""
    decoded = json.loads(string)
    return decoded
def create_dataset():
    """
    Build the citation graph and per-paper attribute files from the DBLP v11 dump.

    Streams dblp_papers_v11.txt line by line (one JSON paper per line),
    accumulates a directed citation graph in memory, then writes:
      - dblp.v11/citation_edgelist.txt : one "src dst" edge per line
      - dblp.v11/citation_attr.txt     : one "paper_id attr..." row per paper,
        taken from the module-level `paper_data` dict filled by create_attr().

    :return: None
    """
    global paper_data
    count = 1
    fos_missing_count = 0
    # full_data = dict()
    # Attribute names to keep, lower-cased with quotes stripped.
    with open("dblp.v11/citation_selected_attr.txt", "r") as selected_attr_file:
        reader = csv.reader(selected_attr_file, delimiter=' ')
        selected_attr = [
            str(row[0]).replace('"', '').lower() for row in reader
        ]
    N = len(selected_attr)
    print(N)
    G = networkx.DiGraph()
    with open("dblp.v11/dblp_papers_v11.txt", "r") as dblp_file:
        print("File opened")
        for line in dblp_file:
            paper_dict = orjson.loads(line)
            pid = paper_dict["id"]
            paper_id, references, edges = create_edges(pid, paper_dict)
            G.add_node(paper_id)
            G.add_nodes_from(references)
            G.add_edges_from(edges)
            # Side effect: create_attr populates the global `paper_data`.
            create_attr(paper_id, paper_dict, selected_attr, N)
            if count % 100000 == 0:
                # output the current stored paper_id attributes into a json and clear memory
                gc.collect()
                print(count)
            count += 1
    E = G.edges()
    print("Writing edgelist", len(E))
    with open("dblp.v11/citation_edgelist.txt", "w", newline='') as co_author_edge_file:
        writer = csv.writer(co_author_edge_file, delimiter=" ")
        for edge in E:
            writer.writerow(edge)
    # output the last stored paper_id attributes into a json and clear memory
    print(count)
    paper_items = paper_data.items()
    # del paper_data
    # NOTE(review): paper_data[0] assumes a key `0` exists (or paper_data is
    # list-like) — verify; len(paper_data) may be what was intended.
    print("Writing fos", len(paper_data[0]))  # 4107340
    with open(f"dblp.v11/citation_attr.txt", "w", newline='') as co_author_attr_file:
        writer = csv.writer(co_author_attr_file, delimiter=" ")
        for id, vector in paper_items:
            data = [id] + vector
            writer.writerow(data)
    del paper_items
def test_invalid(input):
    """Malformed JSON must make orjson.loads raise JSONDecodeError."""
    pytest.raises(orjson.JSONDecodeError, orjson.loads, input)
def _load_spacegroups():
    """Lazily load and memoize the bundled spacegroup table.

    Parses data/spacegroups.json on first call, stores the result in the
    module-level SPACEGROUPS cache, and returns the cached object thereafter.
    """
    global SPACEGROUPS
    if SPACEGROUPS is not None:
        return SPACEGROUPS
    raw = pkg_resources.resource_string(__name__, "data/spacegroups.json")
    SPACEGROUPS = orjson.loads(raw)
    return SPACEGROUPS
def notify(request: HttpRequest) -> HttpResponse:
    """Decode the POSTed notification payload and process it on the Tornado thread."""
    payload = orjson.loads(request.POST["data"])
    dispatch = in_tornado_thread(process_notification)
    dispatch(payload)
    return json_success(request)
async def _handle_battle_message(self, split_messages: List[List[str]]) -> None:
    """Handles a battle message.

    Dispatches each sub-message of a (possibly multiline) battle message:
    battle init/lookup, request payloads, titles, win/tie bookkeeping,
    showdown error-string handling, turns and teampreview.

    :param split_messages: The received battle message, already split into
        per-line token lists.
    :type split_messages: List[List[str]]
    """
    # Battle messages can be multiline
    if (
        len(split_messages) > 1
        and len(split_messages[1]) > 1
        and split_messages[1][1] == "init"
    ):
        battle_info = split_messages[0][0].split("-")
        battle = await self._create_battle(battle_info)
        # Drop the header line; the loop below handles the remainder.
        split_messages.pop(0)
    else:
        battle = await self._get_battle(split_messages[0][0])

    for split_message in split_messages[1:]:
        if len(split_message) <= 1:
            continue
        elif split_message[1] in self.MESSAGES_TO_IGNORE:
            pass
        elif split_message[1] == "request":
            if split_message[2]:
                request = orjson.loads(split_message[2])
                battle._parse_request(request)
                if battle.move_on_next_request:
                    await self._handle_battle_request(battle)
                    battle.move_on_next_request = False
        elif split_message[1] == "title":
            player_1, player_2 = split_message[2].split(" vs. ")
            battle.players = player_1, player_2
        elif split_message[1] == "win" or split_message[1] == "tie":
            if split_message[1] == "win":
                battle._won_by(split_message[2])
            else:
                battle._tied()
            # Release the per-battle slot and wake anyone waiting for battles to end.
            await self._battle_count_queue.get()
            self._battle_count_queue.task_done()
            self._battle_finished_callback(battle)
            async with self._battle_end_condition:
                self._battle_end_condition.notify_all()
        elif split_message[1] == "error":
            self.logger.warning(
                "Error message received: %s", "|".join(split_message)
            )
            # The prefixes below are exact showdown server error strings;
            # each maps to a recovery action (retry request, mark trapped, ...).
            if split_message[2].startswith(
                "[Invalid choice] Sorry, too late to make a different move"
            ):
                if battle.trapped:
                    await self._handle_battle_request(battle)
            elif split_message[2].startswith(
                "[Unavailable choice] Can't switch: The active Pokémon is "
                "trapped"
            ) or split_message[2].startswith(
                "[Invalid choice] Can't switch: The active Pokémon is trapped"
            ):
                battle.trapped = True
                await self._handle_battle_request(battle)
            elif split_message[2].startswith(
                "[Invalid choice] Can't switch: You can't switch to an active "
                "Pokémon"
            ):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif split_message[2].startswith(
                "[Invalid choice] Can't switch: You can't switch to a fainted "
                "Pokémon"
            ):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif split_message[2].startswith(
                "[Invalid choice] Can't move: Invalid target for"
            ):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif split_message[2].startswith(
                "[Invalid choice] Can't move: You can't choose a target for"
            ):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif split_message[2].startswith(
                "[Invalid choice] Can't move: "
            ) and split_message[2].endswith("needs a target"):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif (
                split_message[2].startswith("[Invalid choice] Can't move: Your")
                and " doesn't have a move matching " in split_message[2]
            ):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif split_message[2].startswith(
                "[Invalid choice] Incomplete choice: "
            ):
                await self._handle_battle_request(battle, maybe_default_order=True)
            elif split_message[2].startswith("[Invalid choice]"):
                self._manage_error_in(battle)
            elif split_message[2].startswith(
                "[Unavailable choice]"
            ) and split_message[2].endswith("is disabled"):
                self._manage_error_in(battle)
                battle.move_on_next_request = True
            else:
                self.logger.critical("Unexpected error message: %s", split_message)
        elif split_message[1] == "turn":
            battle.turn = int(split_message[2])
            await self._handle_battle_request(battle)
        elif split_message[1] == "teampreview":
            await self._handle_battle_request(battle, from_teampreview_request=True)
        else:
            # Anything unrecognized is delegated to the battle's own parser.
            try:
                battle._parse_message(split_message)
            except UnexpectedEffectException as e:
                self.logger.exception(e)
def read_json_logs_from_stdout(capsys: CaptureFixture) -> List[dict]:
    """Capture stdout via *capsys* and decode every captured log line as JSON."""
    return list(map(orjson.loads, parse_logs_from_stdout(capsys)))
def generate_and_send_messages(
        data: Tuple[int, Sequence[Sequence[int]], Mapping[str, Any], int]) -> int:
    """Generate *tot_messages* fake messages with a seeded RNG and send them in batches.

    The tuple argument carries (tot_messages, personals_pairs, options,
    random_seed); options controls batch size, topic count, recipient-type
    percentages, "stickyness" (probability of reusing the previous
    recipient) and thread/date spread. Returns tot_messages.

    NOTE: the sequence of random.* calls determines the generated content —
    do not reorder statements in the loop.
    """
    (tot_messages, personals_pairs, options, random_seed) = data
    random.seed(random_seed)

    with open(
            os.path.join(get_or_create_dev_uuid_var_path("test-backend"),
                         "test_messages.json"), "rb") as infile:
        dialog = orjson.loads(infile.read())
    random.shuffle(dialog)
    # cycle() lets us generate more messages than there are dialog lines.
    texts = itertools.cycle(dialog)

    # We need to filter out streams from the analytics realm as we don't want to generate
    # messages to its streams - and they might also have no subscribers, which would break
    # our message generation mechanism below.
    stream_ids = Stream.objects.filter(realm=get_realm("zulip")).values_list(
        "id", flat=True)
    recipient_streams: List[int] = [
        recipient.id
        for recipient in Recipient.objects.filter(type=Recipient.STREAM,
                                                  type_id__in=stream_ids)
    ]
    recipient_huddles: List[int] = [
        h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)
    ]

    huddle_members: Dict[int, List[int]] = {}
    for h in recipient_huddles:
        huddle_members[h] = [
            s.user_profile.id
            for s in Subscription.objects.filter(recipient_id=h)
        ]

    # Generate different topics for each stream
    possible_topics = {}
    for stream_id in recipient_streams:
        possible_topics[stream_id] = generate_topics(options["max_topics"])

    message_batch_size = options["batch_size"]
    num_messages = 0
    random_max = 1000000
    recipients: Dict[int, Tuple[int, int, Dict[str, Any]]] = {}
    messages: List[Message] = []

    while num_messages < tot_messages:
        saved_data: Dict[str, Any] = {}
        message = Message()
        message.sending_client = get_client("populate_db")
        message.content = next(texts)

        randkey = random.randint(1, random_max)
        if (num_messages > 0
                and random.randint(1, random_max) * 100.0 / random_max <
                options["stickyness"]):
            # Use an old recipient
            message_type, recipient_id, saved_data = recipients[num_messages - 1]
            if message_type == Recipient.PERSONAL:
                personals_pair = saved_data["personals_pair"]
                random.shuffle(personals_pair)
            elif message_type == Recipient.STREAM:
                message.subject = saved_data["subject"]
                message.recipient = get_recipient_by_id(recipient_id)
            elif message_type == Recipient.HUDDLE:
                message.recipient = get_recipient_by_id(recipient_id)
        elif randkey <= random_max * options["percent_huddles"] / 100.0:
            message_type = Recipient.HUDDLE
            message.recipient = get_recipient_by_id(
                random.choice(recipient_huddles))
        elif (randkey <= random_max *
              (options["percent_huddles"] + options["percent_personals"]) /
              100.0):
            message_type = Recipient.PERSONAL
            personals_pair = random.choice(personals_pairs)
            random.shuffle(personals_pair)
        elif randkey <= random_max * 1.0:
            # Remaining probability mass: stream messages.
            message_type = Recipient.STREAM
            message.recipient = get_recipient_by_id(
                random.choice(recipient_streams))

        # Fill in sender (and, for personals, the recipient) per message type.
        if message_type == Recipient.HUDDLE:
            sender_id = random.choice(huddle_members[message.recipient.id])
            message.sender = get_user_profile_by_id(sender_id)
        elif message_type == Recipient.PERSONAL:
            message.recipient = Recipient.objects.get(
                type=Recipient.PERSONAL, type_id=personals_pair[0])
            message.sender = get_user_profile_by_id(personals_pair[1])
            saved_data["personals_pair"] = personals_pair
        elif message_type == Recipient.STREAM:
            # Pick a random subscriber to the stream
            message.sender = random.choice(
                list(Subscription.objects.filter(
                    recipient=message.recipient))).user_profile
            message.subject = random.choice(
                possible_topics[message.recipient.id])
            saved_data["subject"] = message.subject

        message.date_sent = choose_date_sent(num_messages, tot_messages,
                                             options["threads"])
        messages.append(message)

        # Remember this recipient so the next iteration can "stick" to it.
        recipients[num_messages] = (message_type, message.recipient.id,
                                    saved_data)
        num_messages += 1

        if (num_messages % message_batch_size) == 0:
            # Send the batch and empty the list:
            send_messages(messages)
            messages = []

    if len(messages) > 0:
        # If there are unsent messages after exiting the loop, send them:
        send_messages(messages)

    return tot_messages
def parse(self, stream, media_type=None, parser_context=None):
    """Read the request *stream*, decode it using the context's charset
    (falling back to settings.DEFAULT_CHARSET), and parse the result as JSON.
    """
    ctx = parser_context or {}
    charset = ctx.get('encoding', settings.DEFAULT_CHARSET)
    reader = codecs.getreader(charset)(stream)
    text = reader.read()
    return orjson.loads(text)
def handle(self, *args: Any, **options: Any) -> None:
    """Populate the development/test database.

    Creates realms, users, bots, streams, subscriptions, custom profile
    fields, and a corpus of messages, all driven by the command-line
    ``options`` (user/stream counts, huddle/personal percentages,
    ``--test-suite`` determinism mode, ``--delete`` for a clean start).
    """
    # Suppress spammy output from the push notifications logger
    push_notifications_logger.disabled = True
    if options["percent_huddles"] + options["percent_personals"] > 100:
        self.stderr.write("Error! More than 100% of messages allocated.\n")
        return

    # Get consistent data for backend tests.
    if options["test_suite"]:
        random.seed(0)

        with connection.cursor() as cursor:
            # Sometimes bugs relating to confusing recipient.id for recipient.type_id
            # or <object>.id for <object>.recipient_id remain undiscovered by the test suite
            # due to these numbers happening to coincide in such a way that it makes tests
            # accidentally pass. By bumping the Recipient.id sequence by a large enough number,
            # we can have those ids in a completely different range of values than object ids,
            # eliminating the possibility of such coincidences.
            cursor.execute("SELECT setval('zerver_recipient_id_seq', 100)")

    # If max_topics is not set, we set it proportional to the
    # number of messages.
    if options["max_topics"] is None:
        options["max_topics"] = 1 + options["num_messages"] // 100

    if options["delete"]:
        # Start by clearing all the data in our database
        clear_database()

        # Create our three default realms
        # Could in theory be done via zerver.lib.actions.do_create_realm, but
        # welcome-bot (needed for do_create_realm) hasn't been created yet
        create_internal_realm()
        zulip_realm = do_create_realm(
            string_id="zulip",
            name="Zulip Dev",
            emails_restricted_to_domains=False,
            email_address_visibility=Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            description="The Zulip development environment default organization."
            " It's great for testing!",
            invite_required=False,
            plan_type=Realm.SELF_HOSTED,
            org_type=Realm.ORG_TYPES["business"]["id"],
        )
        RealmDomain.objects.create(realm=zulip_realm, domain="zulip.com")
        assert zulip_realm.notifications_stream is not None
        zulip_realm.notifications_stream.name = "Verona"
        zulip_realm.notifications_stream.description = "A city in Italy"
        zulip_realm.notifications_stream.save(update_fields=["name", "description"])

        if options["test_suite"]:
            mit_realm = do_create_realm(
                string_id="zephyr",
                name="MIT",
                emails_restricted_to_domains=True,
                invite_required=False,
                plan_type=Realm.SELF_HOSTED,
                org_type=Realm.ORG_TYPES["business"]["id"],
            )
            RealmDomain.objects.create(realm=mit_realm, domain="mit.edu")

            lear_realm = do_create_realm(
                string_id="lear",
                name="Lear & Co.",
                emails_restricted_to_domains=False,
                invite_required=False,
                plan_type=Realm.SELF_HOSTED,
                org_type=Realm.ORG_TYPES["business"]["id"],
            )

            # Default to allowing all members to send mentions in
            # large streams for the test suite to keep
            # mention-related tests simple.
            zulip_realm.wildcard_mention_policy = Realm.WILDCARD_MENTION_POLICY_MEMBERS
            zulip_realm.save(update_fields=["wildcard_mention_policy"])

        # Create test Users (UserProfiles are automatically created,
        # as are subscriptions to the ability to receive personals).
        names = [
            ("Zoe", "*****@*****.**"),
            ("Othello, the Moor of Venice", "*****@*****.**"),
            ("Iago", "*****@*****.**"),
            ("Prospero from The Tempest", "*****@*****.**"),
            ("Cordelia, Lear's daughter", "*****@*****.**"),
            ("King Hamlet", "*****@*****.**"),
            ("aaron", "*****@*****.**"),
            ("Polonius", "*****@*****.**"),
            ("Desdemona", "*****@*****.**"),
            ("शिव", "*****@*****.**"),
        ]

        # For testing really large batches:
        # Create extra users with semi realistic names to make search
        # functions somewhat realistic. We'll still create 1000 users
        # like Extra222 User for some predictability.
        num_names = options["extra_users"]
        num_boring_names = 300

        for i in range(min(num_names, num_boring_names)):
            full_name = f"Extra{i:03} User"
            names.append((full_name, f"extrauser{i}@zulip.com"))

        if num_names > num_boring_names:
            fnames = [
                "Amber", "Arpita", "Bob", "Cindy", "Daniela", "Dan", "Dinesh",
                "Faye", "François", "George", "Hank", "Irene", "James",
                "Janice", "Jenny", "Jill", "John", "Kate", "Katelyn", "Kobe",
                "Lexi", "Manish", "Mark", "Matt", "Mayna", "Michael", "Pete",
                "Peter", "Phil", "Phillipa", "Preston", "Sally", "Scott",
                "Sandra", "Steve", "Stephanie", "Vera",
            ]
            mnames = ["de", "van", "von", "Shaw", "T."]
            lnames = [
                "Adams", "Agarwal", "Beal", "Benson", "Bonita", "Davis",
                "George", "Harden", "James", "Jones", "Johnson", "Jordan",
                "Lee", "Leonard", "Singh", "Smith", "Patel", "Towns", "Wall",
            ]
            non_ascii_names = [
                "Günter", "أحمد", "Magnús", "आशी", "イツキ", "语嫣", "அருண்",
                "Александр", "José",
            ]
            # to imitate emoji insertions in usernames
            raw_emojis = ["😎", "😂", "🐱👤"]

            for i in range(num_boring_names, num_names):
                fname = random.choice(fnames) + str(i)
                full_name = fname
                if random.random() < 0.7:
                    if random.random() < 0.3:
                        full_name += " " + random.choice(non_ascii_names)
                    else:
                        full_name += " " + random.choice(mnames)
                    if random.random() < 0.1:
                        full_name += f" {random.choice(raw_emojis)} "
                    else:
                        full_name += " " + random.choice(lnames)
                email = fname.lower() + "@zulip.com"
                names.append((full_name, email))

        create_users(zulip_realm, names, tos_version=settings.TOS_VERSION)

        # Promote Iago to administrator (and Django staff, for /activity etc.)
        iago = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_change_user_role(iago, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
        iago.is_staff = True
        iago.save(update_fields=["is_staff"])

        # We need to create at least two test drafts for Iago for the sake
        # of the cURL tests. Two since one will be deleted.
        Draft.objects.create(
            user_profile=iago,
            recipient=None,
            topic="Release Notes",
            content="Release 4.0 will contain ...",
            last_edit_time=datetime.now(),
        )
        Draft.objects.create(
            user_profile=iago,
            recipient=None,
            topic="Release Notes",
            content="Release 4.0 will contain many new features such as ... ",
            last_edit_time=datetime.now(),
        )

        desdemona = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_change_user_role(desdemona, UserProfile.ROLE_REALM_OWNER, acting_user=None)

        shiva = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_change_user_role(shiva, UserProfile.ROLE_MODERATOR, acting_user=None)

        guest_user = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        guest_user.role = UserProfile.ROLE_GUEST
        guest_user.save(update_fields=["role"])

        # These bots are directly referenced from code and thus
        # are needed for the test suite.
        zulip_realm_bots = [
            ("Zulip Error Bot", "*****@*****.**"),
            ("Zulip Default Bot", "*****@*****.**"),
        ]
        for i in range(options["extra_bots"]):
            zulip_realm_bots.append((f"Extra Bot {i}", f"extrabot{i}@zulip.com"))

        create_users(zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT)

        zoe = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        zulip_webhook_bots = [
            ("Zulip Webhook Bot", "*****@*****.**"),
        ]
        # If a stream is not supplied in the webhook URL, the webhook
        # will (in some cases) send the notification as a PM to the
        # owner of the webhook bot, so bot_owner can't be None
        create_users(
            zulip_realm,
            zulip_webhook_bots,
            bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
            bot_owner=zoe,
        )
        aaron = get_user_by_delivery_email("*****@*****.**", zulip_realm)

        zulip_outgoing_bots = [
            ("Outgoing Webhook", "*****@*****.**"),
        ]
        create_users(
            zulip_realm,
            zulip_outgoing_bots,
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            bot_owner=aaron,
        )
        outgoing_webhook = get_user("*****@*****.**", zulip_realm)
        add_service(
            "outgoing-webhook",
            user_profile=outgoing_webhook,
            interface=Service.GENERIC,
            base_url="http://127.0.0.1:5002",
            token=generate_api_key(),
        )

        # Add the realm internal bots to each realm.
        create_if_missing_realm_internal_bots()

        # Create public streams.
        signups_stream = Realm.INITIAL_PRIVATE_STREAM_NAME

        stream_list = [
            "Verona",
            "Denmark",
            "Scotland",
            "Venice",
            "Rome",
            signups_stream,
        ]
        stream_dict: Dict[str, Dict[str, Any]] = {
            "Denmark": {"description": "A Scandinavian country"},
            "Scotland": {"description": "Located in the United Kingdom"},
            "Venice": {"description": "A northeastern Italian city"},
            "Rome": {"description": "Yet another Italian city", "is_web_public": True},
        }

        bulk_create_streams(zulip_realm, stream_dict)
        recipient_streams: List[int] = [
            Stream.objects.get(name=name, realm=zulip_realm).id for name in stream_list
        ]

        # Create subscriptions to streams. The following
        # algorithm will give each of the users a different but
        # deterministic subset of the streams (given a fixed list
        # of users). For the test suite, we have a fixed list of
        # subscriptions to make sure test data is consistent
        # across platforms.

        subscriptions_list: List[Tuple[UserProfile, Recipient]] = []
        profiles: Sequence[UserProfile] = list(
            UserProfile.objects.select_related().filter(is_bot=False).order_by("email")
        )

        if options["test_suite"]:
            subscriptions_map = {
                "*****@*****.**": ["Verona"],
                "*****@*****.**": ["Verona"],
                "*****@*****.**": ["Verona", "Denmark", signups_stream],
                "*****@*****.**": [
                    "Verona",
                    "Denmark",
                    "Scotland",
                    signups_stream,
                ],
                "*****@*****.**": ["Verona", "Denmark", "Scotland"],
                "*****@*****.**": ["Verona", "Denmark", "Scotland", "Venice"],
                "*****@*****.**": ["Verona", "Denmark", "Scotland", "Venice", "Rome"],
                "*****@*****.**": ["Verona"],
                "*****@*****.**": [
                    "Verona",
                    "Denmark",
                    "Venice",
                    signups_stream,
                ],
                "*****@*****.**": ["Verona", "Denmark", "Scotland"],
            }

            for profile in profiles:
                email = profile.delivery_email
                if email not in subscriptions_map:
                    raise Exception(f"Subscriptions not listed for user {email}")

                for stream_name in subscriptions_map[email]:
                    stream = Stream.objects.get(name=stream_name, realm=zulip_realm)
                    r = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
                    subscriptions_list.append((profile, r))
        else:
            num_streams = len(recipient_streams)
            num_users = len(profiles)
            for i, profile in enumerate(profiles):
                # Subscribe to some streams.
                # Earlier (alphabetically) users get fewer streams; at least one each.
                fraction = float(i) / num_users
                num_recips = int(num_streams * fraction) + 1

                for type_id in recipient_streams[:num_recips]:
                    r = Recipient.objects.get(type=Recipient.STREAM, type_id=type_id)
                    subscriptions_list.append((profile, r))

        subscriptions_to_add: List[Subscription] = []
        event_time = timezone_now()
        all_subscription_logs: List[RealmAuditLog] = []

        i = 0
        for profile, recipient in subscriptions_list:
            i += 1
            color = STREAM_ASSIGNMENT_COLORS[i % len(STREAM_ASSIGNMENT_COLORS)]
            s = Subscription(
                recipient=recipient,
                user_profile=profile,
                is_user_active=profile.is_active,
                color=color,
            )

            subscriptions_to_add.append(s)

            log = RealmAuditLog(
                realm=profile.realm,
                modified_user=profile,
                modified_stream_id=recipient.type_id,
                event_last_message_id=0,
                event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                event_time=event_time,
            )
            all_subscription_logs.append(log)

        Subscription.objects.bulk_create(subscriptions_to_add)
        RealmAuditLog.objects.bulk_create(all_subscription_logs)

        # Create custom profile field data
        phone_number = try_add_realm_custom_profile_field(
            zulip_realm, "Phone number", CustomProfileField.SHORT_TEXT, hint=""
        )
        biography = try_add_realm_custom_profile_field(
            zulip_realm,
            "Biography",
            CustomProfileField.LONG_TEXT,
            hint="What are you known for?",
        )
        favorite_food = try_add_realm_custom_profile_field(
            zulip_realm,
            "Favorite food",
            CustomProfileField.SHORT_TEXT,
            hint="Or drink, if you'd prefer",
        )
        field_data: ProfileFieldData = {
            "vim": {"text": "Vim", "order": "1"},
            "emacs": {"text": "Emacs", "order": "2"},
        }
        favorite_editor = try_add_realm_custom_profile_field(
            zulip_realm, "Favorite editor", CustomProfileField.SELECT, field_data=field_data
        )
        birthday = try_add_realm_custom_profile_field(
            zulip_realm, "Birthday", CustomProfileField.DATE
        )
        favorite_website = try_add_realm_custom_profile_field(
            zulip_realm,
            "Favorite website",
            CustomProfileField.URL,
            hint="Or your personal blog's URL",
        )
        mentor = try_add_realm_custom_profile_field(
            zulip_realm, "Mentor", CustomProfileField.USER
        )
        github_profile = try_add_realm_default_custom_profile_field(zulip_realm, "github")

        # Fill in values for Iago and Hamlet
        hamlet = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_update_user_custom_profile_data_if_changed(
            iago,
            [
                {"id": phone_number.id, "value": "+1-234-567-8901"},
                {"id": biography.id, "value": "Betrayer of Othello."},
                {"id": favorite_food.id, "value": "Apples"},
                {"id": favorite_editor.id, "value": "emacs"},
                {"id": birthday.id, "value": "2000-01-01"},
                {"id": favorite_website.id, "value": "https://zulip.readthedocs.io/en/latest/"},
                {"id": mentor.id, "value": [hamlet.id]},
                {"id": github_profile.id, "value": "zulip"},
            ],
        )
        do_update_user_custom_profile_data_if_changed(
            hamlet,
            [
                {"id": phone_number.id, "value": "+0-11-23-456-7890"},
                {
                    "id": biography.id,
                    "value": "I am:\n* The prince of Denmark\n* Nephew to the usurping Claudius",
                },
                {"id": favorite_food.id, "value": "Dark chocolate"},
                {"id": favorite_editor.id, "value": "vim"},
                {"id": birthday.id, "value": "1900-01-01"},
                {"id": favorite_website.id, "value": "https://blog.zulig.org"},
                {"id": mentor.id, "value": [iago.id]},
                {"id": github_profile.id, "value": "zulipbot"},
            ],
        )
    else:
        # Not --delete: reuse the existing zulip realm and its streams.
        zulip_realm = get_realm("zulip")
        recipient_streams = [
            klass.type_id for klass in Recipient.objects.filter(type=Recipient.STREAM)
        ]

    # Extract a list of all users
    user_profiles: List[UserProfile] = list(UserProfile.objects.filter(is_bot=False))

    # Create a test realm emoji.
    IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
    with open(IMAGE_FILE_PATH, "rb") as fp:
        check_add_realm_emoji(zulip_realm, "green_tick", iago, File(fp))

    if not options["test_suite"]:
        # Populate users with some bar data
        for user in user_profiles:
            status: int = UserPresence.ACTIVE
            date = timezone_now()
            client = get_client("website")
            # Give roughly half the users a mobile client, keyed off their name.
            if user.full_name[0] <= "H":
                client = get_client("ZulipAndroid")
            UserPresence.objects.get_or_create(
                user_profile=user,
                realm_id=user.realm_id,
                client=client,
                timestamp=date,
                status=status,
            )

    user_profiles_ids = [user_profile.id for user_profile in user_profiles]

    # Create several initial huddles
    for i in range(options["num_huddles"]):
        get_huddle(random.sample(user_profiles_ids, random.randint(3, 4)))

    # Create several initial pairs for personals
    personals_pairs = [
        random.sample(user_profiles_ids, 2) for i in range(options["num_personals"])
    ]

    create_alert_words(zulip_realm.id)

    # Generate a new set of test data.
    create_test_data()

    # prepopulate the URL preview/embed data for the links present
    # in the config.generate_data.json data set. This makes it
    # possible for populate_db to run happily without Internet
    # access.
    with open("zerver/tests/fixtures/docs_url_preview_data.json", "rb") as f:
        urls_with_preview_data = orjson.loads(f.read())
        for url in urls_with_preview_data:
            cache_set(url, urls_with_preview_data[url], PREVIEW_CACHE_NAME)

    if options["delete"]:
        if options["test_suite"]:
            # Create test users; the MIT ones are needed to test
            # the Zephyr mirroring codepaths.
            testsuite_mit_users = [
                ("Fred Sipb (MIT)", "*****@*****.**"),
                ("Athena Consulting Exchange User (MIT)", "*****@*****.**"),
                ("Esp Classroom (MIT)", "*****@*****.**"),
            ]
            create_users(mit_realm, testsuite_mit_users, tos_version=settings.TOS_VERSION)

            testsuite_lear_users = [
                ("King Lear", "*****@*****.**"),
                ("Cordelia, Lear's daughter", "*****@*****.**"),
            ]
            create_users(lear_realm, testsuite_lear_users, tos_version=settings.TOS_VERSION)

        if not options["test_suite"]:
            # To keep the messages.json fixtures file for the test
            # suite fast, don't add these users and subscriptions
            # when running populate_db for the test suite

            # to imitate emoji insertions in stream names
            raw_emojis = ["😎", "😂", "🐱👤"]

            zulip_stream_dict: Dict[str, Dict[str, Any]] = {
                "devel": {"description": "For developing"},
                # ビデオゲーム - VideoGames (japanese)
                "ビデオゲーム": {
                    "description": f"Share your favorite video games! {raw_emojis[2]}"
                },
                "announce": {
                    "description": "For announcements",
                    "stream_post_policy": Stream.STREAM_POST_POLICY_ADMINS,
                },
                "design": {"description": "For design"},
                "support": {"description": "For support"},
                "social": {"description": "For socializing"},
                "test": {"description": "For testing `code`"},
                "errors": {"description": "For errors"},
                # 조리법 - Recipes (Korean) , Пельмени - Dumplings (Russian)
                "조리법 " + raw_emojis[0]: {
                    "description": "Everything cooking, from pasta to Пельмени"
                },
            }
            extra_stream_names = [
                "802.11a", "Ad Hoc Network", "Augmented Reality", "Cycling",
                "DPI", "FAQ", "FiFo", "commits", "Control panel", "desktop",
                "компьютеры", "Data security", "desktop", "काम", "discussions",
                "Cloud storage", "GCI", "Vaporware", "Recent Trends", "issues",
                "live", "Health", "mobile", "空間", "provision", "hidrógeno",
                "HR", "アニメ",
            ]

            # Add stream names and stream descriptions
            for i in range(options["extra_streams"]):
                extra_stream_name = random.choice(extra_stream_names) + " " + str(i)

                # to imitate emoji insertions in stream names
                if random.random() <= 0.15:
                    extra_stream_name += random.choice(raw_emojis)

                zulip_stream_dict[extra_stream_name] = {
                    "description": "Auto-generated extra stream.",
                }

            bulk_create_streams(zulip_realm, zulip_stream_dict)
            # Now that we've created the notifications stream, configure it properly.
            zulip_realm.notifications_stream = get_stream("announce", zulip_realm)
            zulip_realm.save(update_fields=["notifications_stream"])

            # Add a few default streams
            for default_stream_name in ["design", "devel", "social", "support"]:
                DefaultStream.objects.create(
                    realm=zulip_realm,
                    stream=get_stream(default_stream_name, zulip_realm),
                )

            # Now subscribe everyone to these streams
            subscribe_users_to_streams(zulip_realm, zulip_stream_dict)

        create_user_groups()

        if not options["test_suite"]:
            # We populate the analytics database here for
            # development purpose only
            call_command("populate_analytics_db")

    # Split the requested number of messages into per-thread job tuples,
    # distributing the remainder one message at a time over the first jobs.
    threads = options["threads"]
    jobs: List[Tuple[int, List[List[int]], Dict[str, Any], int]] = []
    for i in range(threads):
        count = options["num_messages"] // threads
        if i < options["num_messages"] % threads:
            count += 1
        jobs.append((count, personals_pairs, options, random.randint(0, 10**10)))

    for job in jobs:
        generate_and_send_messages(job)

    if options["delete"]:
        if not options["test_suite"]:
            # These bots are not needed by the test suite
            # Also, we don't want them interacting with each other
            # in dev setup.
            internal_zulip_users_nosubs = [
                ("Zulip Commit Bot", "*****@*****.**"),
                ("Zulip Trac Bot", "*****@*****.**"),
                ("Zulip Nagios Bot", "*****@*****.**"),
            ]
            create_users(
                zulip_realm, internal_zulip_users_nosubs, bot_type=UserProfile.DEFAULT_BOT
            )

    mark_all_messages_as_read()
    self.stdout.write("Successfully populated test database.\n")

    push_notifications_logger.disabled = False
def test_create_external_account_field(self) -> None:
    """Exercise creation and validation of EXTERNAL_ACCOUNT custom profile fields.

    Walks through invalid field_data payloads (bad JSON value, missing or
    blank subtype, unknown subtype, custom subtype without/with a broken
    url_pattern) asserting the specific error for each, then creates valid
    "twitter" and "custom" fields and verifies the stored rows.
    """
    self.login("iago")
    realm = get_realm("zulip")
    data: Dict[str, Union[str, int, Dict[str, str]]] = {}
    data["name"] = "Twitter"
    data["field_type"] = CustomProfileField.EXTERNAL_ACCOUNT

    # field_data must be a JSON object, not a bare string.
    data["field_data"] = "invalid"
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "Bad value for 'field_data': invalid")

    # Missing subtype key.
    data["field_data"] = orjson.dumps({}).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "subtype key is missing from field_data")

    # Blank subtype.
    data["field_data"] = orjson.dumps({
        "subtype": "",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, 'field_data["subtype"] cannot be blank.')

    # Unknown subtype.
    data["field_data"] = orjson.dumps({
        "subtype": "123",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "Invalid external account type")

    # A real service that is nevertheless not in the default set.
    non_default_external_account = "linkedin"
    data["field_data"] = orjson.dumps({
        "subtype": non_default_external_account,
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "Invalid external account type")

    # Valid default subtype succeeds.
    data["field_data"] = orjson.dumps({
        "subtype": "twitter",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_success(result)

    twitter_field = CustomProfileField.objects.get(name="Twitter", realm=realm)
    self.assertEqual(twitter_field.field_type, CustomProfileField.EXTERNAL_ACCOUNT)
    self.assertEqual(twitter_field.name, "Twitter")
    self.assertEqual(orjson.loads(twitter_field.field_data)["subtype"], "twitter")

    # "custom" subtype requires a url_pattern ...
    data["name"] = "Reddit"
    data["field_data"] = orjson.dumps({
        "subtype": "custom",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "Custom external account must define URL pattern")

    # ... which must be a string ...
    data["field_data"] = orjson.dumps({
        "subtype": "custom",
        "url_pattern": 123,
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, 'field_data["url_pattern"] is not a string')

    # ... containing exactly one %(username)s placeholder ...
    data["field_data"] = orjson.dumps({
        "subtype": "custom",
        "url_pattern": "invalid",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "Malformed URL pattern.")

    data["field_data"] = orjson.dumps({
        "subtype": "custom",
        "url_pattern": "https://www.reddit.com/%(username)s/user/%(username)s",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "Malformed URL pattern.")

    # ... and must be an absolute URL.
    data["field_data"] = orjson.dumps({
        "subtype": "custom",
        "url_pattern": "reddit.com/%(username)s",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, 'field_data["url_pattern"] is not a URL')

    # A well-formed custom pattern succeeds.
    data["field_data"] = orjson.dumps({
        "subtype": "custom",
        "url_pattern": "https://www.reddit.com/user/%(username)s",
    }).decode()
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_success(result)

    custom_field = CustomProfileField.objects.get(name="Reddit", realm=realm)
    self.assertEqual(custom_field.field_type, CustomProfileField.EXTERNAL_ACCOUNT)
    self.assertEqual(custom_field.name, "Reddit")
    field_data = orjson.loads(custom_field.field_data)
    self.assertEqual(field_data["subtype"], "custom")
    self.assertEqual(field_data["url_pattern"], "https://www.reddit.com/user/%(username)s")

    # Re-posting the same name is rejected as a duplicate.
    result = self.client_post("/json/realm/profile_fields", info=data)
    self.assert_json_error(result, "A field with that label already exists.")
def do_test_realm_update_api(self, name: str) -> None:
    """Test updating the realm property *name* via the API.

    If new realm properties have been added to the Realm model but the
    test_values dict below has not been updated, this will raise an
    assertion error.
    """
    bool_tests: List[bool] = [False, True]
    # At least two distinct values per property so the round-trip
    # below can observe an actual change.
    test_values: Dict[str, Any] = dict(
        default_language=['de', 'en'],
        default_code_block_language=['javascript', ''],
        description=['Realm description', 'New description'],
        digest_weekday=[0, 1, 2],
        message_retention_days=[10, 20],
        name=['Zulip', 'New Name'],
        waiting_period_threshold=[10, 20],
        create_stream_policy=[
            Realm.POLICY_ADMINS_ONLY,
            Realm.POLICY_MEMBERS_ONLY,
            Realm.POLICY_FULL_MEMBERS_ONLY,
        ],
        user_group_edit_policy=[
            Realm.USER_GROUP_EDIT_POLICY_ADMINS,
            Realm.USER_GROUP_EDIT_POLICY_MEMBERS,
        ],
        private_message_policy=[
            Realm.PRIVATE_MESSAGE_POLICY_UNLIMITED,
            Realm.PRIVATE_MESSAGE_POLICY_DISABLED,
        ],
        invite_to_stream_policy=[
            Realm.POLICY_ADMINS_ONLY,
            Realm.POLICY_MEMBERS_ONLY,
            Realm.POLICY_FULL_MEMBERS_ONLY,
        ],
        wildcard_mention_policy=[
            Realm.WILDCARD_MENTION_POLICY_EVERYONE,
            Realm.WILDCARD_MENTION_POLICY_FULL_MEMBERS,
            Realm.WILDCARD_MENTION_POLICY_ADMINS,
        ],
        bot_creation_policy=[1, 2],
        email_address_visibility=[
            Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            Realm.EMAIL_ADDRESS_VISIBILITY_NOBODY,
        ],
        # video_chat_provider is submitted as a JSON-encoded value, so it
        # goes through the multiple-value API helper below.
        video_chat_provider=[
            dict(
                video_chat_provider=orjson.dumps(
                    Realm.VIDEO_CHAT_PROVIDERS['jitsi_meet']['id']
                ).decode(),
            ),
        ],
        message_content_delete_limit_seconds=[1000, 1100, 1200],
    )

    vals = test_values.get(name)
    # Boolean properties all share the same two test values.
    if Realm.property_types[name] is bool:
        vals = bool_tests
    if vals is None:
        raise AssertionError(f'No test created for {name}')

    if name == 'video_chat_provider':
        self.set_up_db(name, vals[0][name])
        realm = self.update_with_api_multiple_value(vals[0])
        self.assertEqual(getattr(realm, name), orjson.loads(vals[0][name]))
    else:
        # Seed the DB with vals[0], flip to vals[1] and back, checking
        # the persisted value after each update.
        self.set_up_db(name, vals[0])
        realm = self.update_with_api(name, vals[1])
        self.assertEqual(getattr(realm, name), vals[1])
        realm = self.update_with_api(name, vals[0])
        self.assertEqual(getattr(realm, name), vals[0])
def _dumps(serialized_data):
    """Round-trip *serialized_data* through orjson, normalizing floats.

    The payload is decoded, repaired in place by ``_fix_floats``, and
    re-serialized to a UTF-8 string.
    """
    payload = orjson.loads(serialized_data)
    _fix_floats(payload)
    return orjson.dumps(payload).decode("utf-8")
def load_json(self, file: str):
    """Read *file* and return its parsed JSON contents.

    Args:
        file: Path to a JSON document on disk.

    Returns:
        The deserialized Python object.

    The file is decoded as UTF-8 (the JSON interchange encoding) rather
    than the platform-default encoding, and parsed directly from the
    stream with ``json.load`` instead of slurping it into a string first.
    """
    with open(file, 'r', encoding='utf-8') as f:
        return json.load(f)
def read(self):
    """Lazily yield one parsed JSON document per line of ``self.fh``.

    The underlying binary handle is wrapped in a UTF-8 text layer and
    consumed incrementally, so arbitrarily large files stream cleanly.
    """
    text_stream = io.TextIOWrapper(self.fh, encoding="utf-8")
    for raw_line in text_stream:
        yield orjson.loads(raw_line)
def deserialize(self, data):
    """Parse *data* (JSON as bytes or str) into the corresponding Python object."""
    parsed = orjson.loads(data)
    return parsed
"title": "First Stars IV" }], "urls": [{ "description": "web page", "value": "http://tpweb2.phys.konan-u.ac.jp/~FirstStar4/", }], "number_of_contributions": 0, } expected_created = utils.isoformat(record.created) expected_updated = utils.isoformat(record.updated) with inspire_app.test_client() as client: response = client.get(f"/conferences/{record_control_number}", headers=headers) response_data = orjson.loads(response.data) response_data_metadata = response_data["metadata"] response_created = response_data["created"] response_updated = response_data["updated"] assert expected_metadata == response_data_metadata assert expected_created == response_created assert expected_updated == response_updated def test_conferences_json_with_logged_in_cataloger(inspire_app): user = create_user(role=Roles.cataloger.value) headers = {"Accept": "application/json"} data = {
def notify(request: HttpRequest) -> HttpResponse:
    """Handle an internal notification POST.

    The JSON-encoded event is read from the ``data`` POST field and
    handed to ``process_notification``; an empty success response is
    returned to the caller.
    """
    event = orjson.loads(request.POST['data'])
    process_notification(event)
    return json_success()
def cron_trigger(self) -> schemas.ScheduleCronTrigger:
    """Deserialize the stored cron-trigger JSON string into its schema object."""
    raw = self.cron_trigger_str
    return orjson.loads(raw)
def batch_prediction(model_name):
    """
    ---
    post:
      description: >
        Post a batch of bug ids to classify, answer either 200 if all bugs are
        processed or 202 if at least one bug is not processed.
        <br/><br/>
        Starts by sending a batch of bugs ids like this:<br/>
        ```
        {"bugs": [123, 456]}
        ```<br/><br>
        You will likely get a 202 answer that indicates that no result is
        available yet for any of the bug id you provided with the following
        body:<br/>
        ```
        {"bugs": {"123": {ready: False}, "456": {ready: False}}}
        ```<br/><br/>
        Call back the same endpoint with the same bug ids a bit later, and you
        will get the results.<br/><br/>
        You might get the following output if some bugs are not available:
        <br/>
        ```
        {"bugs": {"123": {"available": False}}}
        ```<br/><br/>
        And you will get the following output once the bugs are available:
        <br/>
        ```
        {"bugs": {"456": {"extra_data": {}, "index": 0, "prob": [0], "suggestion": ""}}}
        ```<br/><br/>
        Please be aware that each bug could be in a different state, so the
        following output, where a bug is returned and another one is still
        being processed, is valid:
        <br/>
        ```
        {"bugs": {"123": {"available": False}, "456": {"extra_data": {}, "index": 0, "prob": [0], "suggestion": ""}}}
        ```
      summary: Classify a batch of bugs
      parameters:
      - name: model_name
        in: path
        schema: ModelName
      requestBody:
        description: The list of bugs to classify
        content:
          application/json:
            schema:
              type: object
              properties:
                bugs:
                  type: array
                  items:
                    type: integer
            examples:
              cat:
                summary: An example of payload
                value:
                  bugs: [123456, 789012]
      responses:
        200:
          description: A list of results
          content:
            application/json:
              schema:
                type: object
                additionalProperties: true
              example:
                bugs:
                  123456:
                    extra_data: {}
                    index: 0
                    prob: [0]
                    suggestion: string
                  789012:
                    extra_data: {}
                    index: 0
                    prob: [0]
                    suggestion: string
        202:
          description: A temporary answer for bugs being processed
          content:
            application/json:
              schema:
                type: object
                items:
                  type: object
                  properties:
                    ready:
                      type: boolean
                      enum: [False]
              example:
                bugs:
                  123456:
                    extra_data: {}
                    index: 0
                    prob: [0]
                    suggestion: string
                  789012: {ready: False}
        401:
          description: API key is missing
          content:
            application/json:
              schema: UnauthorizedError
    """
    # Reject unauthenticated requests up front.
    headers = request.headers
    auth = headers.get(API_TOKEN)
    if not auth:
        return jsonify(UnauthorizedError().dump({})), 401
    else:
        LOGGER.info("Request with API TOKEN %r", auth)

    if model_name not in MODELS_NAMES:
        return jsonify({"error": f"Model {model_name} doesn't exist"}), 404

    # TODO Check if JSON is valid and validate against a request schema
    batch_body = orjson.loads(request.data)

    # Validate: a non-empty list of at most 1000 integer bug ids.
    schema = {
        "bugs": {
            "type": "list",
            "minlength": 1,
            "maxlength": 1000,
            "schema": {"type": "integer"},
        }
    }
    validator = Validator()
    if not validator.validate(batch_body, schema):
        return jsonify({"errors": validator.errors}), 400

    bugs = batch_body["bugs"]

    status_code = 200
    data = {}
    missing_bugs = []

    bug_change_dates = get_bugs_last_change_time(bugs)

    for bug_id in bugs:
        job = JobInfo(classify_bug, model_name, bug_id)

        change_time = bug_change_dates.get(int(bug_id), None)
        # Change time could be None if it's a security bug
        if change_time and is_prediction_invalidated(job, change_time):
            clean_prediction_cache(job)

        data[str(bug_id)] = get_result(job)
        # No cached result: report "not ready" (202) and, unless the job is
        # already queued, remember the bug so it gets scheduled below.
        if not data[str(bug_id)]:
            if not is_pending(job):
                missing_bugs.append(bug_id)
            status_code = 202
            data[str(bug_id)] = {"ready": False}

    if missing_bugs:
        # TODO: We should probably schedule chunks of bugs to avoid jobs that
        # are running for too long and reduce pressure on bugzilla, it might
        # not like getting 1 million bug at a time
        schedule_bug_classification(model_name, missing_bugs)

    return jsonify({"bugs": data}), status_code
def test_get_events(self) -> None:
    """End-to-end test of the Tornado ``get_events`` endpoint.

    Registers event queues for a sender (hamlet) and a recipient
    (othello), sends two private messages, and verifies that the
    sender's queue tags its own messages with ``local_message_id``
    while the recipient's queue receives both messages without it.
    """
    user_profile = self.example_user("hamlet")
    email = user_profile.email
    recipient_user_profile = self.example_user("othello")
    recipient_email = recipient_user_profile.email
    self.login_user(user_profile)

    # Register an event queue for the sender.
    result = self.tornado_call(
        get_events,
        user_profile,
        {
            "apply_markdown": orjson.dumps(True).decode(),
            "client_gravatar": orjson.dumps(True).decode(),
            "event_types": orjson.dumps(["message"]).decode(),
            "user_client": "website",
            "dont_block": orjson.dumps(True).decode(),
        },
    )
    self.assert_json_success(result)
    queue_id = orjson.loads(result.content)["queue_id"]

    # Register an event queue for the recipient.
    recipient_result = self.tornado_call(
        get_events,
        recipient_user_profile,
        {
            "apply_markdown": orjson.dumps(True).decode(),
            "client_gravatar": orjson.dumps(True).decode(),
            "event_types": orjson.dumps(["message"]).decode(),
            "user_client": "website",
            "dont_block": orjson.dumps(True).decode(),
        },
    )
    self.assert_json_success(recipient_result)
    recipient_queue_id = orjson.loads(recipient_result.content)["queue_id"]

    # The sender's queue starts out empty.
    result = self.tornado_call(
        get_events,
        user_profile,
        {
            "queue_id": queue_id,
            "user_client": "website",
            "last_event_id": -1,
            "dont_block": orjson.dumps(True).decode(),
        },
    )
    events = orjson.loads(result.content)["events"]
    self.assert_json_success(result)
    self.assert_length(events, 0)

    # Send a PM from the sender's own queue; its event should carry
    # the local id for client-side echo reconciliation.
    local_id = "10.01"
    check_send_message(
        sender=user_profile,
        client=get_client("whatever"),
        message_type_name="private",
        message_to=[recipient_email],
        topic_name=None,
        message_content="hello",
        local_id=local_id,
        sender_queue_id=queue_id,
    )

    result = self.tornado_call(
        get_events,
        user_profile,
        {
            "queue_id": queue_id,
            "user_client": "website",
            "last_event_id": -1,
            "dont_block": orjson.dumps(True).decode(),
        },
    )
    events = orjson.loads(result.content)["events"]
    self.assert_json_success(result)
    self.assert_length(events, 1)
    self.assertEqual(events[0]["type"], "message")
    self.assertEqual(events[0]["message"]["sender_email"], email)
    self.assertEqual(events[0]["local_message_id"], local_id)
    self.assertEqual(
        events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False
    )
    self.assertEqual(
        events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False
    )

    last_event_id = events[0]["id"]
    local_id = "10.02"

    # A second message, fetched incrementally via last_event_id.
    check_send_message(
        sender=user_profile,
        client=get_client("whatever"),
        message_type_name="private",
        message_to=[recipient_email],
        topic_name=None,
        message_content="hello",
        local_id=local_id,
        sender_queue_id=queue_id,
    )

    result = self.tornado_call(
        get_events,
        user_profile,
        {
            "queue_id": queue_id,
            "user_client": "website",
            "last_event_id": last_event_id,
            "dont_block": orjson.dumps(True).decode(),
        },
    )
    events = orjson.loads(result.content)["events"]
    self.assert_json_success(result)
    self.assert_length(events, 1)
    self.assertEqual(events[0]["type"], "message")
    self.assertEqual(events[0]["message"]["sender_email"], email)
    self.assertEqual(events[0]["local_message_id"], local_id)

    # Test that the received message in the receiver's event queue
    # exists and does not contain a local id
    recipient_result = self.tornado_call(
        get_events,
        recipient_user_profile,
        {
            "queue_id": recipient_queue_id,
            "user_client": "website",
            "last_event_id": -1,
            "dont_block": orjson.dumps(True).decode(),
        },
    )
    recipient_events = orjson.loads(recipient_result.content)["events"]
    self.assert_json_success(recipient_result)
    self.assertEqual(len(recipient_events), 2)
    self.assertEqual(recipient_events[0]["type"], "message")
    self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
    self.assertTrue("local_message_id" not in recipient_events[0])
    self.assertEqual(recipient_events[1]["type"], "message")
    self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
    self.assertTrue("local_message_id" not in recipient_events[1])
def handle(self, **options: Any) -> None:
    """Populate the development/test database with realms, users, streams,
    subscriptions, custom profile fields, and generated messages.

    Behavior is driven by command-line ``options``:
    * ``delete`` — wipe the database and recreate the default realms/users.
    * ``test_suite`` — produce the fixed, deterministic data set used by the
      backend test suite (seeds ``random`` with 0, uses a fixed
      subscriptions map, creates the MIT/Lear test users).
    * ``num_messages`` / ``threads`` — how many messages to generate and
      across how many jobs.
    * ``extra_users`` / ``extra_bots`` / ``extra_streams`` — bulk filler
      data for large-scale testing.
    """
    # Huddle + personal percentages are slices of the same 100% budget.
    if options["percent_huddles"] + options["percent_personals"] > 100:
        self.stderr.write("Error! More than 100% of messages allocated.\n")
        return

    # Get consistent data for backend tests.
    if options["test_suite"]:
        random.seed(0)

    # If max_topics is not set, we set it proportional to the
    # number of messages.
    if options["max_topics"] is None:
        options["max_topics"] = 1 + options["num_messages"] // 100

    if options["delete"]:
        # Start by clearing all the data in our database
        clear_database()

        # Create our three default realms
        # Could in theory be done via zerver.lib.actions.do_create_realm, but
        # welcome-bot (needed for do_create_realm) hasn't been created yet
        create_internal_realm()
        zulip_realm = Realm.objects.create(
            string_id="zulip",
            name="Zulip Dev",
            emails_restricted_to_domains=False,
            email_address_visibility=Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            description="The Zulip development environment default organization."
            " It's great for testing!",
            invite_required=False,
            org_type=Realm.CORPORATE)
        RealmDomain.objects.create(realm=zulip_realm, domain="zulip.com")
        if options["test_suite"]:
            mit_realm = Realm.objects.create(
                string_id="zephyr",
                name="MIT",
                emails_restricted_to_domains=True,
                invite_required=False,
                org_type=Realm.CORPORATE)
            RealmDomain.objects.create(realm=mit_realm, domain="mit.edu")

            lear_realm = Realm.objects.create(
                string_id="lear",
                name="Lear & Co.",
                emails_restricted_to_domains=False,
                invite_required=False,
                org_type=Realm.CORPORATE)

        # Create test Users (UserProfiles are automatically created,
        # as are subscriptions to the ability to receive personals).
        names = [
            ("Zoe", "*****@*****.**"),
            ("Othello, the Moor of Venice", "*****@*****.**"),
            ("Iago", "*****@*****.**"),
            ("Prospero from The Tempest", "*****@*****.**"),
            ("Cordelia Lear", "*****@*****.**"),
            ("King Hamlet", "*****@*****.**"),
            ("aaron", "*****@*****.**"),
            ("Polonius", "*****@*****.**"),
            ("Desdemona", "*****@*****.**"),
        ]

        # For testing really large batches:
        # Create extra users with semi realistic names to make search
        # functions somewhat realistic. We'll still create 1000 users
        # like Extra222 User for some predicability.
        num_names = options['extra_users']
        num_boring_names = 300

        for i in range(min(num_names, num_boring_names)):
            full_name = f'Extra{i:03} User'
            names.append((full_name, f'extrauser{i}@zulip.com'))

        if num_names > num_boring_names:
            fnames = [
                'Amber', 'Arpita', 'Bob', 'Cindy', 'Daniela', 'Dan', 'Dinesh',
                'Faye', 'François', 'George', 'Hank', 'Irene', 'James',
                'Janice', 'Jenny', 'Jill', 'John', 'Kate', 'Katelyn', 'Kobe',
                'Lexi', 'Manish', 'Mark', 'Matt', 'Mayna', 'Michael', 'Pete',
                'Peter', 'Phil', 'Phillipa', 'Preston', 'Sally', 'Scott',
                'Sandra', 'Steve', 'Stephanie', 'Vera'
            ]
            mnames = ['de', 'van', 'von', 'Shaw', 'T.']
            lnames = [
                'Adams', 'Agarwal', 'Beal', 'Benson', 'Bonita', 'Davis',
                'George', 'Harden', 'James', 'Jones', 'Johnson', 'Jordan',
                'Lee', 'Leonard', 'Singh', 'Smith', 'Patel', 'Towns', 'Wall'
            ]
            for i in range(num_boring_names, num_names):
                fname = random.choice(fnames) + str(i)
                full_name = fname
                if random.random() < 0.7:
                    if random.random() < 0.5:
                        full_name += ' ' + random.choice(mnames)
                    full_name += ' ' + random.choice(lnames)
                email = fname.lower() + '@zulip.com'
                names.append((full_name, email))

        create_users(zulip_realm, names, tos_version=settings.TOS_VERSION)

        # Promote Iago to a staff realm administrator.
        iago = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_change_user_role(iago, UserProfile.ROLE_REALM_ADMINISTRATOR,
                            acting_user=None)
        iago.is_staff = True
        iago.save(update_fields=['is_staff'])

        desdemona = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_change_user_role(desdemona, UserProfile.ROLE_REALM_OWNER,
                            acting_user=None)

        guest_user = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        guest_user.role = UserProfile.ROLE_GUEST
        guest_user.save(update_fields=['role'])

        # These bots are directly referenced from code and thus
        # are needed for the test suite.
        zulip_realm_bots = [
            ("Zulip Error Bot", "*****@*****.**"),
            ("Zulip Default Bot", "*****@*****.**"),
        ]
        for i in range(options["extra_bots"]):
            zulip_realm_bots.append(
                (f'Extra Bot {i}', f'extrabot{i}@zulip.com'))

        create_users(zulip_realm, zulip_realm_bots,
                     bot_type=UserProfile.DEFAULT_BOT)

        zoe = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        zulip_webhook_bots = [
            ("Zulip Webhook Bot", "*****@*****.**"),
        ]
        # If a stream is not supplied in the webhook URL, the webhook
        # will (in some cases) send the notification as a PM to the
        # owner of the webhook bot, so bot_owner can't be None
        create_users(zulip_realm, zulip_webhook_bots,
                     bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
                     bot_owner=zoe)

        aaron = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        zulip_outgoing_bots = [
            ("Outgoing Webhook", "*****@*****.**"),
        ]
        create_users(zulip_realm, zulip_outgoing_bots,
                     bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
                     bot_owner=aaron)
        outgoing_webhook = get_user("*****@*****.**", zulip_realm)
        add_service("outgoing-webhook",
                    user_profile=outgoing_webhook,
                    interface=Service.GENERIC,
                    base_url="http://127.0.0.1:5002",
                    token=generate_api_key())

        # Add the realm internal bots to each realm.
        create_if_missing_realm_internal_bots()

        # Create public streams.
        stream_list = ["Verona", "Denmark", "Scotland", "Venice", "Rome"]
        stream_dict: Dict[str, Dict[str, Any]] = {
            "Verona": {
                "description": "A city in Italy"
            },
            "Denmark": {
                "description": "A Scandinavian country"
            },
            "Scotland": {
                "description": "Located in the United Kingdom"
            },
            "Venice": {
                "description": "A northeastern Italian city"
            },
            "Rome": {
                "description": "Yet another Italian city",
                "is_web_public": True
            },
        }

        bulk_create_streams(zulip_realm, stream_dict)
        recipient_streams: List[int] = [
            Stream.objects.get(name=name, realm=zulip_realm).id
            for name in stream_list
        ]

        # Create subscriptions to streams. The following
        # algorithm will give each of the users a different but
        # deterministic subset of the streams (given a fixed list
        # of users). For the test suite, we have a fixed list of
        # subscriptions to make sure test data is consistent
        # across platforms.
        subscriptions_list: List[Tuple[UserProfile, Recipient]] = []
        profiles: Sequence[
            UserProfile] = UserProfile.objects.select_related().filter(
                is_bot=False).order_by("email")

        if options["test_suite"]:
            subscriptions_map = {
                '*****@*****.**': ['Verona'],
                '*****@*****.**': ['Verona'],
                '*****@*****.**': ['Verona', 'Denmark'],
                '*****@*****.**': ['Verona', 'Denmark', 'Scotland'],
                '*****@*****.**': ['Verona', 'Denmark', 'Scotland'],
                '*****@*****.**': ['Verona', 'Denmark', 'Scotland', 'Venice'],
                '*****@*****.**': ['Verona', 'Denmark', 'Scotland', 'Venice', 'Rome'],
                '*****@*****.**': ['Verona'],
                '*****@*****.**': ['Verona', 'Denmark', 'Venice'],
            }

            for profile in profiles:
                email = profile.delivery_email
                if email not in subscriptions_map:
                    raise Exception(
                        f'Subscriptions not listed for user {email}')

                for stream_name in subscriptions_map[email]:
                    stream = Stream.objects.get(name=stream_name)
                    r = Recipient.objects.get(type=Recipient.STREAM,
                                              type_id=stream.id)
                    subscriptions_list.append((profile, r))
        else:
            num_streams = len(recipient_streams)
            num_users = len(profiles)
            for i, profile in enumerate(profiles):
                # Subscribe to some streams.
                fraction = float(i) / num_users
                num_recips = int(num_streams * fraction) + 1

                for type_id in recipient_streams[:num_recips]:
                    r = Recipient.objects.get(type=Recipient.STREAM,
                                              type_id=type_id)
                    subscriptions_list.append((profile, r))

        subscriptions_to_add: List[Subscription] = []
        event_time = timezone_now()
        all_subscription_logs: (List[RealmAuditLog]) = []

        i = 0
        for profile, recipient in subscriptions_list:
            i += 1
            # Rotate through the fixed palette so stream colors vary.
            color = STREAM_ASSIGNMENT_COLORS[i % len(STREAM_ASSIGNMENT_COLORS)]
            s = Subscription(recipient=recipient,
                             user_profile=profile,
                             color=color)
            subscriptions_to_add.append(s)

            log = RealmAuditLog(
                realm=profile.realm,
                modified_user=profile,
                modified_stream_id=recipient.type_id,
                event_last_message_id=0,
                event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                event_time=event_time)
            all_subscription_logs.append(log)

        Subscription.objects.bulk_create(subscriptions_to_add)
        RealmAuditLog.objects.bulk_create(all_subscription_logs)

        # Create custom profile field data
        phone_number = try_add_realm_custom_profile_field(
            zulip_realm, "Phone number", CustomProfileField.SHORT_TEXT,
            hint='')
        biography = try_add_realm_custom_profile_field(
            zulip_realm, "Biography", CustomProfileField.LONG_TEXT,
            hint='What are you known for?')
        favorite_food = try_add_realm_custom_profile_field(
            zulip_realm, "Favorite food", CustomProfileField.SHORT_TEXT,
            hint="Or drink, if you'd prefer")
        field_data: ProfileFieldData = {
            'vim': {
                'text': 'Vim',
                'order': '1'
            },
            'emacs': {
                'text': 'Emacs',
                'order': '2'
            },
        }
        favorite_editor = try_add_realm_custom_profile_field(
            zulip_realm, "Favorite editor", CustomProfileField.CHOICE,
            field_data=field_data)
        birthday = try_add_realm_custom_profile_field(
            zulip_realm, "Birthday", CustomProfileField.DATE)
        favorite_website = try_add_realm_custom_profile_field(
            zulip_realm, "Favorite website", CustomProfileField.URL,
            hint="Or your personal blog's URL")
        mentor = try_add_realm_custom_profile_field(
            zulip_realm, "Mentor", CustomProfileField.USER)
        github_profile = try_add_realm_default_custom_profile_field(
            zulip_realm, "github")

        # Fill in values for Iago and Hamlet
        hamlet = get_user_by_delivery_email("*****@*****.**", zulip_realm)
        do_update_user_custom_profile_data_if_changed(iago, [
            {
                "id": phone_number.id,
                "value": "+1-234-567-8901"
            },
            {
                "id": biography.id,
                "value": "Betrayer of Othello."
            },
            {
                "id": favorite_food.id,
                "value": "Apples"
            },
            {
                "id": favorite_editor.id,
                "value": "emacs"
            },
            {
                "id": birthday.id,
                "value": "2000-1-1"
            },
            {
                "id": favorite_website.id,
                "value": "https://zulip.readthedocs.io/en/latest/"
            },
            {
                "id": mentor.id,
                "value": [hamlet.id]
            },
            {
                "id": github_profile.id,
                "value": 'zulip'
            },
        ])
        do_update_user_custom_profile_data_if_changed(hamlet, [
            {
                "id": phone_number.id,
                "value": "+0-11-23-456-7890"
            },
            {
                "id": biography.id,
                "value": "I am:\n* The prince of Denmark\n* Nephew to the usurping Claudius",
            },
            {
                "id": favorite_food.id,
                "value": "Dark chocolate"
            },
            {
                "id": favorite_editor.id,
                "value": "vim"
            },
            {
                "id": birthday.id,
                "value": "1900-1-1"
            },
            {
                "id": favorite_website.id,
                "value": "https://blog.zulig.org"
            },
            {
                "id": mentor.id,
                "value": [iago.id]
            },
            {
                "id": github_profile.id,
                "value": 'zulipbot'
            },
        ])
    else:
        # Not deleting: reuse the existing zulip realm and its streams.
        zulip_realm = get_realm("zulip")
        recipient_streams = [
            klass.type_id
            for klass in Recipient.objects.filter(type=Recipient.STREAM)
        ]

    # Extract a list of all users
    user_profiles: List[UserProfile] = list(
        UserProfile.objects.filter(is_bot=False))

    # Create a test realm emoji.
    IMAGE_FILE_PATH = static_path('images/test-images/checkbox.png')
    with open(IMAGE_FILE_PATH, 'rb') as fp:
        check_add_realm_emoji(zulip_realm, 'green_tick', iago, fp)

    if not options["test_suite"]:
        # Populate users with some bar data
        for user in user_profiles:
            status: int = UserPresence.ACTIVE
            date = timezone_now()
            client = get_client("website")
            # Deterministically vary the client by first letter of name.
            if user.full_name[0] <= 'H':
                client = get_client("ZulipAndroid")
            UserPresence.objects.get_or_create(user_profile=user,
                                               realm_id=user.realm_id,
                                               client=client,
                                               timestamp=date,
                                               status=status)

    user_profiles_ids = [user_profile.id for user_profile in user_profiles]

    # Create several initial huddles
    for i in range(options["num_huddles"]):
        get_huddle(random.sample(user_profiles_ids, random.randint(3, 4)))

    # Create several initial pairs for personals
    personals_pairs = [
        random.sample(user_profiles_ids, 2)
        for i in range(options["num_personals"])
    ]

    create_alert_words(zulip_realm.id)

    # Generate a new set of test data.
    create_test_data()

    # prepopulate the URL preview/embed data for the links present
    # in the config.generate_data.json data set. This makes it
    # possible for populate_db to run happily without Internet
    # access.
    with open("zerver/tests/fixtures/docs_url_preview_data.json", "rb") as f:
        urls_with_preview_data = orjson.loads(f.read())
        for url in urls_with_preview_data:
            cache_set(url, urls_with_preview_data[url], PREVIEW_CACHE_NAME)

    if options["delete"]:
        if options["test_suite"]:
            # Create test users; the MIT ones are needed to test
            # the Zephyr mirroring codepaths.
            testsuite_mit_users = [
                ("Fred Sipb (MIT)", "*****@*****.**"),
                ("Athena Consulting Exchange User (MIT)", "*****@*****.**"),
                ("Esp Classroom (MIT)", "*****@*****.**"),
            ]
            create_users(mit_realm, testsuite_mit_users,
                         tos_version=settings.TOS_VERSION)

            testsuite_lear_users = [
                ("King Lear", "*****@*****.**"),
                ("Cordelia Lear", "*****@*****.**"),
            ]
            create_users(lear_realm, testsuite_lear_users,
                         tos_version=settings.TOS_VERSION)

        if not options["test_suite"]:
            # To keep the messages.json fixtures file for the test
            # suite fast, don't add these users and subscriptions
            # when running populate_db for the test suite
            zulip_stream_dict: Dict[str, Dict[str, Any]] = {
                "devel": {
                    "description": "For developing"
                },
                "all": {
                    "description": "For **everything**"
                },
                "announce": {
                    "description": "For announcements",
                    'stream_post_policy': Stream.STREAM_POST_POLICY_ADMINS
                },
                "design": {
                    "description": "For design"
                },
                "support": {
                    "description": "For support"
                },
                "social": {
                    "description": "For socializing"
                },
                "test": {
                    "description": "For testing `code`"
                },
                "errors": {
                    "description": "For errors"
                },
                "sales": {
                    "description": "For sales discussion"
                },
            }

            # Calculate the maximum number of digits in any extra stream's
            # number, since a stream with name "Extra Stream 3" could show
            # up after "Extra Stream 29". (Used later to pad numbers with
            # 0s).
            maximum_digits = len(str(options['extra_streams'] - 1))

            for i in range(options['extra_streams']):
                # Pad the number with 0s based on `maximum_digits`.
                number_str = str(i).zfill(maximum_digits)

                extra_stream_name = 'Extra Stream ' + number_str

                zulip_stream_dict[extra_stream_name] = {
                    "description": "Auto-generated extra stream.",
                }

            bulk_create_streams(zulip_realm, zulip_stream_dict)

            # Now that we've created the notifications stream, configure it properly.
            zulip_realm.notifications_stream = get_stream(
                "announce", zulip_realm)
            zulip_realm.save(update_fields=['notifications_stream'])

            # Add a few default streams
            for default_stream_name in [
                    "design", "devel", "social", "support"
            ]:
                DefaultStream.objects.create(realm=zulip_realm,
                                             stream=get_stream(
                                                 default_stream_name,
                                                 zulip_realm))

            # Now subscribe everyone to these streams
            subscribe_users_to_streams(zulip_realm, zulip_stream_dict)

        if not options["test_suite"]:
            # Update pointer of each user to point to the last message in their
            # UserMessage rows with sender_id=user_profile_id.
            users = list(
                UserMessage.objects.filter(message__sender_id=F(
                    'user_profile_id')).values('user_profile_id').annotate(
                        pointer=Max('message_id')))
            for user in users:
                UserProfile.objects.filter(
                    id=user['user_profile_id']).update(
                        pointer=user['pointer'])

        create_user_groups()

        if not options["test_suite"]:
            # We populate the analytics database here for
            # development purpose only
            call_command('populate_analytics_db')

    # Distribute num_messages across the worker jobs as evenly as possible,
    # giving the first (num_messages % threads) jobs one extra message.
    threads = options["threads"]
    jobs: List[Tuple[int, List[List[int]], Dict[str, Any],
                     Callable[[str], int], int]] = []
    for i in range(threads):
        count = options["num_messages"] // threads
        if i < options["num_messages"] % threads:
            count += 1
        jobs.append((count, personals_pairs, options, self.stdout.write,
                     random.randint(0, 10**10)))

    for job in jobs:
        generate_and_send_messages(job)

    if options["delete"]:
        if not options['test_suite']:
            # These bots are not needed by the test suite
            # Also, we don't want interacting with each other
            # in dev setup.
            internal_zulip_users_nosubs = [
                ("Zulip Commit Bot", "*****@*****.**"),
                ("Zulip Trac Bot", "*****@*****.**"),
                ("Zulip Nagios Bot", "*****@*****.**"),
            ]
            create_users(zulip_realm, internal_zulip_users_nosubs,
                         bot_type=UserProfile.DEFAULT_BOT)

        mark_all_messages_as_read()
        self.stdout.write("Successfully populated test database.\n")
def get_body(self, fixture_name: str) -> str:
    """Return the raw webhook fixture payload named *fixture_name*.

    The payload is parsed once up front so that a malformed fixture
    fails fast here, rather than deep inside the webhook under test.
    """
    assert self.FIXTURE_DIR_NAME is not None
    payload = self.webhook_fixture_data(self.FIXTURE_DIR_NAME, fixture_name)
    # fail fast if we don't have valid json
    orjson.loads(payload)
    return payload