# Example 1
 def test_dict_keys_substr_invalid(self):
     """A str-subclass dict key containing a lone surrogate must fail to encode."""
     bad_key = SubStr("\ud800")
     with self.assertRaises(orjson.JSONEncodeError):
         orjson.dumps({bad_key: True}, option=orjson.OPT_NON_STR_KEYS)
# Example 2
    def do_test_realm_update_api(self, name: str) -> None:
        """Test updating realm properties.

        If new realm properties have been added to the Realm model but the
        test_values dict below has not been updated, this will raise an
        assertion error.
        """

        # All boolean properties share this two-value test set; they are
        # deliberately absent from test_values below.
        bool_tests: List[bool] = [False, True]
        # Candidate values per property: the first entry seeds the database and
        # the entries are then round-tripped through the API below.
        test_values: Dict[str, Any] = dict(
            default_language=["de", "en"],
            default_code_block_language=["javascript", ""],
            description=["Realm description", "New description"],
            digest_weekday=[0, 1, 2],
            message_retention_days=[10, 20],
            name=["Zulip", "New Name"],
            waiting_period_threshold=[10, 20],
            create_stream_policy=[
                Realm.POLICY_ADMINS_ONLY,
                Realm.POLICY_MEMBERS_ONLY,
                Realm.POLICY_FULL_MEMBERS_ONLY,
                Realm.POLICY_MODERATORS_ONLY,
            ],
            user_group_edit_policy=[
                Realm.USER_GROUP_EDIT_POLICY_ADMINS,
                Realm.USER_GROUP_EDIT_POLICY_MEMBERS,
            ],
            private_message_policy=[
                Realm.PRIVATE_MESSAGE_POLICY_UNLIMITED,
                Realm.PRIVATE_MESSAGE_POLICY_DISABLED,
            ],
            invite_to_stream_policy=[
                Realm.POLICY_ADMINS_ONLY,
                Realm.POLICY_MEMBERS_ONLY,
                Realm.POLICY_FULL_MEMBERS_ONLY,
                Realm.POLICY_MODERATORS_ONLY,
            ],
            wildcard_mention_policy=[
                Realm.WILDCARD_MENTION_POLICY_EVERYONE,
                Realm.WILDCARD_MENTION_POLICY_MEMBERS,
                Realm.WILDCARD_MENTION_POLICY_FULL_MEMBERS,
                Realm.WILDCARD_MENTION_POLICY_STREAM_ADMINS,
                Realm.WILDCARD_MENTION_POLICY_ADMINS,
                Realm.WILDCARD_MENTION_POLICY_NOBODY,
                Realm.WILDCARD_MENTION_POLICY_MODERATORS,
            ],
            bot_creation_policy=[1, 2],
            email_address_visibility=[
                Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
                Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
                Realm.EMAIL_ADDRESS_VISIBILITY_NOBODY,
            ],
            # video_chat_provider is submitted as a JSON-encoded value inside a
            # dict rather than a bare value; see the special-case branch below.
            video_chat_provider=[
                dict(
                    video_chat_provider=orjson.dumps(
                        Realm.VIDEO_CHAT_PROVIDERS["jitsi_meet"]["id"]
                    ).decode(),
                ),
            ],
            giphy_rating=[
                Realm.GIPHY_RATING_OPTIONS["y"]["id"],
                Realm.GIPHY_RATING_OPTIONS["r"]["id"],
            ],
            message_content_delete_limit_seconds=[1000, 1100, 1200],
            invite_to_realm_policy=[
                Realm.POLICY_ADMINS_ONLY,
                Realm.POLICY_MEMBERS_ONLY,
                Realm.POLICY_FULL_MEMBERS_ONLY,
                Realm.POLICY_MODERATORS_ONLY,
            ],
            move_messages_between_streams_policy=[
                Realm.POLICY_ADMINS_ONLY,
                Realm.POLICY_MEMBERS_ONLY,
                Realm.POLICY_FULL_MEMBERS_ONLY,
                Realm.POLICY_MODERATORS_ONLY,
            ],
        )

        vals = test_values.get(name)
        # Boolean properties are not listed individually; substitute bool_tests.
        if Realm.property_types[name] is bool:
            vals = bool_tests
        if vals is None:
            raise AssertionError(f"No test created for {name}")

        if name == "video_chat_provider":
            # Special case: the value is a dict of JSON-encoded strings, so it
            # goes through the multiple-value API helper and is decoded before
            # comparison against the stored attribute.
            self.set_up_db(name, vals[0][name])
            realm = self.update_with_api_multiple_value(vals[0])
            self.assertEqual(getattr(realm, name), orjson.loads(vals[0][name]))
        else:
            # Seed with vals[0], flip to vals[1] and back, asserting the realm
            # attribute after each update so both directions are exercised.
            self.set_up_db(name, vals[0])
            realm = self.update_with_api(name, vals[1])
            self.assertEqual(getattr(realm, name), vals[1])
            realm = self.update_with_api(name, vals[0])
            self.assertEqual(getattr(realm, name), vals[0])
# Example 3
def nest_get_spikes_by_spikerecorder(spikerecorder_id,
                                     from_time=None,
                                     to_time=None,
                                     node_ids=None,
                                     skip=None,
                                     top=None):  # noqa: E501
    """Retrieves the spikes for the given time range (optional) and node IDs (optional) from one spike detector. If no time range or node list is specified, it will return the spikes for whole time or all nodes respectively.

     # noqa: E501

    :param spikerecorder_id: The ID of the spike recorder (spike detector) to query.
    :type spikerecorder_id: int
    :param from_time: The start time in milliseconds (including) to be queried.
    :type from_time: float
    :param to_time: The end time in milliseconds (excluding) to be queried.
    :type to_time: float
    :param node_ids: A list of node IDs queried for spike data.
    :type node_ids: List[int]
    :param skip: The offset into the result.
    :type skip: int
    :param top: The maximum number of entries to be returned.
    :type top: int

    :rtype: Spikes
    """
    spikes = Spikes([], [])
    simulation_times = []
    node_id_list = []
    last_frame = False

    # The node-ID filter is identical for every simulation node, so build the
    # comma-separated query parameter once instead of on every loop iteration.
    if node_ids is not None:
        node_id_param = ",".join(map(str, node_ids))
    else:
        node_id_param = None

    # Gather spikes from every simulation node and merge them; last_frame ends
    # up holding the flag from the last node queried (pre-existing behavior).
    for node in simulation_nodes.nest_simulation_nodes:
        response = requests.get(node + "/spikes",
                                params={
                                    "fromTime": from_time,
                                    "toTime": to_time,
                                    "nodeIds": node_id_param,
                                    "spikedetectorId": spikerecorder_id
                                })

        data = orjson.loads(response.content)
        last_frame = data["lastFrame"]
        simulation_times = simulation_times + data["simulationTimes"]
        node_id_list = node_id_list + data["nodeIds"]

    # Sort spike times ascending while keeping the node IDs aligned pairwise.
    sorted_lists = sort_together([simulation_times, node_id_list])
    if sorted_lists:
        spikes.simulation_times = sorted_lists[0]
        spikes.node_ids = sorted_lists[1]

    # Apply offset (skip) and limit (top), clamping top to the available data.
    if skip is None:
        skip = 0
    if top is None or (top + skip) > len(spikes.node_ids):
        top = len(spikes.node_ids) - skip
    spikes.node_ids = spikes.node_ids[skip:skip + top]
    spikes.simulation_times = spikes.simulation_times[skip:skip + top]

    json_string = orjson.dumps({
        "nodeIds": spikes.node_ids,
        "simulationTimes": spikes.simulation_times,
        "lastFrame": last_frame
    })
    return ConnexionResponse(status_code=200,
                             content_type='application/json',
                             mimetype='text/plain',
                             body=json_string)
# Example 4
 def get_ui_display(self, record):
     """Serialize the literature detail view of *record* to a UTF-8 JSON string."""
     detail = LiteratureDetailSchema().dump(record).data
     return orjson.dumps(detail).decode("utf-8")
# Example 5
def _dump_message(data):
    """Serialize *data* to JSON bytes, terminated with the module's CRLF marker."""
    encoded = orjson.dumps(data)
    return encoded + __CRLF
# Example 6
 def write(self, elems):
     """Write each element to the file handle as one JSON document per line."""
     for item in elems:
         serialized = orjson.dumps(item)
         self.fh.write(serialized + b"\n")
class TestOutgoingWebhookMessaging(ZulipTestCase):
    """Tests that messages addressed to outgoing-webhook bots trigger the
    configured service call(s) and that webhook responses are delivered
    back as bot replies."""

    def create_outgoing_bot(self, bot_owner: UserProfile) -> UserProfile:
        # Helper: register a generic outgoing-webhook bot owned by bot_owner.
        return self.create_test_bot(
            "outgoing-webhook",
            bot_owner,
            full_name="Outgoing Webhook bot",
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            service_name="foo-service",
        )

    def test_multiple_services(self) -> None:
        """A bot with two services must POST to both, with the right tokens."""
        bot_owner = self.example_user("othello")

        bot = do_create_user(
            bot_owner=bot_owner,
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
            full_name="Outgoing Webhook Bot",
            email="whatever",
            realm=bot_owner.realm,
            password=None,
            acting_user=None,
        )

        # Attach two independent services (distinct URLs and tokens) to the bot.
        add_service(
            "weather",
            user_profile=bot,
            interface=Service.GENERIC,
            base_url="weather_url",
            token="weather_token",
        )

        add_service(
            "qotd",
            user_profile=bot,
            interface=Service.GENERIC,
            base_url="qotd_url",
            token="qotd_token",
        )

        sender = self.example_user("hamlet")

        # Intercept the outgoing REST call so no real HTTP request is made.
        with mock.patch("zerver.worker.queue_processors.do_rest_call") as m:
            self.send_personal_message(
                sender,
                bot,
                content="some content",
            )

        # Collect (base_url, token) pairs from every intercepted call and check
        # each call carried the original message content and sender.
        url_token_tups = set()
        for item in m.call_args_list:
            args = item[0]
            base_url = args[0]
            request_data = orjson.loads(args[1])
            tup = (base_url, request_data["token"])
            url_token_tups.add(tup)
            message_data = request_data["message"]
            self.assertEqual(message_data["content"], "some content")
            self.assertEqual(message_data["sender_id"], sender.id)

        self.assertEqual(
            url_token_tups,
            {
                ("weather_url", "weather_token"),
                ("qotd_url", "qotd_token"),
            },
        )

    @mock.patch(
        "requests.request",
        return_value=ResponseMock(
            200,
            orjson.dumps(
                {"response_string": "Hidley ho, I'm a webhook responding!"})),
    )
    def test_pm_to_outgoing_webhook_bot(
            self, mock_requests_request: mock.Mock) -> None:
        """A PM to the bot must produce a PM reply with the webhook's text."""
        bot_owner = self.example_user("othello")
        bot = self.create_outgoing_bot(bot_owner)
        sender = self.example_user("hamlet")

        self.send_personal_message(sender, bot, content="foo")
        # The bot's reply should be the mocked response string, sent back as a
        # personal message addressed to the original sender.
        last_message = self.get_last_message()
        self.assertEqual(last_message.content,
                         "Hidley ho, I'm a webhook responding!")
        self.assertEqual(last_message.sender_id, bot.id)
        self.assertEqual(
            last_message.recipient.type_id,
            sender.id,
        )
        self.assertEqual(
            last_message.recipient.type,
            Recipient.PERSONAL,
        )

    @mock.patch(
        "requests.request",
        return_value=ResponseMock(
            200,
            orjson.dumps(
                {"response_string": "Hidley ho, I'm a webhook responding!"})),
    )
    def test_stream_message_to_outgoing_webhook_bot(
            self, mock_requests_request: mock.Mock) -> None:
        """An @-mention on a stream must produce a reply on the same topic."""
        bot_owner = self.example_user("othello")
        bot = self.create_outgoing_bot(bot_owner)

        self.send_stream_message(bot_owner,
                                 "Denmark",
                                 content=f"@**{bot.full_name}** foo",
                                 topic_name="bar")
        # The reply lands on the same stream and topic as the mention.
        last_message = self.get_last_message()
        self.assertEqual(last_message.content,
                         "Hidley ho, I'm a webhook responding!")
        self.assertEqual(last_message.sender_id, bot.id)
        self.assertEqual(last_message.topic_name(), "bar")
        display_recipient = get_display_recipient(last_message.recipient)
        self.assertEqual(display_recipient, "Denmark")
# Example 8
    def test_create_external_account_field(self) -> None:
        """Exercise validation of EXTERNAL_ACCOUNT custom profile fields.

        Walks through invalid field_data payloads (non-JSON, missing, blank,
        and unknown subtypes; custom subtype with missing/invalid url_pattern)
        and then successfully creates a Twitter field and a custom Reddit
        field, verifying the persisted rows.
        """
        self.login('iago')
        realm = get_realm('zulip')
        data: Dict[str, Union[str, int, Dict[str, str]]] = {}
        data["name"] = "Twitter"
        data["field_type"] = CustomProfileField.EXTERNAL_ACCOUNT

        # field_data must be valid JSON.
        data['field_data'] = 'invalid'
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "Bad value for 'field_data': invalid")

        # The subtype key is mandatory.
        data['field_data'] = orjson.dumps({}).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result,
                               "subtype key is missing from field_data")

        # subtype must be non-blank.
        data["field_data"] = orjson.dumps({
            'subtype': '',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result,
                               'field_data["subtype"] cannot be blank.')

        # subtype must name a known external account type.
        data["field_data"] = orjson.dumps({
            'subtype': '123',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Invalid external account type')

        # 'linkedin' is not among the default external account types here.
        non_default_external_account = 'linkedin'
        data["field_data"] = orjson.dumps({
            'subtype':
            non_default_external_account,
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Invalid external account type')

        # 'twitter' is a valid built-in subtype; creation succeeds.
        data["field_data"] = orjson.dumps({
            'subtype': 'twitter',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

        # Verify the persisted Twitter field.
        twitter_field = CustomProfileField.objects.get(name="Twitter",
                                                       realm=realm)
        self.assertEqual(twitter_field.field_type,
                         CustomProfileField.EXTERNAL_ACCOUNT)
        self.assertEqual(twitter_field.name, "Twitter")
        self.assertEqual(
            orjson.loads(twitter_field.field_data)['subtype'], 'twitter')

        # A 'custom' subtype additionally requires a url_pattern.
        data['name'] = 'Reddit'
        data["field_data"] = orjson.dumps({
            'subtype': 'custom',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(
            result, 'Custom external account must define url pattern')

        # url_pattern must be a string.
        data["field_data"] = orjson.dumps({
            'subtype': 'custom',
            'url_pattern': 123,
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result,
                               'field_data["url_pattern"] is not a string')

        # url_pattern must contain the %(username)s placeholder.
        data["field_data"] = orjson.dumps({
            'subtype': 'custom',
            'url_pattern': 'invalid',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Malformed URL pattern.')

        # Exactly one %(username)s occurrence is allowed.
        data["field_data"] = orjson.dumps({
            'subtype':
            'custom',
            'url_pattern':
            'https://www.reddit.com/%(username)s/user/%(username)s',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Malformed URL pattern.')

        # The pattern must be a full URL (scheme included).
        data["field_data"] = orjson.dumps({
            'subtype':
            'custom',
            'url_pattern':
            'reddit.com/%(username)s',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result,
                               'field_data["url_pattern"] is not a URL')

        # A well-formed custom pattern finally succeeds.
        data["field_data"] = orjson.dumps({
            'subtype':
            'custom',
            'url_pattern':
            'https://www.reddit.com/user/%(username)s',
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)

        # Verify the persisted custom (Reddit) field.
        custom_field = CustomProfileField.objects.get(name="Reddit",
                                                      realm=realm)
        self.assertEqual(custom_field.field_type,
                         CustomProfileField.EXTERNAL_ACCOUNT)
        self.assertEqual(custom_field.name, "Reddit")
        field_data = orjson.loads(custom_field.field_data)
        self.assertEqual(field_data['subtype'], 'custom')
        self.assertEqual(field_data['url_pattern'],
                         'https://www.reddit.com/user/%(username)s')

        # Re-posting the same name must be rejected as a duplicate.
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result,
                               "A field with that label already exists.")
# Example 9
    def test_update(self) -> None:
        """Exercise PATCH /json/realm/profile_fields/<id> validation and updates.

        Covers: unknown field id, blank/too-long name, too-long hint,
        successful renames (including name trimming), and SELECT-style
        field_data validation for the "Favorite editor" field.
        """
        self.login('iago')
        realm = get_realm('zulip')
        # Updating a nonexistent field id must fail.
        result = self.client_patch(
            "/json/realm/profile_fields/100",
            info={
                'name': 'Phone Number',
                'field_type': CustomProfileField.SHORT_TEXT
            },
        )
        self.assert_json_error(result, 'Field id 100 not found.')

        field = CustomProfileField.objects.get(name="Phone number",
                                               realm=realm)
        # A blank name is rejected.
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={
                'name': '',
                'field_type': CustomProfileField.SHORT_TEXT
            },
        )
        self.assert_json_error(result, 'Label cannot be blank.')

        # Failed updates must not change the number of fields.
        self.assertEqual(CustomProfileField.objects.count(),
                         self.original_count)
        # A valid rename succeeds.
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': 'New phone number',
                                       'field_type':
                                       CustomProfileField.SHORT_TEXT
                                   })
        self.assert_json_success(result)
        field = CustomProfileField.objects.get(id=field.id, realm=realm)
        self.assertEqual(CustomProfileField.objects.count(),
                         self.original_count)
        self.assertEqual(field.name, 'New phone number')
        self.assertIs(field.hint, '')
        self.assertEqual(field.field_type, CustomProfileField.SHORT_TEXT)

        # Name longer than 40 characters is rejected.
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': '*' * 41,
                                       'field_type':
                                       CustomProfileField.SHORT_TEXT
                                   })
        msg = "name is too long (limit: 40 characters)"
        self.assert_json_error(result, msg)

        # Hint longer than 80 characters is rejected.
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': 'New phone number',
                                       'hint': '*' * 81,
                                       'field_type':
                                       CustomProfileField.SHORT_TEXT
                                   })
        msg = "hint is too long (limit: 80 characters)"
        self.assert_json_error(result, msg)

        # Valid name + hint update succeeds.
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': 'New phone number',
                                       'hint': 'New contact number',
                                       'field_type':
                                       CustomProfileField.SHORT_TEXT
                                   })
        self.assert_json_success(result)

        field = CustomProfileField.objects.get(id=field.id, realm=realm)
        self.assertEqual(CustomProfileField.objects.count(),
                         self.original_count)
        self.assertEqual(field.name, 'New phone number')
        self.assertEqual(field.hint, 'New contact number')
        self.assertEqual(field.field_type, CustomProfileField.SHORT_TEXT)

        # Trailing whitespace in the name is stripped on save ('Name ' -> 'Name').
        result = self.client_patch(
            f"/json/realm/profile_fields/{field.id}",
            info={
                'name': 'Name ',
                'field_type': CustomProfileField.SHORT_TEXT
            },
        )
        self.assert_json_success(result)
        field.refresh_from_db()
        self.assertEqual(field.name, 'Name')

        # Switch to a SELECT-style field to validate field_data handling.
        field = CustomProfileField.objects.get(name="Favorite editor",
                                               realm=realm)
        # field_data must be valid JSON.
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': 'Favorite editor',
                                       'field_data': 'invalid'
                                   })
        self.assert_json_error(result, "Bad value for 'field_data': invalid")

        # Every field_data value must itself be a dict ('vim' below is a str).
        field_data = orjson.dumps({
            'vim': 'Vim',
            'emacs': {
                'order': '2',
                'text': 'Emacs'
            },
        }).decode()
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': 'Favorite editor',
                                       'field_data': field_data
                                   })
        self.assert_json_error(result, "field_data is not a dict")

        # Well-formed option dicts (order + text per option) succeed.
        field_data = orjson.dumps({
            'vim': {
                'order': '1',
                'text': 'Vim'
            },
            'emacs': {
                'order': '2',
                'text': 'Emacs'
            },
            'notepad': {
                'order': '3',
                'text': 'Notepad'
            },
        }).decode()
        result = self.client_patch(f"/json/realm/profile_fields/{field.id}",
                                   info={
                                       'name': 'Favorite editor',
                                       'field_data': field_data
                                   })
        self.assert_json_success(result)
# Example 10
def process_initial_upgrade(
    user: UserProfile,
    licenses: int,
    automanage_licenses: bool,
    billing_schedule: int,
    charge_automatically: bool,
    free_trial: bool,
) -> None:
    """Upgrade *user*'s realm to Zulip Cloud Standard.

    Creates (or reuses) the Stripe customer, computes the plan parameters,
    then atomically creates the CustomerPlan, its initial LicenseLedger
    entry, and a RealmAuditLog row. Unless this is a free trial, it also
    creates and finalizes the first Stripe invoice. Finally the realm's
    plan type is switched to Standard.

    :param user: acting user whose realm is being upgraded.
    :param licenses: number of licenses the customer requested.
    :param automanage_licenses: whether license counts track seat usage.
    :param billing_schedule: billing frequency (annual/monthly constant).
    :param charge_automatically: charge the saved card vs. send an invoice.
    :param free_trial: start the plan in free-trial status, skipping invoicing.
    """
    realm = user.realm
    customer = update_or_create_stripe_customer(user)
    assert customer.stripe_customer_id is not None  # for mypy
    assert customer.realm is not None
    # Guard against double-upgrading a realm that already has an active plan.
    ensure_realm_does_not_have_active_plan(customer.realm)
    (
        billing_cycle_anchor,
        next_invoice_date,
        period_end,
        price_per_license,
    ) = compute_plan_parameters(
        CustomerPlan.STANDARD,
        automanage_licenses,
        billing_schedule,
        customer.default_discount,
        free_trial,
    )

    # TODO: The correctness of this relies on user creation, deactivation, etc being
    # in a transaction.atomic() with the relevant RealmAuditLog entries
    with transaction.atomic():
        # billed_licenses can greater than licenses if users are added between the start of
        # this function (process_initial_upgrade) and now
        billed_licenses = max(get_latest_seat_count(realm), licenses)
        plan_params = {
            "automanage_licenses": automanage_licenses,
            "charge_automatically": charge_automatically,
            "price_per_license": price_per_license,
            "discount": customer.default_discount,
            "billing_cycle_anchor": billing_cycle_anchor,
            "billing_schedule": billing_schedule,
            "tier": CustomerPlan.STANDARD,
        }
        if free_trial:
            plan_params["status"] = CustomerPlan.FREE_TRIAL
        plan = CustomerPlan.objects.create(
            customer=customer, next_invoice_date=next_invoice_date, **plan_params
        )
        # The initial ledger entry records the licenses billed at plan start.
        ledger_entry = LicenseLedger.objects.create(
            plan=plan,
            is_renewal=True,
            event_time=billing_cycle_anchor,
            licenses=billed_licenses,
            licenses_at_next_renewal=billed_licenses,
        )
        # Mark the initial ledger entry as already invoiced.
        plan.invoiced_through = ledger_entry
        plan.save(update_fields=["invoiced_through"])
        RealmAuditLog.objects.create(
            realm=realm,
            acting_user=user,
            event_time=billing_cycle_anchor,
            event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED,
            extra_data=orjson.dumps(plan_params, default=decimal_to_float).decode(),
        )

    if not free_trial:
        # Create the line item for the first billing period, then the invoice
        # itself; auto_advance lets Stripe move it through its lifecycle.
        stripe.InvoiceItem.create(
            currency="usd",
            customer=customer.stripe_customer_id,
            description="Zulip Cloud Standard",
            discountable=False,
            period={
                "start": datetime_to_timestamp(billing_cycle_anchor),
                "end": datetime_to_timestamp(period_end),
            },
            quantity=billed_licenses,
            unit_amount=price_per_license,
        )

        if charge_automatically:
            collection_method = "charge_automatically"
            days_until_due = None
        else:
            collection_method = "send_invoice"
            days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE

        stripe_invoice = stripe.Invoice.create(
            auto_advance=True,
            collection_method=collection_method,
            customer=customer.stripe_customer_id,
            days_until_due=days_until_due,
            statement_descriptor="Zulip Cloud Standard",
        )
        stripe.Invoice.finalize_invoice(stripe_invoice)

    # Imported here, presumably to avoid a circular import — TODO confirm.
    from zerver.actions.realm_settings import do_change_realm_plan_type

    do_change_realm_plan_type(realm, Realm.PLAN_TYPE_STANDARD, acting_user=user)
# Example 11
 def consume(self, event: Mapping[str, Any]) -> None:  # nocoverage
     """Serialize *event* and append it, newline-terminated, to the worker test file."""
     target = settings.ZULIP_WORKER_TEST_FILE
     payload = orjson.dumps(event)
     logging.info("TestWorker should append this message to %s: %s", target, payload.decode())
     with open(target, "ab") as sink:
         sink.write(payload + b"\n")
# Example 12
def make_end_of_cycle_updates_if_needed(
    plan: CustomerPlan, event_time: datetime
) -> Tuple[Optional[CustomerPlan], Optional[LicenseLedger]]:
    """Roll *plan* forward across a billing-cycle boundary, if one has passed.

    Returns a ``(new_plan, ledger_entry)`` pair:

    * ``(None, <renewal ledger entry>)`` — the plan simply renewed (ACTIVE)
      or just exited its free trial.
    * ``(<replacement plan>, <its first ledger entry>)`` — the plan was
      replaced at the boundary (monthly -> annual switch, or
      Standard -> Plus switch).
    * ``(None, None)`` — the plan was downgraded/ended at the boundary.
    * ``(None, <last ledger entry>)`` — no boundary crossed yet; no change.
    """
    last_ledger_entry = LicenseLedger.objects.filter(plan=plan).order_by("-id").first()
    last_ledger_renewal = (
        LicenseLedger.objects.filter(plan=plan, is_renewal=True).order_by("-id").first()
    )
    assert last_ledger_renewal is not None
    last_renewal = last_ledger_renewal.event_time

    # Free trials and immediate Standard->Plus switches bill from the stored
    # next_invoice_date; otherwise the boundary is computed from last renewal.
    if plan.is_free_trial() or plan.status == CustomerPlan.SWITCH_NOW_FROM_STANDARD_TO_PLUS:
        assert plan.next_invoice_date is not None
        next_billing_cycle = plan.next_invoice_date
    else:
        next_billing_cycle = start_of_next_billing_cycle(plan, last_renewal)
    if next_billing_cycle <= event_time and last_ledger_entry is not None:
        licenses_at_next_renewal = last_ledger_entry.licenses_at_next_renewal
        assert licenses_at_next_renewal is not None
        if plan.status == CustomerPlan.ACTIVE:
            # Ordinary renewal: record a renewal ledger entry on the same plan.
            return None, LicenseLedger.objects.create(
                plan=plan,
                is_renewal=True,
                event_time=next_billing_cycle,
                licenses=licenses_at_next_renewal,
                licenses_at_next_renewal=licenses_at_next_renewal,
            )
        if plan.is_free_trial():
            # Free trial ends here: anchor the paid cycle at the boundary
            # (microseconds dropped) and activate the plan.
            plan.invoiced_through = last_ledger_entry
            plan.billing_cycle_anchor = next_billing_cycle.replace(microsecond=0)
            plan.status = CustomerPlan.ACTIVE
            plan.save(update_fields=["invoiced_through", "billing_cycle_anchor", "status"])
            return None, LicenseLedger.objects.create(
                plan=plan,
                is_renewal=True,
                event_time=next_billing_cycle,
                licenses=licenses_at_next_renewal,
                licenses_at_next_renewal=licenses_at_next_renewal,
            )

        if plan.status == CustomerPlan.SWITCH_TO_ANNUAL_AT_END_OF_CYCLE:
            if plan.fixed_price is not None:  # nocoverage
                raise NotImplementedError("Can't switch fixed priced monthly plan to annual.")

            # End the monthly plan and create the replacement annual plan.
            plan.status = CustomerPlan.ENDED
            plan.save(update_fields=["status"])

            discount = plan.customer.default_discount or plan.discount
            # NOTE(review): the price is computed with plan.discount, while the
            # new plan below stores the combined `discount` — confirm whether
            # compute_plan_parameters should receive `discount` instead.
            _, _, _, price_per_license = compute_plan_parameters(
                tier=plan.tier,
                automanage_licenses=plan.automanage_licenses,
                billing_schedule=CustomerPlan.ANNUAL,
                discount=plan.discount,
            )

            new_plan = CustomerPlan.objects.create(
                customer=plan.customer,
                billing_schedule=CustomerPlan.ANNUAL,
                automanage_licenses=plan.automanage_licenses,
                charge_automatically=plan.charge_automatically,
                price_per_license=price_per_license,
                discount=discount,
                billing_cycle_anchor=next_billing_cycle,
                tier=plan.tier,
                status=CustomerPlan.ACTIVE,
                next_invoice_date=next_billing_cycle,
                invoiced_through=None,
                invoicing_status=CustomerPlan.INITIAL_INVOICE_TO_BE_SENT,
            )

            new_plan_ledger_entry = LicenseLedger.objects.create(
                plan=new_plan,
                is_renewal=True,
                event_time=next_billing_cycle,
                licenses=licenses_at_next_renewal,
                licenses_at_next_renewal=licenses_at_next_renewal,
            )

            realm = new_plan.customer.realm
            assert realm is not None

            # Audit-log the monthly -> annual switch with both plan ids.
            RealmAuditLog.objects.create(
                realm=realm,
                event_time=event_time,
                event_type=RealmAuditLog.CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN,
                extra_data=orjson.dumps(
                    {
                        "monthly_plan_id": plan.id,
                        "annual_plan_id": new_plan.id,
                    }
                ).decode(),
            )
            return new_plan, new_plan_ledger_entry

        if plan.status == CustomerPlan.SWITCH_NOW_FROM_STANDARD_TO_PLUS:
            # End the Standard plan at the boundary and start a Plus plan
            # with the same schedule/licensing settings.
            standard_plan = plan
            standard_plan.end_date = next_billing_cycle
            standard_plan.status = CustomerPlan.ENDED
            standard_plan.save(update_fields=["status", "end_date"])

            (_, _, _, plus_plan_price_per_license) = compute_plan_parameters(
                CustomerPlan.PLUS,
                standard_plan.automanage_licenses,
                standard_plan.billing_schedule,
                standard_plan.customer.default_discount,
            )
            plus_plan_billing_cycle_anchor = standard_plan.end_date.replace(microsecond=0)

            plus_plan = CustomerPlan.objects.create(
                customer=standard_plan.customer,
                status=CustomerPlan.ACTIVE,
                automanage_licenses=standard_plan.automanage_licenses,
                charge_automatically=standard_plan.charge_automatically,
                price_per_license=plus_plan_price_per_license,
                discount=standard_plan.customer.default_discount,
                billing_schedule=standard_plan.billing_schedule,
                tier=CustomerPlan.PLUS,
                billing_cycle_anchor=plus_plan_billing_cycle_anchor,
                invoicing_status=CustomerPlan.INITIAL_INVOICE_TO_BE_SENT,
                next_invoice_date=plus_plan_billing_cycle_anchor,
            )

            # Carry the license count forward from the Standard plan's last
            # ledger entry into the Plus plan's first entry.
            standard_plan_last_ledger = (
                LicenseLedger.objects.filter(plan=standard_plan).order_by("id").last()
            )
            assert standard_plan_last_ledger is not None
            licenses_for_plus_plan = standard_plan_last_ledger.licenses_at_next_renewal
            assert licenses_for_plus_plan is not None
            plus_plan_ledger_entry = LicenseLedger.objects.create(
                plan=plus_plan,
                is_renewal=True,
                event_time=plus_plan_billing_cycle_anchor,
                licenses=licenses_for_plus_plan,
                licenses_at_next_renewal=licenses_for_plus_plan,
            )
            return plus_plan, plus_plan_ledger_entry

        if plan.status == CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE:
            process_downgrade(plan)
        return None, None
    return None, last_ledger_entry
Ejemplo n.º 13
0
 async def send(self, data: dict) -> None:
     """Serialize *data* with orjson and transmit it over the WebSocket as bytes."""
     ws = self._ws
     if not ws:
         log.error("Error: Cannot send to uninitialized websocket")
         return
     payload = orjson.dumps(data)
     await ws.send_bytes(payload)
Ejemplo n.º 14
0
 def _to_json(obj: Any) -> str:  # type: ignore
     """Serialize *obj* to a JSON string using orjson."""
     serialized = orjson.dumps(obj)
     return serialized.decode('utf-8')
Ejemplo n.º 15
0
    async def post(self, request: HTTPConnection):
        """Handle a writeup edit-form submission.

        Validates the form (including URL-slug generation and title/slug
        uniqueness), applies the update and redirects on success, or
        re-renders the edit page with the validation errors on failure.
        """
        id = request.path_params["id"]

        writeup = await Writeup.get(id)

        if writeup is None:
            return abort(404, "Writeup not found")

        # Only users allowed to edit this author's writeup may proceed.
        if not can_edit(request, writeup.author_id):
            return abort(400)

        form = await request.form()

        form = WriteupForm(form)

        is_valid = form.validate()

        # The title must yield a non-empty URL-safe slug.
        if not slug(form.title.data):
            is_valid = False
            form.title.errors.append(
                "A valid url-safe name cannot be generated for this title."
            )

        # On rename, reject a new title whose title or derived slug collides
        # with any existing writeup.
        if writeup.title != form.title.data:
            if (
                await Writeup.query.where(
                    sa.or_(
                        Writeup.title == form.title.data,
                        Writeup.slug == slug(form.title.data),
                    )
                ).gino.first()
                is not None
            ):
                is_valid = False
                form.title.errors.append(
                    f"A writeup with the title conflicting with '{form.title.data}' already exists."
                )

        if is_valid:
            await writeup.update_auto(
                title=form.title.data,
                tags=form.tags.data,
                content=form.content.data,
                private=form.private.data,
            ).apply()

            url = request.url_for("writeups_view", slug=writeup.slug)
            await log_edit("writeup", writeup.title, request.user.username, url)

            return redirect_response(url=url)

        # Validation failed: re-render the edit form with existing images and
        # the full tag list (JSON-encoded for the template).
        images = await encoded_existing_images(request)
        tags = orjson.dumps(await get_all_tags(True))

        return templates.TemplateResponse(
            "writeups/edit.j2",
            {
                "request": request,
                "form": form,
                "writeup": writeup,
                "existing_images": images,
                "existing_tags": tags,
            },
        )
Ejemplo n.º 16
0
    def test_update_profile_data_successfully(self) -> None:
        """Updating custom profile fields persists both raw and rendered values.

        Patches every field type with a sample value, then checks that
        each field's stored value matches and that markdown-renderable
        fields also expose a rendered_value.
        """
        self.login('iago')
        realm = get_realm('zulip')
        # (field name, new value) pairs covering the realm's custom fields.
        fields = [
            ('Phone number', '*short* text data'),
            ('Biography', '~~short~~ **long** text data'),
            ('Favorite food', 'long short text data'),
            ('Favorite editor', 'vim'),
            ('Birthday', '1909-3-5'),
            ('Favorite website', 'https://zulip.com'),
            ('Mentor', [self.example_user("cordelia").id]),
            ('GitHub', 'zulip-mobile'),
        ]

        data = []
        for i, field_value in enumerate(fields):
            name, value = field_value
            field = CustomProfileField.objects.get(name=name, realm=realm)
            data.append({
                'id': field.id,
                'value': value,
                'field': field,
            })

        # Update value of field
        result = self.client_patch(
            "/json/users/me/profile_data",
            {
                "data":
                orjson.dumps([{
                    "id": f["id"],
                    "value": f["value"]
                } for f in data]).decode()
            },
        )
        self.assert_json_success(result)

        iago = self.example_user('iago')
        expected_value = {f['id']: f['value'] for f in data}
        # Renderable fields should expose markdown-rendered text; others None.
        expected_rendered_value: Dict[Union[int, float, str, None],
                                      Union[str, None]] = {}
        for f in data:
            if f['field'].is_renderable():
                expected_rendered_value[f['id']] = markdown_convert(f['value'])
            else:
                expected_rendered_value[f['id']] = None

        for field_dict in iago.profile_data:
            self.assertEqual(field_dict['value'],
                             expected_value[field_dict['id']])
            self.assertEqual(field_dict['rendered_value'],
                             expected_rendered_value[field_dict['id']])
            for k in ['id', 'type', 'name', 'field_data']:
                self.assertIn(k, field_dict)

        # Update value of one field.
        field = CustomProfileField.objects.get(name='Biography', realm=realm)
        data = [{
            'id': field.id,
            'value': 'foobar',
        }]

        result = self.client_patch("/json/users/me/profile_data",
                                   {'data': orjson.dumps(data).decode()})
        self.assert_json_success(result)
        for field_dict in iago.profile_data:
            if field_dict['id'] == field.id:
                self.assertEqual(field_dict['value'], 'foobar')
Ejemplo n.º 17
0
def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
    """Write the URL-coverage report collected by test instrumentation.

    Matches every recorded test request against the project's URL patterns,
    writes one JSON line per call to var/url_coverage.txt, and — on a full
    suite run — exits with an error listing any non-exempt URL pattern that
    was never exercised.
    """
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt: Dict[str, int] = collections.defaultdict(int)

        def re_strip(r: Any) -> str:
            # Drop the regex anchors so patterns read as plain paths.
            return str(r).lstrip("^").rstrip("$")

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            # Normalize recorded URLs to be relative to the server root.
            if url.startswith("/"):
                url = url[1:]
            if url.startswith("http://testserver/"):
                url = url[len("http://testserver/") :]
            if url.startswith("http://zulip.testserver/"):
                url = url[len("http://zulip.testserver/") :]
            if url.startswith("http://testserver:9080/"):
                url = url[len("http://testserver:9080/") :]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:

            # NOTE(review): type(URLResolver) is the metaclass, so this tests
            # whether `pattern` is itself a class object — confirm this is the
            # intended guard rather than isinstance(pattern, URLResolver).
            if isinstance(pattern, type(URLResolver)):
                return  # nocoverage -- shouldn't actually happen

            if hasattr(pattern, "url_patterns"):
                return

            # Count successful hits against this pattern; each call is tagged
            # with the first pattern it matches (skip already-tagged calls).
            canon_pattern = prefixes[0] + re_strip(pattern.pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if "pattern" in call:
                    continue

                url = cleanup_url(call["url"])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix) :]
                        if pattern.resolve(match_url):
                            if call["status_code"] in [200, 204, 301, 302]:
                                cnt += 1
                            call["pattern"] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ["", "en/", "de/"])
        find_patterns(v1_api_and_json_patterns, ["api/v1/", "json/"])

        # Sanity check that pattern discovery actually ran over the URL conf.
        assert len(pattern_cnt) > 100
        untested_patterns = {p.replace("\\", "") for p in pattern_cnt if pattern_cnt[p] == 0}

        exempt_patterns = {
            # We exempt some patterns that are called via Tornado.
            "api/v1/events",
            "api/v1/events/internal",
            "api/v1/register",
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            "coverage/(?P<path>.+)",
            "confirmation_key/",
            "node-coverage/(?P<path>.+)",
            "docs/(?P<path>.+)",
            "help/delete-a-stream",
            "api/delete-stream",
            "casper/(?P<path>.+)",
            "static/(?P<path>.+)",
            "flush_caches",
            *(webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks),
        }

        untested_patterns -= exempt_patterns

        var_dir = "var"  # TODO make sure path is robust here
        fn = os.path.join(var_dir, "url_coverage.txt")
        # One JSON object per line (ndjson) for downstream tooling.
        with open(fn, "wb") as f:
            for call in calls:
                f.write(orjson.dumps(call, option=orjson.OPT_APPEND_NEWLINE))

        if full_suite:
            print(f"INFO: URL coverage report is in {fn}")
            print("INFO: Try running: ./tools/create-test-api-docs")

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested!  Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print(f"   {untested_pattern}")
            sys.exit(1)
Ejemplo n.º 18
0
    def test_create_choice_field(self) -> None:
        """Creating a CHOICE profile field validates field_data thoroughly.

        Walks through each malformed field_data payload (non-JSON, wrong
        value types, missing/blank keys, empty choices) and asserts the
        specific error, then confirms a well-formed payload succeeds.
        """
        self.login('iago')
        data: Dict[str, Union[str, int]] = {}
        data["name"] = "Favorite programming language"
        data["field_type"] = CustomProfileField.CHOICE

        # Not valid JSON at all.
        data['field_data'] = 'invalid'
        result = self.client_post("/json/realm/profile_fields", info=data)
        error_msg = "Bad value for 'field_data': invalid"
        self.assert_json_error(result, error_msg)

        # Values must be dicts, not lists.
        data["field_data"] = orjson.dumps({
            'python': ['1'],
            'java': ['2'],
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data is not a dict')

        # Each choice dict must include an "order" key.
        data["field_data"] = orjson.dumps({
            'python': {
                'text': 'Python'
            },
            'java': {
                'text': 'Java'
            },
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "order key is missing from field_data")

        # "order" must not be blank.
        data["field_data"] = orjson.dumps({
            'python': {
                'text': 'Python',
                'order': ''
            },
            'java': {
                'text': 'Java',
                'order': '2'
            },
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["order"] cannot be blank.')

        # Choice keys must not be blank.
        data["field_data"] = orjson.dumps({
            '': {
                'text': 'Python',
                'order': '1'
            },
            'java': {
                'text': 'Java',
                'order': '2'
            },
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, "'value' cannot be blank.")

        # "order" must be a string, not an int.
        data["field_data"] = orjson.dumps({
            'python': {
                'text': 'Python',
                'order': 1
            },
            'java': {
                'text': 'Java',
                'order': '2'
            },
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'field_data["order"] is not a string')

        # At least one choice is required.
        data["field_data"] = orjson.dumps({}).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_error(result, 'Field must have at least one choice.')

        # A fully valid payload succeeds.
        data["field_data"] = orjson.dumps({
            'python': {
                'text': 'Python',
                'order': '1'
            },
            'java': {
                'text': 'Java',
                'order': '2'
            },
        }).decode()
        result = self.client_post("/json/realm/profile_fields", info=data)
        self.assert_json_success(result)
Ejemplo n.º 19
0
    async def store(self, oid, old_serial, writer, obj, txn):
        """Persist one object's pickled state (and optional JSON catalog row).

        Runs an insert/upsert or an update (when the object already has a
        serial) against the objects table inside the transaction's lock,
        translating asyncpg errors into storage-level conflict exceptions,
        then refreshes the transaction cache.
        """
        assert oid is not None

        pickled = writer.serialize()  # This calls __getstate__ of obj
        if len(pickled) >= self._large_record_size:
            log.info(f"Large object {obj.__class__}: {len(pickled)}")
        # Optionally compute the JSON catalog representation alongside the pickle.
        if self._store_json:
            json_dict = await writer.get_json()
            json = orjson.dumps(json_dict).decode("utf-8")
        else:
            json = None
        part = writer.part
        if part is None:
            part = 0

        # Choose UPDATE for existing objects (serial present, not newly
        # created); otherwise fall back to the naive upsert.
        update = False
        statement_sql = self._sql.get("NAIVE_UPSERT", self.objects_table_name)
        if not obj.__new_marker__ and obj.__serial__ is not None:
            # we should be confident this is an object update
            statement_sql = self._sql.get("UPDATE", self.objects_table_name)
            update = True

        conn = await txn.get_connection()
        async with watch_lock(txn._lock, "store_object"):
            try:
                with watch("store_object"):
                    result = await conn.fetch(
                        statement_sql,
                        oid,  # The OID of the object
                        txn._tid,  # Our TID
                        len(pickled),  # Len of the object
                        part,  # Partition indicator
                        writer.resource,  # Is a resource ?
                        writer.of,  # It belogs to a main
                        old_serial,  # Old serial
                        writer.parent_id,  # Parent OID
                        writer.id,  # Traversal ID
                        writer.type,  # Guillotina type
                        json,  # JSON catalog
                        pickled,  # Pickle state)
                    )
            except asyncpg.exceptions.UniqueViolationError as ex:
                # Duplicate id under the same parent/owner -> container conflict.
                if "Key (parent_id, id)" in ex.detail or "Key (of, id)" in ex.detail:
                    raise ConflictIdOnContainer(ex)
                raise
            except asyncpg.exceptions.ForeignKeyViolationError:
                # Parent row vanished (likely stale cache); mark deleted and retry.
                txn.deleted[obj.__uuid__] = obj
                raise TIDConflictError(
                    "Bad value inserting into database that could be caused "
                    "by a bad cache value. This should resolve on request retry.",
                    oid,
                    txn,
                    old_serial,
                    writer,
                )
            except asyncpg.exceptions._base.InterfaceError as ex:
                if "another operation is in progress" in ex.args[0]:
                    raise ConflictError(
                        "asyncpg error, another operation in progress.", oid,
                        txn, old_serial, writer)
                raise
            except asyncpg.exceptions.DeadlockDetectedError:
                raise ConflictError("Deadlock detected.", oid, txn, old_serial,
                                    writer)
            # Exactly one row with count == 1 signals a successful write.
            if len(result) != 1 or result[0]["count"] != 1:
                if update:
                    # raise tid conflict error
                    raise TIDConflictError(
                        "Mismatch of tid of object being updated. This is likely "
                        "caused by a cache invalidation race condition and should "
                        "be an edge case. This should resolve on request retry.",
                        oid,
                        txn,
                        old_serial,
                        writer,
                    )
                else:
                    log.error("Incorrect response count from database update. "
                              "This should not happen. tid: {}".format(
                                  txn._tid))
        await txn._cache.store_object(obj, pickled)
Ejemplo n.º 20
0
 def cron_trigger(self, trigger: schemas.ScheduleCronTrigger):
     """Serialize the cron trigger spec and store it on this object.

     Bug fix: ``orjson.dumps`` returns ``bytes``; decode to ``str`` so that
     ``cron_trigger_str`` actually holds a string, as its name implies and
     as a text field requires.
     """
     self.cron_trigger_str = orjson.dumps(
         trigger.dict(exclude_unset=True)
     ).decode()
Ejemplo n.º 21
0
    def consume(self, event: Dict[str, Any]) -> None:
        """Process one deferred-work queue event, dispatching on event["type"].

        Handled types: mark_stream_messages_as_read,
        mark_stream_messages_as_read_for_everyone, clear_push_device_tokens,
        and realm_export. Logs timing for the whole event at the end.
        """
        start = time.time()
        if event["type"] == "mark_stream_messages_as_read":
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            for recipient_id in event["stream_recipient_ids"]:
                count = do_mark_stream_messages_as_read(
                    user_profile, recipient_id)
                logger.info(
                    "Marked %s messages as read for user %s, stream_recipient_id %s",
                    count,
                    user_profile.id,
                    recipient_id,
                )
        elif event["type"] == "mark_stream_messages_as_read_for_everyone":
            # This event is generated by the stream deactivation code path.
            # Batched to bound per-query work on large streams.
            batch_size = 100
            offset = 0
            while True:
                messages = Message.objects.filter(
                    recipient_id=event["stream_recipient_id"]).order_by(
                        "id")[offset:offset + batch_size]
                UserMessage.objects.filter(message__in=messages).extra(
                    where=[UserMessage.where_unread()]).update(
                        flags=F("flags").bitor(UserMessage.flags.read))
                offset += len(messages)
                if len(messages) < batch_size:
                    break
            logger.info(
                "Marked %s messages as read for all users, stream_recipient_id %s",
                offset,
                event["stream_recipient_id"],
            )
        elif event["type"] == "clear_push_device_tokens":
            try:
                clear_push_device_tokens(event["user_profile_id"])
            except PushNotificationBouncerRetryLaterError:

                # Re-queue with backoff; warn once retries are exhausted.
                def failure_processor(event: Dict[str, Any]) -> None:
                    logger.warning(
                        "Maximum retries exceeded for trigger:%s event:clear_push_device_tokens",
                        event["user_profile_id"],
                    )

                retry_event(self.queue_name, event, failure_processor)
        elif event["type"] == "realm_export":
            realm = Realm.objects.get(id=event["realm_id"])
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
            export_event = RealmAuditLog.objects.get(id=event["id"])
            user_profile = get_user_profile_by_id(event["user_profile_id"])

            try:
                public_url = export_realm_wrapper(
                    realm=realm,
                    output_dir=output_dir,
                    threads=6,
                    upload=True,
                    public_only=True,
                    delete_after_upload=True,
                )
            except Exception:
                # Record the failure on the audit-log row and notify the user.
                export_event.extra_data = orjson.dumps(
                    dict(failed_timestamp=timezone_now().timestamp(),
                         )).decode()
                export_event.save(update_fields=["extra_data"])
                logging.error(
                    "Data export for %s failed after %s",
                    user_profile.realm.string_id,
                    time.time() - start,
                )
                notify_realm_export(user_profile)
                return

            assert public_url is not None

            # Update the extra_data field now that the export is complete.
            export_event.extra_data = orjson.dumps(
                dict(export_path=urllib.parse.urlparse(public_url).path,
                     )).decode()
            export_event.save(update_fields=["extra_data"])

            # Send a private message notification letting the user who
            # triggered the export know the export finished.
            with override_language(user_profile.default_language):
                content = _(
                    "Your data export is complete and has been uploaded here:\n\n{public_url}"
                ).format(public_url=public_url)
            internal_send_private_message(
                sender=get_system_bot(settings.NOTIFICATION_BOT, realm.id),
                recipient_user=user_profile,
                content=content,
            )

            # For future frontend use, also notify administrator
            # clients that the export happened.
            notify_realm_export(user_profile)
            logging.info(
                "Completed data export for %s in %s",
                user_profile.realm.string_id,
                time.time() - start,
            )

        end = time.time()
        logger.info("deferred_work processed %s event (%dms)", event["type"],
                    (end - start) * 1000)
Ejemplo n.º 22
0
# Normalize raw ndjson site files: for each "*.ndjson" file in the input
# directory, normalize every site record and write the results to the output
# directory as "<input stem>.normalized.ndjson".
output_dir = pathlib.Path(sys.argv[1])
input_dir = pathlib.Path(sys.argv[2])

json_filepaths = input_dir.glob("*.ndjson")

# One timestamp for the whole run so all records share the same parse time.
parsed_at_timestamp = datetime.datetime.utcnow().isoformat()

for in_filepath in json_filepaths:
    filename, _ = os.path.splitext(in_filepath.name)
    # Bug fix: derive the output name from the input file's stem. The
    # previous f-string had no placeholder, so every input file wrote to the
    # same literal output path, clobbering earlier results.
    out_filepath = output_dir / f"{filename}.normalized.ndjson"

    logger.info(
        "normalizing %s => %s",
        in_filepath,
        out_filepath,
    )

    with in_filepath.open("rb") as fin:
        with out_filepath.open("wb") as fout:
            # Optimization: faster to batch all of the sites into a single
            # write than to do ~46k separate writes.
            # Optimization: using orjson rather than json.
            fout.write(b"\n".join(
                orjson.dumps(
                    _get_normalized_location(
                        orjson.loads(site_json),
                        parsed_at_timestamp,
                        filename,
                    ).dict()) for site_json in fin))
            fout.write(b"\n")
Ejemplo n.º 23
0
 def render(self, content: Any) -> bytes:
     """Encode *content* as JSON bytes via orjson."""
     payload = orjson.dumps(content)
     return payload
Ejemplo n.º 24
0
    def test_search(self) -> None:
        reset_emails_in_zulip_realm()

        def check_hamlet_user_query_result(result: HttpResponse) -> None:
            self.assert_in_success_response(['<span class="label">user</span>\n', '<h3>King Hamlet</h3>',
                                             '<b>Email</b>: [email protected]', '<b>Is active</b>: True<br>',
                                             '<b>Admins</b>: [email protected], [email protected]\n',
                                             'class="copy-button" data-copytext="[email protected], [email protected]"',
                                             ], result)

        def check_zulip_realm_query_result(result: HttpResponse) -> None:
            zulip_realm = get_realm("zulip")
            self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
                                             'Zulip Dev</h3>',
                                             '<option value="1" selected>Self Hosted</option>',
                                             '<option value="2" >Limited</option>',
                                             'input type="number" name="discount" value="None"',
                                             '<option value="active" selected>Active</option>',
                                             '<option value="deactivated" >Deactivated</option>',
                                             'scrub-realm-button">',
                                             'data-string-id="zulip"'], result)

        def check_lear_realm_query_result(result: HttpResponse) -> None:
            lear_realm = get_realm("lear")
            self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
                                             'Lear &amp; Co.</h3>',
                                             '<option value="1" selected>Self Hosted</option>',
                                             '<option value="2" >Limited</option>',
                                             'input type="number" name="discount" value="None"',
                                             '<option value="active" selected>Active</option>',
                                             '<option value="deactivated" >Deactivated</option>',
                                             'scrub-realm-button">',
                                             'data-string-id="lear"',
                                             '<b>Name</b>: Zulip Standard',
                                             '<b>Status</b>: Active',
                                             '<b>Billing schedule</b>: Annual',
                                             '<b>Licenses</b>: 2/10 (Manual)',
                                             '<b>Price per license</b>: $80.0',
                                             '<b>Payment method</b>: Send invoice',
                                             '<b>Next invoice date</b>: 02 January 2017',
                                             ], result)

        def check_preregistration_user_query_result(result: HttpResponse, email: str, invite: bool=False) -> None:
            self.assert_in_success_response(['<span class="label">preregistration user</span>\n',
                                             f'<b>Email</b>: {email}',
                                             ], result)
            if invite:
                self.assert_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(['<b>Expires in</b>: 1\xa0week, 3',
                                                 '<b>Status</b>: Link has never been clicked'], result)
                self.assert_in_success_response([], result)
            else:
                self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(['<b>Expires in</b>: 1\xa0day',
                                                 '<b>Status</b>: Link has never been clicked'], result)

        def check_realm_creation_query_result(result: HttpResponse, email: str) -> None:
            """Assert the support page shows a realm-creation confirmation card."""
            expected_fragments = [
                '<span class="label">preregistration user</span>\n',
                '<span class="label">realm creation</span>\n',
                '<b>Link</b>: http://testserver/accounts/do_confirm/',
                '<b>Expires in</b>: 1\xa0day<br>\n',
            ]
            self.assert_in_success_response(expected_fragments, result)

        def check_multiuse_invite_link_query_result(result: HttpResponse) -> None:
            """Assert the support page shows a multiuse-invite card with its link and expiry."""
            expected_fragments = [
                '<span class="label">multiuse invite</span>\n',
                '<b>Link</b>: http://zulip.testserver/join/',
                '<b>Expires in</b>: 1\xa0week, 3',
            ]
            self.assert_in_success_response(expected_fragments, result)

        def check_realm_reactivation_link_query_result(result: HttpResponse) -> None:
            """Assert the support page shows a realm-reactivation card with its link and expiry."""
            expected_fragments = [
                '<span class="label">realm reactivation</span>\n',
                '<b>Link</b>: http://zulip.testserver/reactivate/',
                '<b>Expires in</b>: 1\xa0day',
            ]
            self.assert_in_success_response(expected_fragments, result)

        self.login('cordelia')

        result = self.client_get("/activity/support")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login('iago')

        customer = Customer.objects.create(realm=get_realm("lear"), stripe_customer_id='cus_123')
        now = datetime(2016, 1, 2, tzinfo=timezone.utc)
        plan = CustomerPlan.objects.create(customer=customer, billing_cycle_anchor=now,
                                           billing_schedule=CustomerPlan.ANNUAL, tier=CustomerPlan.STANDARD,
                                           price_per_license=8000, next_invoice_date=add_months(now, 12))
        LicenseLedger.objects.create(licenses=10, licenses_at_next_renewal=10, event_time=timezone_now(),
                                     is_renewal=True, plan=plan)

        result = self.client_get("/activity/support")
        self.assert_in_success_response(['<input type="text" name="q" class="input-xxlarge search-query"'], result)

        result = self.client_get("/activity/support", {"q": "*****@*****.**"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "lear"})
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "http://lear.testserver"})
        check_lear_realm_query_result(result)

        with self.settings(REALM_HOSTS={'zulip': 'localhost'}):
            result = self.client_get("/activity/support", {"q": "http://localhost"})
            check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "[email protected], lear"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "lear, Hamlet <*****@*****.**>"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        self.client_post('/accounts/home/', {'email': self.nonreg_email("test")})
        self.login('iago')
        result = self.client_get("/activity/support", {"q": self.nonreg_email("test")})
        check_preregistration_user_query_result(result, self.nonreg_email("test"))
        check_zulip_realm_query_result(result)

        stream_ids = [self.get_stream_id("Denmark")]
        invitee_emails = [self.nonreg_email("test1")]
        self.client_post("/json/invites", {"invitee_emails": invitee_emails,
                                           "stream_ids": orjson.dumps(stream_ids).decode(),
                                           "invite_as": PreregistrationUser.INVITE_AS['MEMBER']})
        result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")})
        check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
        check_zulip_realm_query_result(result)

        email = self.nonreg_email('alice')
        self.client_post('/new/', {'email': email})
        result = self.client_get("/activity/support", {"q": email})
        check_realm_creation_query_result(result, email)

        do_create_multiuse_invite_link(self.example_user("hamlet"), invited_as=1)
        result = self.client_get("/activity/support", {"q": "zulip"})
        check_multiuse_invite_link_query_result(result)
        check_zulip_realm_query_result(result)
        MultiuseInvite.objects.all().delete()

        do_send_realm_reactivation_email(get_realm("zulip"))
        result = self.client_get("/activity/support", {"q": "zulip"})
        check_realm_reactivation_link_query_result(result)
        check_zulip_realm_query_result(result)
Ejemplo n.º 25
0
 def update_with_api(self, name: str, value: Union[int, str]) -> Realm:
     """PATCH one realm property via the API and return the refreshed realm.

     String values are sent verbatim; everything else is JSON-encoded
     before being submitted.
     """
     payload = value if isinstance(value, str) else orjson.dumps(value).decode()
     response = self.client_patch("/json/realm", {name: payload})
     self.assert_json_success(response)
     # Re-fetch so the caller sees the updated database state.
     return get_realm("zulip")
Ejemplo n.º 26
0
def write_instrumentation_reports(full_suite: bool,
                                  include_webhooks: bool) -> None:
    """Dump instrumented URL-coverage data and flag untested URL patterns.

    Writes every recorded call (one JSON object per line) to
    var/url_coverage.txt.  When ``full_suite`` is True and some non-exempt
    URL patterns were never hit with a success/redirect status code, the
    offending patterns are printed and the process exits with status 1.
    Webhook URLs are exempted from the check unless ``include_webhooks``
    is True.
    """
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt: Dict[str, int] = collections.defaultdict(int)

        def re_strip(r: str) -> str:
            # Strip the regex anchors (leading "^" and trailing "$" or
            # r"\Z") so the pattern can be joined with a URL prefix below.
            assert r.startswith(r"^")
            if r.endswith(r"$"):
                return r[1:-1]
            else:
                assert r.endswith(r"\Z")
                return r[1:-2]

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            # Tally coverage for each pattern under each of the given prefixes.
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            # Normalize a recorded URL to a host-relative path without a
            # leading slash, stripping the test-server hosts used in tests.
            if url.startswith("/"):
                url = url[1:]
            if url.startswith("http://testserver/"):
                url = url[len("http://testserver/"):]
            if url.startswith("http://zulip.testserver/"):
                url = url[len("http://zulip.testserver/"):]
            if url.startswith("http://testserver:9080/"):
                url = url[len("http://testserver:9080/"):]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:
            # Count how many recorded calls resolved to this URL pattern.

            # NOTE(review): type(URLResolver) evaluates to the metaclass
            # `type`, so this tests "is pattern a class object", not "is
            # pattern a URLResolver instance" -- isinstance(pattern,
            # URLResolver) was likely intended.  Harmless in practice
            # (marked nocoverage), since patterns here are instances.
            if isinstance(pattern, type(URLResolver)):
                return  # nocoverage -- shouldn't actually happen

            # Objects with nested url_patterns (URL resolvers) are skipped.
            if hasattr(pattern, "url_patterns"):
                return

            canon_pattern = prefixes[0] + re_strip(
                pattern.pattern.regex.pattern)
            cnt = 0
            for call in calls:
                # Each recorded call is attributed to at most one pattern.
                if "pattern" in call:
                    continue

                url = cleanup_url(call["url"])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix):]
                        if pattern.resolve(match_url):
                            # Only success/redirect responses count toward
                            # coverage, but the call is marked as matched
                            # either way so later patterns skip it.
                            if call["status_code"] in [200, 204, 301, 302]:
                                cnt += 1
                            call["pattern"] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ["", "en/", "de/"])
        find_patterns(v1_api_and_json_patterns, ["api/v1/", "json/"])

        # Sanity check that instrumentation actually saw the URL table.
        assert len(pattern_cnt) > 100
        # Backslashes are stripped so the report shows readable paths.
        untested_patterns = {
            p.replace("\\", "")
            for p in pattern_cnt if pattern_cnt[p] == 0
        }

        exempt_patterns = {
            # We exempt some patterns that are called via Tornado.
            "api/v1/events",
            "api/v1/events/internal",
            "api/v1/register",
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            "coverage/(?P<path>.+)",
            "confirmation_key/",
            "node-coverage/(?P<path>.+)",
            "docs/(?P<path>.+)",
            "help/add-custom-emoji",
            "help/configure-who-can-add-custom-emoji",
            "help/change-the-topic-of-a-message",
            "help/configure-missed-message-emails",
            "help/community-topic-edits",
            "help/about-streams-and-topics",
            "help/delete-a-stream",
            "help/add-an-alert-word",
            "help/change-notification-sound",
            "help/configure-message-notification-emails",
            "help/disable-new-login-emails",
            "help/test-mobile-notifications",
            "help/troubleshooting-desktop-notifications",
            "for/working-groups-and-communities/",
            "help/only-allow-admins-to-add-emoji",
            "help/night-mode",
            "api/delete-stream",
            "casper/(?P<path>.+)",
            "static/(?P<path>.+)",
            "flush_caches",
            "external_content/(?P<digest>[^/]+)/(?P<received_url>[^/]+)",
            # These are SCIM2 urls overridden from django-scim2 to return Not Implemented.
            # We actually test them, but it's not being detected as a tested pattern,
            # possibly due to the use of re_path. TODO: Investigate and get them
            # recognized as tested.
            "scim/v2/",
            "scim/v2/.search",
            "scim/v2/Bulk",
            "scim/v2/Me",
            "scim/v2/ResourceTypes(?:/(?P<uuid>[^/]+))?",
            "scim/v2/Schemas(?:/(?P<uuid>[^/]+))?",
            "scim/v2/ServiceProviderConfig",
            "scim/v2/Groups(?:/(?P<uuid>[^/]+))?",
            "scim/v2/Groups/.search",
            *(webhook.url
              for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks),
        }

        untested_patterns -= exempt_patterns

        var_dir = "var"  # TODO make sure path is robust here
        fn = os.path.join(var_dir, "url_coverage.txt")
        with open(fn, "wb") as f:
            # One JSON object per line (JSON Lines format).
            for call in calls:
                f.write(orjson.dumps(call, option=orjson.OPT_APPEND_NEWLINE))

        if full_suite:
            print(f"INFO: URL coverage report is in {fn}")
            print("INFO: Try running: ./tools/create-test-api-docs")

        if full_suite and len(
                untested_patterns):  # nocoverage -- test suite error handling
            print(
                "\nERROR: Some URLs are untested!  Here's the list of untested URLs:"
            )
            for untested_pattern in sorted(untested_patterns):
                print(f"   {untested_pattern}")
            sys.exit(1)
Ejemplo n.º 27
0
def nest_get_spikes_by_node_collection(node_collection_id,
                                       from_time=None,
                                       to_time=None,
                                       skip=None,
                                       top=None):  # noqa: E501
    """Return the merged, deduplicated spikes for a node collection.

     # noqa: E501

    Queries every known NEST simulation node for the spikes matching the
    optional time window, merges the results, sorts them by simulation
    time, and applies the optional skip/top pagination window.

    :param node_collection_id: The identifier of the node collection.
    :type node_collection_id: int
    :param from_time: The start time (including) to be queried.
    :type from_time: float
    :param to_time: The end time (excluding) to be queried.
    :type to_time: float
    :param skip: The offset into the result.
    :type skip: int
    :param top: The maximum numbers of entries to be returned.
    :type top: int

    :rtype: Spikes
    """
    spikes = Spikes([], [])
    merged_times = []
    merged_ids = []
    last_frame = False
    for node in simulation_nodes.nest_simulation_nodes:
        raw = requests.get(node + "/spikes",
                           params={
                               "fromTime": from_time,
                               "toTime": to_time,
                               "nodeCollectionId": node_collection_id
                           })
        payload = orjson.loads(raw.content)
        # The value reported by the last-queried node wins.
        last_frame = payload["lastFrame"]
        merged_times = merged_times + payload["simulationTimes"]
        merged_ids = merged_ids + payload["nodeIds"]

    # Sort both sequences in lockstep by simulation time.
    paired = sort_together([merged_times, merged_ids])
    if paired != []:
        spikes.simulation_times = paired[0]
        spikes.node_ids = paired[1]

    # Apply the pagination window (offset and limit).
    skip = 0 if skip is None else skip
    if top is None or (top + skip) > len(spikes.node_ids):
        top = len(spikes.node_ids) - skip
    spikes.node_ids = spikes.node_ids[skip:skip + top]
    spikes.simulation_times = spikes.simulation_times[skip:skip + top]

    body = orjson.dumps({
        "nodeIds": spikes.node_ids,
        "simulationTimes": spikes.simulation_times,
        "lastFrame": last_frame
    })
    return ConnexionResponse(status_code=200,
                             content_type='application/json',
                             mimetype='text/plain',
                             body=body)
Ejemplo n.º 28
0
def _create_response(response_body: Any, status_code: int) -> Response:
    """Serialize *response_body* with orjson and wrap it in a JSON response."""
    payload = orjson.dumps(response_body)
    return Response(
        response=payload,
        status=status_code,
        mimetype="application/json",
    )
Ejemplo n.º 29
0
def nest_get_multimeter_measurements(multimeter_id,
                                     attribute_name,
                                     from_time=None,
                                     to_time=None,
                                     node_ids=None,
                                     skip=0,
                                     top=0):  # noqa: E501
    """Retrieves the measurements for a multimeter, attribute and node IDs (optional).

     # noqa: E501

    :param multimeter_id: The multimeter to query
    :type multimeter_id: int
    :param attribute_name: The attribute to query (e.g., &#39;V_m&#39; for the membrane potential)
    :type attribute_name: str
    :param from_time: The start time (including) to be queried.
    :type from_time: float
    :param to_time: The end time (excluding) to be queried.
    :type to_time: float
    :param node_ids: A list of node IDs queried for attribute data.
    :type node_ids: List[int]
    :param skip: The offset into the result.
    :type skip: int
    :param top: The maximum number of entries to be returned.
    :type top: int

    :rtype: MultimeterMeasurement
    """
    # TODO: cache the multimeter -> monitored-nodes lookup; it is re-fetched
    # on every request.
    mult_found, mult_nodes = nest_get_nodes_by_multimeter_id(multimeter_id)

    node_id_params = node_ids
    if node_ids is None and mult_found:
        # No explicit filter: query every node the multimeter monitors.
        node_ids = mult_nodes
    else:
        # Reject node IDs that the multimeter does not monitor.
        # NOTE(review): if node_ids is None and the multimeter was not found,
        # this loop iterates None and raises TypeError -- confirm whether an
        # explicit error response is intended for an unknown multimeter.
        for node_id in node_ids:
            if node_id not in mult_nodes:
                error = Error(code="InvalidMultimeterRequest",
                              message="Node " + str(node_id) +
                              " is not monitored by given Multimeter")
                error_response = ErrorResponse(error)
                return error_response, 400

    init = True
    sim_times = []
    # Initialize up front so the JSON payload below is well-defined even when
    # the simulation-node list is empty (previously a NameError in that case).
    multimeter_values = []
    if simulation_nodes.nest_simulation_nodes is None:
        return

    for node in simulation_nodes.nest_simulation_nodes:

        if node_id_params is not None:
            node_id_param = ",".join(map(str, node_id_params))
        else:
            node_id_param = None

        response = requests.get(node + "/multimeter_measurement",
                                params={
                                    "multimeterId": multimeter_id,
                                    "attribute": attribute_name,
                                    "fromTime": from_time,
                                    "toTime": to_time,
                                    "nodeIds": node_id_param
                                })
        response = orjson.loads(response.content)

        if init:
            # Allocate one flat slot per (node, time) pair based on the first
            # node's reply; subsequent replies are assumed to share this time
            # axis -- TODO confirm.
            sim_times = response["simulationTimes"]
            multimeter_values = [
                None for _ in range(0, (len(sim_times) * len(node_ids)))
            ]
            init = False
        # Scatter each reported node's values into its row of the flat buffer.
        for x in range(len(response['nodeIds'])):
            node_id = response['nodeIds'][x]
            index = node_ids.index(node_id)
            index_offset = index * len(sim_times)
            for y in range(len(sim_times)):
                multimeter_values[index_offset +
                                  y] = response['values'][x * len(sim_times) +
                                                          y]

    # offset and limit
    if skip > 0 or top > 0:
        print("slicing")
        # NOTE(review): this overwrites the caller-supplied `top` with
        # "everything after skip" -- confirm whether `top` should actually
        # bound the number of returned nodes.
        top = len(node_ids) - skip
        node_ids = node_ids[skip:skip + top]
        multimeter_values = multimeter_values[skip *
                                              len(sim_times):(skip + top) *
                                              len(sim_times)]

    json_string = orjson.dumps({
        "simulationTimes": sim_times,
        "nodeIds": node_ids,
        "values": multimeter_values
    })
    return ConnexionResponse(status_code=200,
                             content_type='application/json',
                             mimetype='text/plain',
                             body=json_string)
Ejemplo n.º 30
0
 def test_dict_keys_substr(self):
     """A str subclass used as a dict key serializes like the plain string."""
     payload = {SubStr("aaa"): True}
     encoded = orjson.dumps(payload, option=orjson.OPT_NON_STR_KEYS)
     self.assertEqual(encoded, b'{"aaa":true}')