def generate_secrets(development: bool = False) -> None:
    if development:
        OUTPUT_SETTINGS_FILENAME = "zproject/dev-secrets.conf"
    else:
        OUTPUT_SETTINGS_FILENAME = "/etc/zulip/zulip-secrets.conf"
    current_conf = get_old_conf(OUTPUT_SETTINGS_FILENAME)

    lines: List[str] = []
    if len(current_conf) == 0:
        lines = ['[secrets]\n']

    def need_secret(name: str) -> bool:
        return name not in current_conf

    def add_secret(name: str, value: str) -> None:
        lines.append(f"{name} = {value}\n")
        current_conf[name] = value

    for name in AUTOGENERATED_SETTINGS:
        if need_secret(name):
            add_secret(name, random_token())

    # These secrets are exclusive to a Zulip development environment.
    # We use postgres peer authentication by default in production,
    # and initial_password_salt is used to generate passwords for the
    # test/development database users.  See `manage.py
    # print_initial_password`.
    if development and need_secret("initial_password_salt"):
        add_secret("initial_password_salt", random_token())
    if development and need_secret("local_database_password"):
        add_secret("local_database_password", random_token())

    # The core Django SECRET_KEY setting, used by Django internally to
    # secure sessions.  If this gets changed, all users will be logged out.
    if need_secret('secret_key'):
        secret_key = generate_django_secretkey()
        add_secret('secret_key', secret_key)
        # To prevent Django ImproperlyConfigured error
        from zproject import settings
        settings.SECRET_KEY = secret_key

    # Secret key for the Camo HTTPS proxy.
    if need_secret('camo_key'):
        add_secret('camo_key', random_string(64))

    if not development:
        # The memcached_password and redis_password secrets are only
        # required/relevant in production.

        # Password for authentication to memcached.
        if need_secret("memcached_password"):
            # We defer importing settings unless we need it, because
            # importing settings is expensive (mostly because of
            # django-auth-ldap) and we want the noop case to be fast.
            from zproject import settings

            if settings.MEMCACHED_LOCATION == "127.0.0.1:11211":
                add_secret("memcached_password", random_token())

        # Password for authentication to redis.
        if need_secret("redis_password"):
            # We defer importing settings unless we need it, because
            # importing settings is expensive (mostly because of
            # django-auth-ldap) and we want the noop case to be fast.
            from zproject import settings

            if settings.REDIS_HOST == "127.0.0.1":
                # To prevent Puppet from restarting Redis, which would lose
                # data because we configured Redis to disable persistence, set
                # the Redis password on the running server and edit the config
                # file directly.
                import redis
                from zerver.lib.redis_utils import get_redis_client

                redis_password = random_token()

                for filename in ["/etc/redis/zuli-redis.conf", "/etc/redis/zulip-redis.conf"]:
                    if os.path.exists(filename):
                        with open(filename, "a") as f:
                            f.write(
                                "# Set a Redis password based on zulip-secrets.conf\n"
                                f"requirepass '{redis_password}'\n",
                            )
                        break

                try:
                    get_redis_client().config_set("requirepass", redis_password)
                except redis.exceptions.ConnectionError:
                    pass

                add_secret("redis_password", redis_password)

    # Random id and secret used to identify this installation when
    # accessing the Zulip mobile push notifications service.
    # * zulip_org_key is generated using os.urandom().
    # * zulip_org_id only needs to be unique, so we use a UUID.
    if need_secret('zulip_org_key'):
        add_secret('zulip_org_key', random_string(64))
    if need_secret('zulip_org_id'):
        add_secret('zulip_org_id', str(uuid.uuid4()))

    if len(lines) == 0:
        print("generate_secrets: No new secrets to generate.")
        return

    with open(OUTPUT_SETTINGS_FILENAME, 'a') as f:
        # Write a newline at the start, in case there was no newline at
        # the end of the file due to human editing.
        f.write("\n" + "".join(lines))

    print(f"Generated new secrets in {OUTPUT_SETTINGS_FILENAME}.")
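# A minimal illustrative sketch, not code from the snippets above: the file
# written by generate_secrets() is plain INI with a single [secrets] section
# and one "name = value" line per secret, so it can be read back with the
# standard-library configparser.  The read_secret() helper and the
# dev-secrets path below are assumptions made for this example.
import configparser

def read_secret(name: str, path: str = "zproject/dev-secrets.conf") -> str:
    # RawConfigParser avoids '%' interpolation, which matters because the
    # generated Django secret_key may contain '%' characters.
    config = configparser.RawConfigParser()
    config.read(path)
    return config.get("secrets", name)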
if "to" in debug_info: scrubbed_error = u"Stream: %s\n%s" % (redact_stream( debug_info["to"]), scrubbed_error) if "stream" in debug_info: scrubbed_error = u"Realm: %s\n%s" % (debug_info["stream"].realm.domain, scrubbed_error) logger.error(scrubbed_error) report_to_zulip(scrubbed_error) # Temporary missed message addresses redis_client = get_redis_client() def missed_message_redis_key(token): # type: (text_type) -> text_type return 'missed_message:' + token def is_missed_message_address(address): # type: (text_type) -> bool msg_string = get_email_gateway_message_string_from_address(address) return msg_string.startswith('mm') and len(msg_string) == 34 def get_missed_message_token_from_address(address):
    def __init__(self):
        # type: () -> None
        super(MessageSenderWorker, self).__init__()
        self.redis_client = get_redis_client()
        self.handler = BaseHandler()
        self.handler.load_middleware()
from __future__ import absolute_import
from django.conf import settings
from zerver.lib.redis_utils import get_redis_client
import redis
import time
import logging

from itertools import izip

# Implement a rate-limiting scheme inspired by the one described here, but heavily modified
# http://blog.domaintools.com/2013/04/rate-limiting-with-redis/

client = get_redis_client()
rules = settings.RATE_LIMITING_RULES

def _rules_for_user(user):
    if user.rate_limits != "":
        return [[int(l) for l in limit.split(':')] for limit in user.rate_limits.split(',')]
    return rules

def redis_key(user, domain):
    """Return the redis keys for this user"""
    return ["ratelimit:%s:%s:%s:%s" % (type(user), user.id, domain, keytype)
            for keytype in ['list', 'zset', 'block']]

def max_api_calls(user):
    "Returns the API rate limit for the highest limit"
    return _rules_for_user(user)[-1][1]

def max_api_window(user):
    "Returns the API time window for the highest limit"
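# A hedged usage sketch for the helpers above; FakeUser and the sample
# override string are invented for illustration and are not part of the rate
# limiter.  _rules_for_user() parses a per-user "seconds:max_calls,..." string,
# falling back to the global RATE_LIMITING_RULES when it is empty, and
# redis_key() yields one key per bucket ('list', 'zset', 'block').
class FakeUser(object):
    def __init__(self, id, rate_limits=""):
        self.id = id
        self.rate_limits = rate_limits

print(_rules_for_user(FakeUser(42, "60:100,3600:1000")))  # [[60, 100], [3600, 1000]]
print(redis_key(FakeUser(42), "api_by_user"))
# roughly: ['ratelimit:<class ...FakeUser>:42:api_by_user:list', '...:zset', '...:block']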
    def __init__(self) -> None:
        super().__init__()
        self.redis_client = get_redis_client()
        self.handler = BaseHandler()
        self.handler.load_middleware()
    def __init__(self):
        super(MessageSenderWorker, self).__init__()
        self.redis_client = get_redis_client()
        self.handler = BaseHandler()
        self.handler.load_middleware()
def generate_secrets(development: bool = False) -> None:
    if development:
        OUTPUT_SETTINGS_FILENAME = "zproject/dev-secrets.conf"
    else:
        OUTPUT_SETTINGS_FILENAME = "/etc/zulip/zulip-secrets.conf"
    current_conf = get_old_conf(OUTPUT_SETTINGS_FILENAME)

    lines = []  # type: List[str]
    if len(current_conf) == 0:
        lines = ['[secrets]\n']

    def need_secret(name: str) -> bool:
        return name not in current_conf

    def add_secret(name: str, value: str) -> None:
        lines.append("%s = %s\n" % (name, value))
        current_conf[name] = value

    for name in AUTOGENERATED_SETTINGS:
        if need_secret(name):
            add_secret(name, generate_random_token(64))

    if development and need_secret("initial_password_salt"):
        add_secret("initial_password_salt", generate_random_token(64))
    if development and need_secret("local_database_password"):
        add_secret("local_database_password", generate_random_token(64))

    if need_secret('secret_key'):
        secret_key = generate_django_secretkey()
        add_secret('secret_key', secret_key)
        # To prevent Django ImproperlyConfigured error
        settings.SECRET_KEY = secret_key

    if need_secret('camo_key'):
        add_secret('camo_key', get_random_string(64))

    if (not development and settings.MEMCACHED_LOCATION == "127.0.0.1:11211" and
            need_secret("memcached_password")):
        add_secret("memcached_password", generate_random_token(64))

    if (not development and settings.REDIS_HOST == "127.0.0.1" and
            need_secret("redis_password")):
        # To prevent Puppet from restarting Redis, which would lose
        # data because we configured Redis to disable persistence, set
        # the Redis password on the running server and edit the config
        # file directly.
        import redis
        from zerver.lib.redis_utils import get_redis_client

        redis_password = generate_random_token(64)

        for filename in ["/etc/redis/zuli-redis.conf", "/etc/redis/zulip-redis.conf"]:
            if os.path.exists(filename):
                with open(filename, "a") as f:
                    f.write(
                        "# Set a Redis password based on zulip-secrets.conf\n"
                        "requirepass '%s'\n" % (redis_password,))
                break

        try:
            get_redis_client().config_set("requirepass", redis_password)
        except redis.exceptions.ConnectionError:
            pass

        add_secret("redis_password", redis_password)

    # zulip_org_key is generated using os.urandom().
    # zulip_org_id does not require a secure CPRNG,
    # it only needs to be unique.
    if need_secret('zulip_org_key'):
        add_secret('zulip_org_key', get_random_string(64))
    if need_secret('zulip_org_id'):
        add_secret('zulip_org_id', str(uuid.uuid4()))

    if len(lines) == 0:
        print("generate_secrets: No new secrets to generate.")
        return

    with open(OUTPUT_SETTINGS_FILENAME, 'a') as f:
        # Write a newline at the start, in case there was no newline at
        # the end of the file due to human editing.
        f.write("\n" + "".join(lines))

    print("Generated new secrets in %s." % (OUTPUT_SETTINGS_FILENAME,))
    def setUpClass(cls) -> None:
        cls.redis_client = get_redis_client()
        return super().setUpClass()
def move_missed_message_addresses_to_database(
        apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    redis_client = get_redis_client()
    MissedMessageEmailAddress = apps.get_model('zerver', 'MissedMessageEmailAddress')
    UserProfile = apps.get_model('zerver', 'UserProfile')
    Message = apps.get_model('zerver', 'Message')
    Recipient = apps.get_model('zerver', 'Recipient')
    RECIPIENT_PERSONAL = 1
    RECIPIENT_STREAM = 2

    all_mm_keys = redis_client.keys('missed_message:*')
    for key in all_mm_keys:
        # Don't migrate mm addresses that have already been used.
        if redis_client.hincrby(key, 'uses_left', -1) < 0:
            redis_client.delete(key)
            continue

        result = redis_client.hmget(key, 'user_profile_id', 'recipient_id', 'subject')
        if not all(val is not None for val in result):
            # Missing data, skip this key; this should never happen
            redis_client.delete(key)
            continue

        user_profile_id, recipient_id, subject_b = result  # type: (bytes, bytes, bytes)
        topic_name = subject_b.decode('utf-8')

        # The data model for missed-message emails has changed in two
        # key ways: We're moving it from redis to the database for
        # better persistence, and also replacing the stream + topic
        # (as the reply location) with a message to reply to.  Because
        # the redis data structure only had stream/topic pairs, we use
        # the following migration logic to find the latest message in
        # the thread indicated by the redis data (if it exists).
        try:
            user_profile = UserProfile.objects.get(id=user_profile_id)
            recipient = Recipient.objects.get(id=recipient_id)

            if recipient.type == RECIPIENT_STREAM:
                message = Message.objects.filter(subject__iexact=topic_name,
                                                 recipient_id=recipient.id).latest('id')
            elif recipient.type == RECIPIENT_PERSONAL:
                # Tie to the latest PM from the sender to this user;
                # we expect at least one existed because it generated
                # this missed-message email, so we can skip the
                # normally required additional check for messages we
                # ourselves sent to the target user.
                message = Message.objects.filter(recipient_id=user_profile.recipient_id,
                                                 sender_id=recipient.type_id).latest('id')
            else:
                message = Message.objects.filter(recipient_id=recipient.id).latest('id')
        except ObjectDoesNotExist:
            # If all messages in the original thread were deleted or
            # had their topics edited, we can't find an appropriate
            # message to tag; we just skip migrating this message.
            # The consequence (replies to this particular
            # missed-message email bouncing) is acceptable.
            redis_client.delete(key)
            continue

        # The timestamp will be set to the default (now) which means
        # the address will take longer to expire than it would have in
        # redis, but this small issue is probably worth the simplicity
        # of not having to figure out the precise timestamp.
        MissedMessageEmailAddress.objects.create(message=message,
                                                 user_profile=user_profile,
                                                 email_token=generate_missed_message_token())

        # We successfully transferred this missed-message email's data
        # to the database, so this message can be deleted from redis.
        redis_client.delete(key)
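# A hedged illustration of the redis data this migration consumes; the token
# and field values below are invented for the example.  Each pending address
# lives in a hash under a "missed_message:<token>" key, holding the
# user_profile_id, recipient_id and subject fields read above plus the
# uses_left counter that gates whether the address is still valid to migrate.
redis_client = get_redis_client()
key = 'missed_message:4ada2b1d19384348a1b02a7efcf560d5'
redis_client.hset(key, 'user_profile_id', 7)
redis_client.hset(key, 'recipient_id', 13)
redis_client.hset(key, 'subject', 'lunch plans')
redis_client.hset(key, 'uses_left', 1)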