def filter(self, record):
    # type: (logging.LogRecord) -> bool
    """Rate-limit log records: return False (suppress) when an identical
    record was already emitted within the configured window."""
    from django.conf import settings
    from django.core.cache import cache

    # Track duplicate errors
    duplicate = False
    # Window in seconds, configurable via settings.<CLASSNAME>_LIMIT.
    rate = getattr(settings, '%s_LIMIT' % self.__class__.__name__.upper(),
                   600)  # seconds

    if rate > 0:
        # Test if the cache works
        try:
            cache.set('RLF_TEST_KEY', 1, 1)
            use_cache = cache.get('RLF_TEST_KEY') == 1
        except Exception:
            use_cache = False

        if use_cache:
            # Key on a SHA-1 of the traceback (or of the record itself when
            # there is no exception) so only truly identical errors collapse.
            if record.exc_info is not None:
                tb = force_bytes('\n'.join(
                    traceback.format_exception(*record.exc_info)))
            else:
                tb = force_bytes(u'%s' % (record, ))
            key = self.__class__.__name__.upper() + hashlib.sha1(
                tb).hexdigest()
            duplicate = cache.get(key) == 1
            if not duplicate:
                cache.set(key, 1, rate)
        else:
            # Cache unavailable: fall back to suppressing any record emitted
            # within `rate` seconds of the previous one, regardless of content.
            min_date = timezone_now() - timedelta(seconds=rate)
            duplicate = (self.last_error >= min_date)
            if not duplicate:
                self.last_error = timezone_now()

    return not duplicate
def filter(self, record):
    # type: (logging.LogRecord) -> bool
    """Rate-limit log records: return False (suppress) when an identical
    record was already emitted within the configured window."""
    from django.conf import settings
    from django.core.cache import cache

    # Track duplicate errors
    duplicate = False
    # Window in seconds, configurable via settings.<CLASSNAME>_LIMIT.
    rate = getattr(settings, '%s_LIMIT' % self.__class__.__name__.upper(),
                   600)  # seconds

    if rate > 0:
        # Test if the cache works
        try:
            cache.set('RLF_TEST_KEY', 1, 1)
            use_cache = cache.get('RLF_TEST_KEY') == 1
        except Exception:
            use_cache = False

        if use_cache:
            # Key on a SHA-1 of the traceback (or of the record itself when
            # there is no exception) so only truly identical errors collapse.
            if record.exc_info is not None:
                tb = force_bytes('\n'.join(traceback.format_exception(*record.exc_info)))
            else:
                tb = force_bytes(str(record))
            key = self.__class__.__name__.upper() + hashlib.sha1(tb).hexdigest()
            duplicate = cache.get(key) == 1
            if not duplicate:
                cache.set(key, 1, rate)
        else:
            # Cache unavailable: fall back to suppressing any record emitted
            # within `rate` seconds of the previous one, regardless of content.
            min_date = timezone.now() - timedelta(seconds=rate)
            duplicate = (self.last_error >= min_date)
            if not duplicate:
                self.last_error = timezone.now()

    return not duplicate
def ccache_principal(name, realm):
    # type: (Dict[str, str], str) -> bytes
    """Serialize a Kerberos principal for a credential cache: a 32-bit name
    type and component count, then the realm and each name component as
    counted octet strings."""
    components = name["nameString"]
    header = struct.pack("!II", name["nameType"], len(components))
    encoded_realm = ccache_counted_octet_string(force_bytes(realm))
    encoded_components = b"".join(
        ccache_counted_octet_string(force_bytes(component))
        for component in components)
    return header + encoded_realm + encoded_components
def der_encode_length(length):
    # type: (int) -> bytes
    """DER-encode a length field per X.690.

    Lengths up to 127 use the short form (one octet, high bit clear);
    larger lengths use the long form: one octet of 0x80 | <number of
    length octets>, followed by the length in big-endian base 256.
    """
    if length <= 127:
        # BUG FIX: force_bytes(chr(n)) UTF-8-encodes code points >= 0x80
        # into two bytes; struct.pack("!B", n) always emits the raw octet.
        return struct.pack("!B", length)
    out = b""
    while length > 0:
        out = struct.pack("!B", length & 0xff) + out
        length >>= 8
    # Long-form prefix octet: high bit set, low bits = octet count.
    out = struct.pack("!B", len(out) | 0x80) + out
    return out
def render_tex(tex, is_inline=True):
    # type: (Text, bool) -> Optional[Text]
    """Render a TeX string into HTML using KaTeX

    Returns the HTML string, or None if there was some error in the TeX syntax

    Keyword arguments:
    tex -- Text string with the TeX to render
           Don't include delimiters ('$$', '\[ \]', etc.)
    is_inline -- Boolean setting that indicates whether the render should
                 be inline (i.e. for embedding it in text) or not. The
                 latter will show the content centered, and in the
                 "expanded" form (default True)
    """
    command = ['npm', 'run', '-s', 'katex']
    if not is_inline:
        command.extend(['--', '--display-mode'])
    katex = subprocess.Popen(command,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE)
    stdout = katex.communicate(input=force_bytes(tex))[0]
    if katex.returncode == 0:
        # NOTE(review): stdout is the raw bytes from Popen (including a
        # trailing newline), not Text as the annotation suggests — callers
        # likely need .decode('utf-8').strip(); confirm against call sites.
        return stdout
    else:
        return None
def json_method_not_allowed(methods):
    # type: (List[text_type]) -> HttpResponseNotAllowed
    """Build a 405 Method Not Allowed response whose JSON body names the
    permitted HTTP methods."""
    resp = HttpResponseNotAllowed(methods)
    # Body mirrors the standard Zulip JSON error shape.
    resp.content = force_bytes(ujson.dumps({"result": "error",
                                            "msg": "Method Not Allowed",
                                            "allowed_methods": methods}))
    return resp
def render_tex(tex, is_inline=True):
    # type: (Text, bool) -> Optional[Text]
    """Render a TeX string into HTML using KaTeX

    Returns the HTML string, or None if there was some error in the TeX syntax

    Keyword arguments:
    tex -- Text string with the TeX to render
           Don't include delimiters ('$$', '\[ \]', etc.)
    is_inline -- Boolean setting that indicates whether the render should
                 be inline (i.e. for embedding it in text) or not. The
                 latter will show the content centered, and in the
                 "expanded" form (default True)
    """
    katex_path = os.path.join(settings.STATIC_ROOT, 'third/katex/cli.js')
    if not os.path.isfile(katex_path):
        logging.error("Cannot find KaTeX for latex rendering!")
        return None
    command = ['node', katex_path]
    if not is_inline:
        command.extend(['--', '--display-mode'])
    katex = subprocess.Popen(command,
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    stdout = katex.communicate(input=force_bytes(tex))[0]
    if katex.returncode == 0:
        # stdout contains a newline at the end
        return stdout.decode('utf-8').strip()
    else:
        return None
def render_tex(tex, is_inline=True):
    # type: (Text, bool) -> Optional[Text]
    """Render a TeX string into HTML using KaTeX.

    Returns the rendered HTML, or None when the KaTeX CLI is missing or
    the TeX contains a syntax error.

    tex -- TeX source; don't include delimiters ('$$', '\[ \]', etc.)
    is_inline -- when False, render in display mode (centered, "expanded"
                 form) instead of inline (default True).
    """
    katex_path = os.path.join(settings.STATIC_ROOT, 'third/katex/cli.js')
    if not os.path.isfile(katex_path):
        logging.error("Cannot find KaTeX for latex rendering!")
        return None

    command = ['node', katex_path]
    if not is_inline:
        command.extend(['--', '--display-mode'])

    proc = subprocess.Popen(command,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    rendered = proc.communicate(input=force_bytes(tex))[0]
    if proc.returncode != 0:
        return None
    # stdout contains a newline at the end
    assert rendered is not None
    return rendered.decode('utf-8').strip()
def _wrapped_func_arguments(request, *args, **kwargs):
    # type: (HttpRequest, *Any, **Any) -> HttpResponse
    """Authenticate the request via HTTP basic auth, then dispatch to
    view_func under rate limiting."""
    # First try block attempts to get the credentials we need to do authentication
    try:
        # Grab the base64-encoded authentication string, decode it, and split it into
        # the email and API key
        auth_type, credentials = request.META['HTTP_AUTHORIZATION'].split()
        # case insensitive per RFC 1945
        if auth_type.lower() != "basic":
            return json_error(_("This endpoint requires HTTP basic authentication."))
        role, api_key = base64.b64decode(force_bytes(credentials)).decode('utf-8').split(":")
    except ValueError:
        # Covers both a malformed header (wrong number of tokens) and a
        # decoded credential string without a ':' separator.
        return json_unauthorized(_("Invalid authorization header for basic auth"))
    except KeyError:
        # No Authorization header at all.
        return json_unauthorized("Missing authorization header for basic auth")
    # Now we try to do authentication or die
    try:
        # profile is a Union[UserProfile, RemoteZulipServer]
        profile = validate_api_key(request, role, api_key, is_webhook)
    except JsonableError as e:
        return json_unauthorized(e.msg)
    # Apply rate limiting
    return rate_limit()(view_func)(request, profile, *args, **kwargs)
def _wrapped_func_arguments(request, *args, **kwargs):
    # type: (HttpRequest, *Any, **Any) -> HttpResponse
    """Authenticate the request via HTTP basic auth, then dispatch to
    view_func under rate limiting."""
    # First try block attempts to get the credentials we need to do authentication
    try:
        # Grab the base64-encoded authentication string, decode it, and split it into
        # the email and API key
        auth_type, credentials = request.META['HTTP_AUTHORIZATION'].split()
        # case insensitive per RFC 1945
        if auth_type.lower() != "basic":
            return json_error(_("Only Basic authentication is supported."))
        role, api_key = base64.b64decode(force_bytes(credentials)).decode('utf-8').split(":")
    except ValueError:
        # BUG FIX: this branch previously built an error response without
        # returning it, so execution fell through to the code below with
        # `role`/`api_key` unbound (NameError). Return a 401 here, matching
        # the sibling basic-auth decorators in this file.
        return json_unauthorized(_("Invalid authorization header for basic auth"))
    except KeyError:
        # No Authorization header at all.
        return json_unauthorized("Missing authorization header for basic auth")
    # Now we try to do authentication or die
    try:
        # Could be a UserProfile or a Deployment
        profile = validate_api_key(request, role, api_key, is_webhook)
    except JsonableError as e:
        return json_unauthorized(e.error)
    request.user = profile
    process_client(request, profile)
    if isinstance(profile, UserProfile):
        request._email = profile.email
    else:
        assert isinstance(profile, Deployment)
        request._email = "deployment:" + role
        # NOTE(review): presumably exempts deployments from per-user rate
        # limits — confirm against the rate limiter.
        profile.rate_limits = ""
    # Apply rate limiting
    return rate_limit()(view_func)(request, profile, *args, **kwargs)
def json_method_not_allowed(methods):
    # type: (List[Text]) -> HttpResponseNotAllowed
    """Build a 405 Method Not Allowed response whose JSON body names the
    permitted HTTP methods."""
    payload = {"result": "error",
               "msg": "Method Not Allowed",
               "allowed_methods": methods}
    resp = HttpResponseNotAllowed(methods)
    resp.content = force_bytes(ujson.dumps(payload))
    return resp
def _wrapped_func_arguments(request, *args, **kwargs):
    # type: (HttpRequest, *Any, **Any) -> HttpResponse
    """Authenticate the request via HTTP basic auth (user or remote Zulip
    server), record the client, then dispatch to view_func under rate
    limiting."""
    # First try block attempts to get the credentials we need to do authentication
    try:
        # Grab the base64-encoded authentication string, decode it, and split it into
        # the email and API key
        auth_type, credentials = request.META['HTTP_AUTHORIZATION'].split()
        # case insensitive per RFC 1945
        if auth_type.lower() != "basic":
            return json_error(_("This endpoint requires HTTP basic authentication."))
        role, api_key = base64.b64decode(force_bytes(credentials)).decode('utf-8').split(":")
    except ValueError:
        # Covers both a malformed header and a credential string without ':'.
        return json_unauthorized(_("Invalid authorization header for basic auth"))
    except KeyError:
        # No Authorization header at all.
        return json_unauthorized("Missing authorization header for basic auth")
    # Now we try to do authentication or die
    try:
        # profile is a Union[UserProfile, RemoteZulipServer]
        profile = validate_api_key(request, role, api_key, is_webhook)
    except JsonableError as e:
        return json_unauthorized(e.error)
    request.user = profile
    if is_remote_server(role):
        assert isinstance(profile, RemoteZulipServer)  # type: ignore # https://github.com/python/mypy/issues/2957
        request._email = "zulip-server:" + role
        # NOTE(review): presumably exempts remote servers from per-user rate
        # limits — confirm against the rate limiter.
        profile.rate_limits = ""
        process_client(request, profile, remote_server_request=True)
    else:
        assert isinstance(profile, UserProfile)  # type: ignore # https://github.com/python/mypy/issues/2957
        request._email = profile.email
        process_client(request, profile)
    # Apply rate limiting
    return rate_limit()(view_func)(request, profile, *args, **kwargs)
def json_unauthorized(message, www_authenticate=None):
    # type: (Text, Optional[Text]) -> HttpResponse
    """Build a 401 Unauthorized response carrying a JSON error body
    (newline-terminated), with an optional WWW-Authenticate value."""
    body = ujson.dumps({"result": "error",
                        "msg": message}) + "\n"
    resp = HttpResponseUnauthorized("zulip", www_authenticate=www_authenticate)
    resp.content = force_bytes(body)
    return resp
def _wrapped_view_func(request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
    """Normalize a Basic auth header in place before delegating to
    view_func; any failure leaves the original header untouched."""
    try:
        auth_type, encoded_value = request.META['HTTP_AUTHORIZATION'].split()  # type: str, str
        if auth_type.lower() == "basic":
            email, api_key = base64.b64decode(force_bytes(encoded_value)).decode('utf-8').split(":")
            # Presumably clients escape '@' as '%40' in the username; undo
            # that here — TODO confirm against the clients that do this.
            email = email.replace('%40', '@')
            credentials = u"%s:%s" % (email, api_key)
            encoded_credentials = force_str(base64.b64encode(credentials.encode('utf-8')))
            request.META['HTTP_AUTHORIZATION'] = "Basic " + encoded_credentials
    except Exception:
        # Deliberately best-effort: on any parse/decode problem, keep the
        # original header and let downstream authentication reject it.
        pass
    return view_func(request, *args, **kwargs)
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
    """Generate the medium-size variant of the user's avatar and upload it
    to S3 as "<avatar-path>-medium.png"."""
    file_path = user_avatar_path(user_profile)
    s3_file_name = file_path

    bucket_name = settings.S3_AVATAR_BUCKET
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = get_bucket(conn, bucket_name)
    # NOTE(review): assumes the full-size avatar already exists in S3;
    # get_key would return None otherwise — confirm callers guarantee this.
    key = bucket.get_key(file_path)
    image_data = force_bytes(key.get_contents_as_string())

    resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
    upload_image_to_s3(bucket_name, s3_file_name + "-medium.png", "image/png",
                       user_profile, resized_medium)
def _wrapped_view_func(request, *args, **kwargs):
    # type: (HttpRequest, *Any, **Any) -> HttpResponse
    """Normalize a Basic auth header in place before delegating to
    view_func; any failure leaves the original header untouched."""
    try:
        auth_type, encoded_value = request.META['HTTP_AUTHORIZATION'].split()  # type: str, str
        if auth_type.lower() == "basic":
            decoded = base64.b64decode(force_bytes(encoded_value)).decode('utf-8')
            email, api_key = decoded.split(":")
            # Undo URL-style escaping of '@' in the email portion.
            email = email.replace('%40', '@')
            credentials = u"%s:%s" % (email, api_key)
            reencoded = force_str(base64.b64encode(credentials.encode('utf-8')))
            request.META['HTTP_AUTHORIZATION'] = "Basic " + reencoded
    except Exception:
        # Deliberately best-effort: on any parse/decode problem, keep the
        # original header and let downstream authentication reject it.
        pass
    return view_func(request, *args, **kwargs)
def ensure_medium_avatar_image(self, email):
    # type: (Text) -> None
    """Generate the medium-size variant of the avatar for the user with
    this email and upload it to S3 as "<email-hash>-medium.png"."""
    user_profile = get_user_profile_by_email(email)
    email_hash = user_avatar_hash(email)
    s3_file_name = email_hash

    bucket_name = settings.S3_AVATAR_BUCKET
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = get_bucket(conn, bucket_name)
    # NOTE(review): assumes the full-size avatar already exists in S3;
    # get_key would return None otherwise — confirm callers guarantee this.
    key = bucket.get_key(email_hash)
    image_data = force_bytes(key.get_contents_as_string())

    resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
    upload_image_to_s3(bucket_name, s3_file_name + "-medium.png", "image/png",
                       user_profile, resized_medium)
def highlight_string_bytes_offsets(text, locs):
    # type: (AnyStr, Iterable[Tuple[int, int]]) -> Text
    """Wrap each (offset, length) range of *text* in a highlight span;
    offsets are interpreted as byte positions."""
    raw = force_bytes(text)
    open_tag = b'<span class="highlight">'
    close_tag = b'</span>'
    pieces = []
    cursor = 0
    for offset, length in locs:
        end = offset + length
        pieces.append(raw[cursor:offset])
        pieces.append(open_tag)
        pieces.append(raw[offset:end])
        pieces.append(close_tag)
        cursor = end
    pieces.append(raw[cursor:])
    return force_text(b''.join(pieces))
def highlight_string(text, locs):
    # type: (AnyStr, Iterable[Tuple[int, int]]) -> text_type
    """Wrap each (offset, length) range of *text* in a highlight span.

    All arithmetic is done on bytes because tsearch_extras counts bytes
    instead of characters.
    """
    raw = force_bytes(text)
    open_tag = b'<span class="highlight">'
    close_tag = b'</span>'
    pieces = []
    cursor = 0
    for offset, length in locs:
        end = offset + length
        pieces.append(raw[cursor:offset])
        pieces.append(open_tag)
        pieces.append(raw[offset:end])
        pieces.append(close_tag)
        cursor = end
    pieces.append(raw[cursor:])
    return b''.join(pieces).decode('utf-8')
def ensure_medium_avatar_image(self, user_profile):
    # type: (UserProfile) -> None
    """Generate the medium-size variant of the user's avatar and upload it
    to S3 as "<avatar-path>-medium.png"."""
    file_path = user_avatar_path(user_profile)
    s3_file_name = file_path

    bucket_name = settings.S3_AVATAR_BUCKET
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = get_bucket(conn, bucket_name)
    # NOTE(review): assumes the full-size avatar already exists in S3;
    # get_key would return None otherwise — confirm callers guarantee this.
    key = bucket.get_key(file_path)
    image_data = force_bytes(key.get_contents_as_string())

    resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
    upload_image_to_s3(
        bucket_name,
        s3_file_name + "-medium.png",
        "image/png",
        user_profile,
        resized_medium
    )
def der_encode_integer_value(val):
    # type: (int) -> bytes
    """DER-encode *val* as a base-256, big-endian, two's-complement integer
    body using the minimum number of octets.

    Raises TypeError if val is not an integer.
    """
    if not isinstance(val, six.integer_types):
        raise TypeError("int")
    # base 256, MSB first, two's complement, minimum number of octets
    # necessary. This has a number of annoying edge cases:
    # * 0 and -1 are 0x00 and 0xFF, not the empty string.
    # * 255 is 0x00 0xFF, not 0xFF
    # * -256 is 0xFF 0x00, not 0x00
    # Special-case to avoid an empty encoding.
    if val == 0:
        return b"\x00"
    sign = 0  # What you would get if you sign-extended the current high bit.
    out = b""
    # We can stop once sign-extension matches the remaining value.
    while val != sign:
        byte = val & 0xff
        # BUG FIX: force_bytes(chr(byte)) UTF-8-encodes code points >= 0x80
        # into two bytes on Python 3; struct.pack emits the raw octet.
        out = struct.pack("!B", byte) + out
        sign = -1 if byte & 0x80 == 0x80 else 0
        val >>= 8
    return out
def _wrapped_func_arguments(request, *args, **kwargs):
    # type: (HttpRequest, *Any, **Any) -> HttpResponse
    """Authenticate the request via HTTP basic auth, then dispatch to
    view_func under rate limiting."""
    # First try block attempts to get the credentials we need to do authentication
    try:
        # Grab the base64-encoded authentication string, decode it, and split it into
        # the email and API key
        auth_type, credentials = request.META['HTTP_AUTHORIZATION'].split()
        # case insensitive per RFC 1945
        if auth_type.lower() != "basic":
            return json_error(_("This endpoint requires HTTP basic authentication."))
        role, api_key = base64.b64decode(force_bytes(credentials)).decode('utf-8').split(":")
    except ValueError:
        # Covers both a malformed header and a credential string without ':'.
        return json_unauthorized(_("Invalid authorization header for basic auth"))
    except KeyError:
        # No Authorization header at all.
        return json_unauthorized("Missing authorization header for basic auth")
    # Now we try to do authentication or die
    try:
        # profile is a Union[UserProfile, RemoteZulipServer]
        profile = validate_api_key(request, role, api_key, is_webhook)
    except JsonableError as e:
        return json_unauthorized(e.msg)
    # Apply rate limiting
    return rate_limit()(view_func)(request, profile, *args, **kwargs)
def ccache_principal(name, realm):
    # type: (Dict[str, str], str) -> bytes
    # Kerberos ccache principal record: 32-bit name type and component
    # count, then the realm and each name component as counted octet strings.
    header = struct.pack("!II", name["nameType"], len(name["nameString"]))
    return (header + ccache_counted_octet_string(force_bytes(realm)) +
            b"".join(ccache_counted_octet_string(force_bytes(c))
                     for c in name["nameString"]))
def stringify_message_dict(message_dict):
    # type: (Dict[str, Any]) -> binary_type
    """Serialize a message dict to JSON and return the zlib-compressed bytes."""
    serialized = ujson.dumps(message_dict)
    return zlib.compress(force_bytes(serialized))
def der_encode_tlv(tag, value):
    # type: (int, bytes) -> bytes
    """DER-encode a tag-length-value triple: one tag octet, the DER length
    of *value*, then *value* itself.

    BUG FIX: force_bytes(chr(tag)) UTF-8-encodes tag values >= 0x80 into
    two bytes on Python 3; struct.pack("!B", tag) emits the raw octet.
    """
    return struct.pack("!B", tag) + der_encode_length(len(value)) + value
def json_unauthorized(message, www_authenticate=None):
    # type: (Text, Optional[Text]) -> HttpResponse
    # 401 Unauthorized with the standard Zulip JSON error body; the
    # trailing newline matches other JSON error responses.
    resp = HttpResponseUnauthorized("zulip", www_authenticate=www_authenticate)
    resp.content = force_bytes(ujson.dumps({"result": "error",
                                            "msg": message}) + "\n")
    return resp
def main(options):
    # type: (Any) -> int
    """Provision the development environment: apt/node/yarn dependencies,
    virtualenvs, directories, generated assets, services, databases, and
    translations. Returns 0 on success."""
    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # setup-apt-repo does an `apt-get update`
    # hash the apt dependencies
    sha_sum = hashlib.sha1()
    for apt_depedency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_depedency.encode('utf8'))
    # hash the content of setup-apt-repo
    sha_sum.update(open('scripts/lib/setup-apt-repo').read().encode('utf8'))

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    try:
        hash_file = open(apt_hash_file_path, 'r+')
        last_apt_dependencies_hash = hash_file.read()
    except IOError:
        # First run: create the hash file so it can be written below.
        run(['touch', apt_hash_file_path])
        hash_file = open(apt_hash_file_path, 'r+')

    if (new_apt_dependencies_hash != last_apt_dependencies_hash):
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        if not os.path.isdir(NODE_MODULES_CACHE_PATH):
            run(["sudo", "mkdir", NODE_MODULES_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup_venv.py instead of running it so that we get an
    # activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main(is_travis)

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create node coverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    # `build_emoji` script requires `emoji-datasource` package which we install
    # via npm and hence it should be executed after we are done installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    run(["tools/inline-email-css"])
    if is_travis and not options.is_production_travis:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.str_utils import force_bytes
        from zerver.lib.test_fixtures import is_template_database_current

        # Instantiating SimpleQueueClient both checks connectivity and
        # whether RabbitMQ credentials are configured.
        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        if options.is_force or not is_template_database_current(
                migration_status=migration_status_path,
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        for path in paths:
            with open(path, 'r') as file_to_hash:
                sha1sum.update(force_bytes(file_to_hash.read()))

        compilemessages_hash_path = os.path.join(UUID_VAR_PATH, "last_compilemessages_hash")
        new_compilemessages_hash = sha1sum.hexdigest()
        run(['touch', compilemessages_hash_path])
        with open(compilemessages_hash_path, 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open(compilemessages_hash_path, 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    # Record the provision version so tools can detect a stale environment.
    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file, ))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0