Example #1
from urllib.request import getproxies_environment


def proxy_env_vars(monkeypatch, request):
    # Unset any *_proxy variables already present in the environment;
    # getproxies_environment() returns keys such as "http", "https" and "no".
    for schema in getproxies_environment().keys():
        monkeypatch.delenv(f"{schema}_proxy", False)

    # Set the proxy environment variables requested by the test parameters.
    for proxy_type, proxy_list in request.param.items():
        monkeypatch.setenv(proxy_type, proxy_list)

    return request.param
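This fixture (presumably decorated with @pytest.fixture in its original module) first clears every *_proxy variable reported by getproxies_environment() and then sets the variables supplied through request.param. A minimal usage sketch using indirect parametrization; the test name and parameter values are hypothetical:

import pytest

@pytest.mark.parametrize(
    "proxy_env_vars",
    [{"http_proxy": "http://localhost:8080"}],
    indirect=True,
)
def test_respects_proxy(proxy_env_vars):
    # The fixture returns the dict of environment variables it set.
    assert proxy_env_vars["http_proxy"] == "http://localhost:8080"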
Example #2
    def __init__(
        self,
        reactor,
        proxy_reactor=None,
        contextFactory: Optional[IPolicyForHTTPS] = None,
        connectTimeout=None,
        bindAddress=None,
        pool=None,
        use_proxy=False,
    ):
        contextFactory = contextFactory or BrowserLikePolicyForHTTPS()

        _AgentBase.__init__(self, reactor, pool)

        if proxy_reactor is None:
            self.proxy_reactor = reactor
        else:
            self.proxy_reactor = proxy_reactor

        self._endpoint_kwargs = {}
        if connectTimeout is not None:
            self._endpoint_kwargs["timeout"] = connectTimeout
        if bindAddress is not None:
            self._endpoint_kwargs["bindAddress"] = bindAddress

        http_proxy = None
        https_proxy = None
        no_proxy = None
        if use_proxy:
            proxies = getproxies_environment()
            http_proxy = proxies["http"].encode() if "http" in proxies else None
            https_proxy = proxies["https"].encode() if "https" in proxies else None
            no_proxy = proxies["no"] if "no" in proxies else None

        # Parse credentials from https proxy connection string if present
        self.https_proxy_creds, https_proxy = parse_username_password(
            https_proxy)

        self.http_proxy_endpoint = http_proxy_endpoint(http_proxy,
                                                       self.proxy_reactor,
                                                       **self._endpoint_kwargs)

        self.https_proxy_endpoint = http_proxy_endpoint(
            https_proxy, self.proxy_reactor, **self._endpoint_kwargs)

        self.no_proxy = no_proxy

        self._policy_for_https = contextFactory
        self._reactor = reactor
Example #3
    def __init__(
        self,
        reactor: IReactorCore,
        proxy_reactor: Optional[ISynapseReactor] = None,
        contextFactory: Optional[IPolicyForHTTPS] = None,
        connectTimeout: Optional[float] = None,
        bindAddress: Optional[bytes] = None,
        pool: Optional[HTTPConnectionPool] = None,
        use_proxy: bool = False,
    ):
        contextFactory = contextFactory or BrowserLikePolicyForHTTPS()

        _AgentBase.__init__(self, reactor, pool)

        if proxy_reactor is None:
            self.proxy_reactor = reactor
        else:
            self.proxy_reactor = proxy_reactor

        self._endpoint_kwargs: Dict[str, Any] = {}
        if connectTimeout is not None:
            self._endpoint_kwargs["timeout"] = connectTimeout
        if bindAddress is not None:
            self._endpoint_kwargs["bindAddress"] = bindAddress

        http_proxy = None
        https_proxy = None
        no_proxy = None
        if use_proxy:
            proxies = getproxies_environment()
            http_proxy = proxies["http"].encode() if "http" in proxies else None
            https_proxy = proxies["https"].encode() if "https" in proxies else None
            no_proxy = proxies["no"] if "no" in proxies else None

        self.http_proxy_endpoint, self.http_proxy_creds = http_proxy_endpoint(
            http_proxy, self.proxy_reactor, contextFactory,
            **self._endpoint_kwargs)

        self.https_proxy_endpoint, self.https_proxy_creds = http_proxy_endpoint(
            https_proxy, self.proxy_reactor, contextFactory,
            **self._endpoint_kwargs)

        self.no_proxy = no_proxy

        self._policy_for_https = contextFactory
        self._reactor = reactor
Example #4
    def __init__(
        self,
        reactor,
        proxy_reactor=None,
        contextFactory=BrowserLikePolicyForHTTPS(),
        connectTimeout=None,
        bindAddress=None,
        pool=None,
        use_proxy=False,
    ):
        _AgentBase.__init__(self, reactor, pool)

        if proxy_reactor is None:
            self.proxy_reactor = reactor
        else:
            self.proxy_reactor = proxy_reactor

        self._endpoint_kwargs = {}
        if connectTimeout is not None:
            self._endpoint_kwargs["timeout"] = connectTimeout
        if bindAddress is not None:
            self._endpoint_kwargs["bindAddress"] = bindAddress

        http_proxy = None
        https_proxy = None
        no_proxy = None
        if use_proxy:
            proxies = getproxies_environment()
            http_proxy = proxies["http"].encode() if "http" in proxies else None
            https_proxy = proxies["https"].encode() if "https" in proxies else None
            no_proxy = proxies["no"] if "no" in proxies else None

        self.http_proxy_endpoint = _http_proxy_endpoint(
            http_proxy, self.proxy_reactor, **self._endpoint_kwargs)

        self.https_proxy_endpoint = _http_proxy_endpoint(
            https_proxy, self.proxy_reactor, **self._endpoint_kwargs)

        self.no_proxy = no_proxy

        self._policy_for_https = contextFactory
        self._reactor = reactor
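Examples #2-#4 appear to be successive revisions of the same agent constructor: proxy URLs are only read from the environment when use_proxy=True, and the variant in Example #3 has http_proxy_endpoint return an (endpoint, credentials) tuple instead of parsing credentials in the constructor. A minimal usage sketch, assuming the surrounding class is a Twisted agent named ProxyAgent (the class name is not shown in these excerpts):

from twisted.internet import reactor

# Hypothetical: ProxyAgent is an assumed name for the class these __init__
# methods belong to. With use_proxy=True, the http_proxy, https_proxy and
# no_proxy environment variables are honoured via getproxies_environment().
agent = ProxyAgent(reactor, use_proxy=True)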
Example #5
from http.client import HTTPConnection, HTTPSConnection
from urllib import request


def post_multipart_formdata(host, url, fields, ssl=False, *, accept_type=None):
    # encode_multipart_formdata() is a helper provided elsewhere in the project.
    content_type, body = encode_multipart_formdata(fields)
    # If an http proxy is configured in the environment, connect to it
    # instead of the target host.
    proxies = request.getproxies_environment()
    realhost = proxies.get("http", host)
    if ssl:
        h = HTTPSConnection(realhost)
    else:
        h = HTTPConnection(realhost)
    headers = {'Content-type': content_type}
    if accept_type is not None:
        headers['Accept'] = accept_type
    h.request('POST', url, body=body, headers=headers)
    r = h.getresponse()
    return r.status, r.msg, r.getheaders(), r.read()
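A hedged usage sketch for the helper above; the host, path and field values are hypothetical, and fields takes whatever form the project's encode_multipart_formdata() expects (commonly a dict or a list of (name, value) pairs):

status, msg, headers, body = post_multipart_formdata(
    "example.org",
    "/upload",
    fields={"title": "report"},
    ssl=True,
    accept_type="application/json",
)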
Example #6
    def __init__(
        self,
        reactor: IReactorCore,
        proxy_reactor: IReactorCore,
        tls_client_options_factory: Optional[FederationPolicyForHTTPS],
        srv_resolver: SrvResolver,
        parsed_uri: URI,
    ):
        self._reactor = reactor
        self._parsed_uri = parsed_uri

        # http_proxy is not needed because federation is always over TLS
        proxies = getproxies_environment()
        https_proxy = proxies["https"].encode() if "https" in proxies else None
        self.no_proxy = proxies["no"] if "no" in proxies else None

        # endpoint and credentials to use to connect to the outbound https proxy, if any.
        (
            self._https_proxy_endpoint,
            self._https_proxy_creds,
        ) = proxyagent.http_proxy_endpoint(
            https_proxy,
            proxy_reactor,
            tls_client_options_factory,
        )

        # set up the TLS connection params
        #
        # XXX disabling TLS is really only supported here for the benefit of the
        # unit tests. We should make the UTs cope with TLS rather than having to make
        # the code support the unit tests.

        if tls_client_options_factory is None:
            self._tls_options = None
        else:
            self._tls_options = tls_client_options_factory.get_options(
                self._parsed_uri.host)

        self._srv_resolver = srv_resolver
Example #7
    def read_config(self, config: JsonDict, **kwargs: Any) -> None:

        # Only enable the media repo if either the media repo is enabled or the
        # current worker app is the media repo.
        if (self.root.server.enable_media_repo is False and
                config.get("worker_app") != "synapse.app.media_repository"):
            self.can_load_media_repo = False
            return
        else:
            self.can_load_media_repo = True

        # Whether this instance should be the one to run the background jobs to
        # e.g. clean up old URL previews.
        self.media_instance_running_background_jobs = config.get(
            "media_instance_running_background_jobs")

        self.max_upload_size = self.parse_size(
            config.get("max_upload_size", "50M"))
        self.max_image_pixels = self.parse_size(
            config.get("max_image_pixels", "32M"))
        self.max_spider_size = self.parse_size(
            config.get("max_spider_size", "10M"))

        self.media_store_path = self.ensure_directory(
            config.get("media_store_path", "media_store"))

        backup_media_store_path = config.get("backup_media_store_path")

        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False)

        storage_providers = config.get("media_storage_providers", [])

        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )

            storage_providers = [{
                "module": "file_system",
                "store_local": True,
                "store_synchronous": synchronous_backup_media_store,
                "store_remote": True,
                "config": {
                    "directory": backup_media_store_path
                },
            }]

        # This is a list of config that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # the class_config the config to pass to it, and
        # MediaStorageProviderConfig are options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers: List[tuple] = []

        for i, provider_config in enumerate(storage_providers):
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend")

            provider_class, parsed_config = load_module(
                provider_config, ("media_storage_providers", "<item %i>" % i))

            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )

            self.media_storage_providers.append(
                (provider_class, parsed_config, wrapper_config))

        self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES))
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            try:
                check_requirements("url_preview")

            except DependencyException as e:
                raise ConfigError(
                    e.message  # noqa: B306, DependencyException.message is a property
                )

            proxy_env = getproxies_environment()
            if "url_preview_ip_range_blacklist" not in config:
                if "http" not in proxy_env or "https" not in proxy_env:
                    raise ConfigError(
                        "For security, you must specify an explicit target IP address "
                        "blacklist in url_preview_ip_range_blacklist for url previewing "
                        "to work")
            else:
                if "http" in proxy_env or "https" in proxy_env:
                    logger.warning("".join(HTTP_PROXY_SET_WARNING))

            # we always blacklist '0.0.0.0' and '::', which are supposed to be
            # unroutable addresses.
            self.url_preview_ip_range_blacklist = generate_ip_set(
                config["url_preview_ip_range_blacklist"],
                ["0.0.0.0", "::"],
                config_path=("url_preview_ip_range_blacklist", ),
            )

            self.url_preview_ip_range_whitelist = generate_ip_set(
                config.get("url_preview_ip_range_whitelist", ()),
                config_path=("url_preview_ip_range_whitelist", ),
            )

            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ())

            self.url_preview_accept_language = config.get(
                "url_preview_accept_language") or ["en"]

        media_retention = config.get("media_retention") or {}

        self.media_retention_local_media_lifetime_ms = None
        local_media_lifetime = media_retention.get("local_media_lifetime")
        if local_media_lifetime is not None:
            self.media_retention_local_media_lifetime_ms = self.parse_duration(
                local_media_lifetime)

        self.media_retention_remote_media_lifetime_ms = None
        remote_media_lifetime = media_retention.get("remote_media_lifetime")
        if remote_media_lifetime is not None:
            self.media_retention_remote_media_lifetime_ms = self.parse_duration(
                remote_media_lifetime)
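A minimal sketch of the kind of mapping this read_config expects, using only keys that appear in the calls above; all values are hypothetical:

config = {
    "max_upload_size": "100M",
    "media_store_path": "/data/media_store",
    "media_storage_providers": [],
    "url_preview_enabled": True,
    "url_preview_ip_range_blacklist": ["10.0.0.0/8", "192.168.0.0/16"],
    "media_retention": {"local_media_lifetime": "90d"},
}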
Example #8
File: test.py Project: yaohq/beauty
import string
from urllib import request


def test4():
    # Print string-module constants and any proxy settings in the environment.
    print(string.printable)
    print(string.punctuation)
    print(string)
    print(request.getproxies_environment())