def get_data_uri(bucket, sibling=None, more_params=None):
    """
    Compute uri to data source as sibling to the current path.
    Add "from" and "to" query parameters from bucket.
    """
    more_params = more_params or {}
    forward_parameters = [
        u'from', u'to', u'exclude', u'include', u'pad', u'backfill',
        u'interpolate']
    request = bucket.request

    # Honor X-Forwarded-Proto request header if behind SSL-terminating HTTP proxy
    twisted_honor_reverse_proxy(request)

    url = URL()
    for param in forward_parameters:
        if param in bucket.tdata:
            url = url.add(unicode(param), unicode(bucket.tdata[param]))

    for param, value in more_params.iteritems():
        # Special rule: Don't add any of "pad" or "backfill", if "interpolate" is true
        do_interpolate = 'interpolate' in bucket.tdata and asbool(bucket.tdata.interpolate)
        if do_interpolate and param in ['pad', 'backfill']:
            continue
        url = url.add(unicode(param), unicode(value))

    data_uri = str(request.URLPath().sibling(sibling).click(url.asText()))
    return data_uri
def network_kubernetes_from_context(
        reactor, context=None, path=None, environ=None,
        default_config_path=FilePath(expanduser(u"~/.kube/config")),
):
    """
    Create a new ``IKubernetes`` provider based on a kube config file.

    :param reactor: A Twisted reactor which will be used for I/O and
        scheduling.

    :param unicode context: The name of the kube config context from which to
        load configuration details.  Or, ``None`` to respect the current
        context setting from the configuration.

    :param FilePath path: The location of the kube config file to use.

    :param dict environ: An environment dictionary in which to look up
        ``KUBECONFIG``.  If ``None``, the real process environment will be
        inspected.  This is used only if ``path`` is ``None``.

    :return IKubernetes: The Kubernetes service described by the named
        context.
    """
    if path is None:
        if environ is None:
            from os import environ
        try:
            kubeconfigs = environ[u"KUBECONFIG"]
        except KeyError:
            config = KubeConfig.from_file(default_config_path.path)
        else:
            config = _merge_configs_from_env(kubeconfigs)
    else:
        config = KubeConfig.from_file(path.path)

    if context is None:
        context = config.doc[u"current-context"]

    context = config.contexts[context]
    cluster = config.clusters[context[u"cluster"]]
    user = config.users[context[u"user"]]

    if isinstance(cluster[u"server"], bytes):
        base_url = URL.fromText(cluster[u"server"].decode("ascii"))
    else:
        base_url = URL.fromText(cluster[u"server"])

    [ca_cert] = parse(cluster[u"certificate-authority"].bytes())
    client_chain = parse(user[u"client-certificate"].bytes())
    [client_key] = parse(user[u"client-key"].bytes())

    agent = authenticate_with_certificate_chain(
        reactor, base_url, client_chain, client_key, ca_cert,
    )

    return network_kubernetes(
        base_url=base_url,
        agent=agent,
    )
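# A minimal usage sketch, not part of the original sources: it assumes
# txkube-style imports (network_kubernetes_from_context and v1, as used by the
# client.list() sample further down) and an ordinary reactor driven by react().
# The context name u"minikube" is a placeholder for an entry in a local kube
# config.
from twisted.internet.task import react
from txkube import network_kubernetes_from_context, v1

def _example_list_pods(reactor):
    kubernetes = network_kubernetes_from_context(reactor, context=u"minikube")
    client = kubernetes.client()
    # Fires with the pod list from the cluster selected by the context.
    return client.list(v1.Pod)

# react(_example_list_pods, [])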
def test_challenge_unexpected_uri(self):
    """
    ``_check_challenge`` raises `~acme.errors.UnexpectedUpdate` if the
    challenge does not have the expected URI.
    """
    # Crazy dance that was used in previous test.
    url1 = URL.fromText(u'https://example.org/').asURI().asText()
    url2 = URL.fromText(u'https://example.com/').asURI().asText()
    with self.assertRaises(errors.UnexpectedUpdate):
        Client._check_challenge(
            challenge=messages.ChallengeResource(
                body=messages.ChallengeBody(chall=None, uri=url1)),
            challenge_body=messages.ChallengeBody(chall=None, uri=url2),
        )
def create_issuing_service(
    reactor: IReactorTCP,
    acme_url: str,
    account_key_file: str,
    well_known_resource: IResource,
) -> AcmeIssuingService:
    """Create an ACME issuing service, and attach it to a web Resource

    Args:
        reactor: twisted reactor
        acme_url: URL to use to request certificates
        account_key_file: where to store the account key
        well_known_resource: web resource for .well-known.
            we will attach a child resource for "acme-challenge".

    Returns:
        AcmeIssuingService
    """
    responder = HTTP01Responder()

    well_known_resource.putChild(b"acme-challenge", responder.resource)

    store = ErsatzStore()

    return AcmeIssuingService(
        cert_store=store,
        client_creator=(
            lambda: Client.from_url(
                reactor=reactor,
                url=URL.from_text(acme_url),
                key=load_or_create_client_key(account_key_file),
                alg=RS256,
            )
        ),
        clock=reactor,
        responders=[responder],
    )
def loginSubmit(self, request):
    username = self.queryValue(request, u"username")
    password = self.queryValue(request, u"password", default=u"")

    if username is None:
        user = None
    else:
        user = yield self.lookupUserName(username)
        if user is None:
            user = yield self.lookupUserEmail(username)

    if user is None:
        self.log.debug(
            "Login failed: no such user: {username}", username=username)
    else:
        authenticated = yield self.verifyCredentials(user, password)
        if authenticated:
            session = request.getSession()
            session.user = user

            url = self.queryValue(request, u"o")
            if url is None:
                location = URLs.prefix  # Default to application home
            else:
                location = URL.fromText(url)
            returnValue(self.redirect(request, location))
        else:
            self.log.debug(
                "Login failed: incorrect credentials for user: {user}",
                user=user)

    returnValue((yield self.login(request, failed=True)))
def test_parser(self):
    """
    ``AcmeParser`` creates an endpoint with the specified ACME directory
    and directory store.
    """
    directory = URL.fromText(u'https://example.com/acme')
    parser = _AcmeParser(u'prefix', directory)
    tempdir = self.useFixture(TempDir()).path
    temp_path = FilePath(tempdir)
    key_path = temp_path.child('client.key')
    reactor = object()
    self.assertThat(
        parser.parseStreamServer(reactor, tempdir, 'tcp', '443'),
        MatchesAll(
            IsInstance(AutoTLSEndpoint),
            MatchesStructure(
                reactor=Is(reactor),
                directory=Equals(directory),
                cert_store=MatchesAll(
                    IsInstance(DirectoryStore),
                    MatchesStructure(
                        path=Equals(temp_path))),
                cert_mapping=MatchesAll(
                    IsInstance(HostDirectoryMap),
                    MatchesStructure(
                        directoryPath=Equals(temp_path))),
                sub_endpoint=MatchesPredicate(
                    IStreamServerEndpoint.providedBy,
                    '%r is not a stream server endpoint'))))
    self.assertThat(key_path.isfile(), Equals(True))
    key_data = key_path.getContent()

    # Parsing the same directory again reuses the stored client key.
    parser.parseStreamServer(reactor, tempdir, 'tcp', '443')
    self.assertThat(key_path.getContent(), Equals(key_data))
def test_posted(self):
    root = Resource()
    collector = Collector()
    root.putChild(b"foo", collector)

    from twisted.internet import reactor
    while True:
        try:
            port = reactor.listenTCP(0, Site(root))
        except:
            pass
        else:
            self.addCleanup(port.stopListening)
            port_number = port.getHost().port
            break

    fluentd_url = URL(
        scheme="http",
        host="127.0.0.1",
        port=port_number,
        path=["foo"],
    )

    agent = Agent(reactor)
    destination = FluentdDestination(agent, fluentd_url)
    destination({"hello": "world"})

    def check():
        self.assertEquals(
            collector.collected,
            [b'json={"hello": "world"}'],
        )

    return deferLater(reactor, 0.1, check)
def create_marathon_acme(storage_dir, acme_directory, acme_email,
                         marathon_addrs, mlb_addrs, group, reactor):
    """
    Create a marathon-acme instance.

    :param storage_dir:
        Path to the storage directory for certificates and the client key.
    :param acme_directory:
        Address for the ACME directory to use.
    :param acme_email:
        Email address to use when registering with the ACME service.
    :param marathon_addrs:
        Address for the Marathon instance to find app domains that require
        certificates.
    :param mlb_addrs:
        List of addresses for marathon-lb instances to reload when a new
        certificate is issued.
    :param group:
        The marathon-lb group (``HAPROXY_GROUP``) to consider when finding
        app domains.
    :param reactor: The reactor to use.
    """
    storage_path, certs_path = init_storage_dir(storage_dir)
    acme_url = URL.fromText(_to_unicode(acme_directory))
    key = maybe_key(storage_path)

    return MarathonAcme(
        MarathonClient(marathon_addrs, reactor=reactor),
        group,
        DirectoryStore(certs_path),
        MarathonLbClient(mlb_addrs, reactor=reactor),
        create_txacme_client_creator(reactor, acme_url, key),
        reactor,
        acme_email,
    )
def get_kubernetes_service(self, reactor):
    if self["k8s-service-account"]:
        return network_kubernetes(
            # XXX is this really the url to use?
            base_url=URL.fromText(self["kubernetes"].decode("ascii")),
            agent=authenticate_with_serviceaccount(reactor),
        )
    return network_kubernetes_from_context(
        reactor, self["k8s-context"], self["k8s-config"],
    )
def from_service_account(cls):
    from twisted.internet import reactor
    kubernetes = network_kubernetes(
        base_url=URL.fromText(u"https://kubernetes/"),
        agent=authenticate_with_serviceaccount(reactor),
    )
    client = kubernetes.client()
    return cls(k8s=client)
def _parse_fluentd_http(self, kind, args):
    return lambda reactor: FluentdDestination(
        # Construct the pool ourselves with the default of using persistent
        # connections to override Agent's default of not using persistent
        # connections.
        agent=Agent(reactor, pool=HTTPConnectionPool(reactor)),
        fluentd_url=URL.fromText(args),
    )
def _reconstitute(self):
    """
    Reconstitute this L{URLPath} from all its given attributes.
    """
    urltext = urlquote(
        urlparse.urlunsplit((self._scheme, self._netloc,
                             self._path, self._query, self._fragment)),
        safe=_allascii
    )
    self._url = _URL.fromText(urltext.encode("ascii").decode("ascii"))
def render(self, request):
    # request.args can include URL encoded bodies, so extract the query from
    # request.uri
    _, _, query = request.uri.partition(b'?')

    # Assume HTTPS is served over 443
    httpsURL = URL(
        scheme=u'https',
        # I'm sure ASCII will be fine.
        host=request.getRequestHostname().decode('ascii'),
        path=tuple(
            segment.decode('ascii')
            for segment in request.prepath + request.postpath
        ),
    )

    httpsLocation = httpsURL.asText().encode('ascii')
    if query:
        httpsLocation += (b'?' + query)
    return movedTo(request, httpsLocation)
def __init__(self):
    base_url = URL.fromText(u"https://kubernetes.example.invalid./")
    self._state = _KubernetesState()
    self._resource = _kubernetes_resource(self)
    self._kubernetes = network_kubernetes(
        base_url=base_url,
        agent=RequestTraversalAgent(self._resource),
    )
def test_list_logging(self, logger):
    """
    ``_NetworkClient.list`` logs an Eliot event describing its given type.
    """
    client = network_kubernetes(
        base_url=URL.fromText(u"http://127.0.0.1/"),
        agent=Agent(MemoryReactor()),
    ).client()
    client.list(v1.Pod)
def _url(self, *segments, **query):
    url = URL.fromText(
        self.endpoint.decode("utf-8"),
    ).child(*segments)
    for k, v in query.items():
        url = url.add(
            k.decode("utf-8"),
            quote(v.encode("utf-8"), safe="").decode("ascii"),
        )
    return url.asURI().asText().encode("ascii")
def bootstrapResource(self, request):
    requestURL = URL.fromText(request.uri)

    # Remove URL prefix
    names = requestURL.path[len(URLs.bootstrapBase.path) - 1:]

    request.setHeader(HeaderName.contentType.value, ContentType.CSS.value)
    return self.cachedZippedResource(
        request, self.bootstrapSourceURL, self.bootstrapVersion,
        self.bootstrapVersion, *names)
def test_generated(
        self,
        bucket_name, key_prefix, publichost, privatehost,
        s3_access_key_id, s3_secret_key,
        log_gatherer_furl, stats_gatherer_furl,
):
    """
    New introducer and storage configuration can be created with
    ``new_tahoe_configuration``.
    """
    deploy_config = DeploymentConfiguration(
        domain=u"testing.com",
        kubernetes_namespace=u"testing",
        subscription_manager_endpoint=URL.fromText(u"http://localhost/"),

        s3_access_key_id=s3_access_key_id,
        s3_secret_key=s3_secret_key,

        introducer_image=u"tahoe-introducer",
        storageserver_image=u"tahoe-storageserver",

        log_gatherer_furl=log_gatherer_furl,
        stats_gatherer_furl=stats_gatherer_furl,
    )

    config = server.new_tahoe_configuration(
        deploy_config, bucket_name, key_prefix, publichost, privatehost,
        4321, 1234,
    )

    # It returns an object which can be round-tripped through the JSON
    # format.
    self.expectThat(config, Equals(loads(dumps(config))))

    # The introducer and storage are both told the same introducer furl.
    self.expectThat(
        config["introducer"]["introducer_furl"],
        Equals(config["storage"]["introducer_furl"]),
    )

    # And ports are what we said.
    self.expectThat(config["introducer"]["port"], Equals(4321))
    self.expectThat(config["storage"]["port"], Equals(1234))

    # The introducer furl contains a location hint of the public host and
    # the hard-coded introducer port we use.
    self.expectThat(
        config["introducer"]["introducer_furl"],
        hasLocationHint(config["storage"]["publichost"], 4321),
    )
def gotRTM(response):
    url = URL.fromText(response[u'url'])
    if url.scheme != u'wss':
        raise RuntimeError(url)
    factory.setSessionParameters(
        response[u'url'], useragent=factory.useragent)
    self.me = response[u'self']
    self.users = {u[u'id']: u for u in response[u'users']}
    self.channels = {c[u'id']: c for c in response[u'channels']}
    self.ims = {im[u'id']: im for im in response[u'ims']}
    print factory
    return self._makeEndpoint(url).connect(factory)
def setUp(self):
    super(EndpointTests, self).setUp()
    clock = Clock()
    clock.rightNow = (
        datetime.now() - datetime(1970, 1, 1)).total_seconds()
    client = FakeClient(RSA_KEY_512, clock)
    self.endpoint = AutoTLSEndpoint(
        reactor=clock,
        directory=URL.fromText(u'https://example.com/'),
        client_creator=lambda reactor, directory: succeed(client),
        cert_store=MemoryStore(),
        cert_mapping={},
        sub_endpoint=DummyEndpoint())
def create_session(endpoint, request=treq.request):
    """
    Create a new `Session` instance.

    :param unicode endpoint: URI to the root of the Documint service.
    :param request: Callable for making requests.
    :type request: Callable mimicking the signature of `treq.request`.
    """
    uri = URL.fromText(endpoint).child(u'sessions').child(u'')
    request = documint_request_factory(request)
    d = post_json(request, uri.asURI().asText().encode('utf-8'))
    d.addCallback(itemgetter(u'links'))
    d.addCallback(Session, request)
    return d
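# A hedged usage sketch, not from the original module: the endpoint URL is a
# placeholder, and the callback only illustrates that create_session() fires
# with a Session built from the service's "links" document.
def _example_create_session():
    d = create_session(u'http://documint.invalid/')

    def _created(session):
        # `session` wraps the links returned by the Documint root resource.
        return session

    d.addCallback(_created)
    return d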
def _addSlash(request):
    """
    Add a trailing slash to C{request}'s URI.

    @param request: The incoming request to add the ending slash to.
    @type request: An object conforming to L{twisted.web.iweb.IRequest}

    @return: A URI with a trailing slash, with query and fragment preserved.
    @rtype: L{bytes}
    """
    url = URL.fromText(request.uri.decode('ascii'))
    # Add an empty path segment at the end, so that it adds a trailing slash
    url = url.replace(path=list(url.path) + [u""])
    return url.asText().encode('ascii')
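# A small illustrative sketch (hypothetical stub object, not from the original
# module) showing the intended behaviour: only the path gains a trailing slash
# while the query string survives untouched.
class _FakeRequest(object):
    uri = b'/events?id=1'

# _addSlash(_FakeRequest())  ->  b'/events/?id=1'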
def test_fluentd_http(self):
    """
    A ``fluentd_http:`` description causes logs to be sent to a Fluentd
    server's http input plugin at the given URL.
    """
    reactor = object()
    self.assertThat(
        _parse_destination_description("fluentd_http:http://foo/bar")(reactor),
        MatchesStructure(
            agent=IsInstance(Agent),
            fluentd_url=Equals(
                URL(scheme=u"http", host=u"foo", path=[u"bar"]),
            ),
        ),
    )
def _make_wormhole_claim(
        self, customer_email, customer_id, subscription_id, old_secrets,
):
    plan_identifier = u"foobar"
    reactor = Clock()
    provisioner = get_provisioner(
        reactor,
        URL.fromText(u"http://subscription-manager/"),
        partial(self._provision_subscription, old_secrets),
    )
    server = MemoryWormholeServer()
    signup = get_wormhole_signup(
        reactor,
        provisioner,
        server,
        URL.fromText(u"ws://foo.invalid/"),
        FilePath(self.mktemp()),
    )
    d = signup.signup(
        customer_email, customer_id, subscription_id, plan_identifier,
    )
    return self.successResultOf(d)
def __init__(self, scheme=b'', netloc=b'localhost', path=b'',
             query=b'', fragment=b''):
    self.scheme = scheme or b'http'
    self.netloc = netloc
    self.path = path or b'/'
    self.query = query
    self.fragment = fragment
    urltext = urlquote(
        urlparse.urlunsplit(
            (self.scheme, self.netloc, self.path, self.query, self.fragment)),
        safe=_allascii)
    self._url = _URL.fromText(urltext.encode("ascii").decode("ascii"))
def test_start_responding(self, token):
    """
    Calling ``start_responding`` makes an appropriate resource available.
    """
    challenge = challenges.HTTP01(token=token)
    response = challenge.response(RSA_KEY_512)

    responder = HTTP01Responder()

    challenge_resource = Resource()
    challenge_resource.putChild(b'acme-challenge', responder.resource)
    root = Resource()
    root.putChild(b'.well-known', challenge_resource)
    client = StubTreq(root)

    encoded_token = challenge.encode('token')
    challenge_url = URL(
        host=u'example.com',
        path=[u'.well-known', u'acme-challenge', encoded_token]).asText()

    self.assertThat(client.get(challenge_url),
                    succeeded(MatchesStructure(code=Equals(404))))

    responder.start_responding(u'example.com', challenge, response)
    self.assertThat(
        client.get(challenge_url),
        succeeded(
            MatchesAll(
                MatchesStructure(
                    code=Equals(200),
                    headers=AfterPreprocessing(
                        methodcaller('getRawHeaders', b'content-type'),
                        Equals([b'text/plain']))),
                AfterPreprocessing(
                    methodcaller('content'),
                    succeeded(
                        Equals(response.key_authorization.encode('utf-8'))))
            )))

    # Starting twice before stopping doesn't break things
    responder.start_responding(u'example.com', challenge, response)
    self.assertThat(client.get(challenge_url),
                    succeeded(MatchesStructure(code=Equals(200))))

    responder.stop_responding(u'example.com', challenge, response)
    self.assertThat(client.get(challenge_url),
                    succeeded(MatchesStructure(code=Equals(404))))
def authenticate_with_serviceaccount(reactor, **kw):
    """
    Create an ``IAgent`` which can issue authenticated requests to a
    particular Kubernetes server using a service account token.

    :param reactor: The reactor with which to configure the resulting agent.

    :param bytes path: The location of the service account directory.  The
        default should work fine for normal use within a container.

    :return IAgent: An agent which will authenticate itself to a particular
        Kubernetes server and which will verify that server or refuse to
        interact with it.
    """
    config = KubeConfig.from_service_account(**kw)

    token = config.user["token"]
    base_url = URL.fromText(config.cluster["server"].decode("ascii"))

    ca_certs = pem.parse(config.cluster["certificate-authority"].bytes())
    if not ca_certs:
        raise ValueError("No certificate authority certificate found.")
    ca_cert = ca_certs[0]

    try:
        # Validate the certificate so we have early failures for garbage data.
        ssl.Certificate.load(ca_cert.as_bytes(), FILETYPE_PEM)
    except OpenSSLError as e:
        raise ValueError(
            "Invalid certificate authority certificate found.",
            str(e),
        )

    netloc = NetLocation(host=base_url.host, port=base_url.port)
    policy = ClientCertificatePolicyForHTTPS(
        credentials={},
        trust_roots={
            netloc: ca_cert,
        },
    )

    agent = HeaderInjectingAgent(
        _to_inject=Headers({u"authorization": [u"Bearer {}".format(token)]}),
        _agent=Agent(reactor, contextFactory=policy),
    )
    return agent
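# A minimal sketch (not from the original module) of using the returned agent
# directly: the API path is the standard Kubernetes namespaces listing, and the
# in-cluster base URL mirrors the "https://kubernetes/" default used elsewhere
# in these samples. The agent injects the bearer token and pins the cluster CA,
# so a plain IAgent request is sufficient.
from twisted.internet.task import react
from twisted.web.client import readBody

def _example_list_namespaces(reactor):
    agent = authenticate_with_serviceaccount(reactor)
    d = agent.request(b"GET", b"https://kubernetes/api/v1/namespaces")
    d.addCallback(readBody)
    return d

# react(_example_list_namespaces, [])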
class ChargeBeeCreateSubscription(Resource):
    def __init__(self, agent, site_name, secret_key, chargebee_domain):
        authorization = b64encode(secret_key + ":")
        self._agent = AuthenticatingAgent(
            agent,
            ("Authorization", "Basic {}".format(authorization)),
        )
        self._site_name = site_name
        self._chargebee_domain = chargebee_domain
        self._uri = URL(
            u"https",
            u"{}.{}".format(self._site_name, self._chargebee_domain),
            [u"api", u"v2", u"estimates", u"create_subscription"],
        )
        msg("Proxying to {}".format(self._uri))

    def render_OPTIONS(self, request):
        request.responseHeaders.setRawHeaders(
            "Access-Control-Allow-Methods",
            ["POST"],
        )
        return ""

    def render_POST(self, request):
        headers = request.requestHeaders.copy()
        headers.setRawHeaders("Host", [self._uri.host])
        body = FileBodyProducer(BytesIO(request.content.read()))
        d = self._agent.request(
            "POST",
            self._uri.to_uri().to_text().encode("ascii"),
            headers,
            body,
        )
        d.addCallback(self._proxy_response, request)
        return NOT_DONE_YET

    def _proxy_response(self, response, request):
        for k, vs in response.headers.getAllRawHeaders():
            request.responseHeaders.setRawHeaders(k, vs)
        request.setResponseCode(response.code, response.phrase)
        d = readBody(response)
        d.addCallback(request.write)
        d.addErrback(err, "proxying estimates/create_subscription")
        d.addCallback(lambda ign: request.finish())
        return d
def test_start_responding(self):
    """
    Calling ``start_responding`` makes an appropriate resource available.
    """
    token = b'BWYcfxzmOha7-7LoxziqPZIUr99BCz3BfbN9kzSFnrU'
    challenge = challenges.HTTP01(token=token)
    response = challenge.response(RSA_KEY_512)

    responder = HTTP01Responder()

    challenge_resource = Resource()
    challenge_resource.putChild(b'acme-challenge', responder.resource)
    root = Resource()
    root.putChild(b'.well-known', challenge_resource)
    client = StubTreq(root)

    encoded_token = challenge.encode('token')
    challenge_url = URL(
        host=u'example.com',
        path=[u'.well-known', u'acme-challenge', encoded_token]).asText()

    # We get a 404 (page not found) while the challenge is not yet active.
    result = yield client.get(challenge_url)
    self.assertEqual(404, result.code)

    # Once we enable the response.
    responder.start_responding(u'example.com', challenge, response)
    result = yield client.get(challenge_url)
    self.assertEqual(200, result.code)
    self.assertEqual(
        ['text/plain'], result.headers.getRawHeaders('content-type'))

    result = yield result.content()
    self.assertEqual(response.key_authorization.encode('utf-8'), result)

    # Starting twice before stopping doesn't break things
    responder.start_responding(u'example.com', challenge, response)
    result = yield client.get(challenge_url)
    self.assertEqual(200, result.code)

    yield responder.stop_responding(u'example.com', challenge, response)
    result = yield client.get(challenge_url)
    self.assertEqual(404, result.code)
def create_marathon_acme(
        storage_dir, acme_directory, acme_email, allow_multiple_certs,
        marathon_addrs, marathon_timeout, sse_timeout, mlb_addrs, group,
        reactor):
    """
    Create a marathon-acme instance.

    :param storage_dir:
        Path to the storage directory for certificates and the client key.
    :param acme_directory:
        Address for the ACME directory to use.
    :param acme_email:
        Email address to use when registering with the ACME service.
    :param allow_multiple_certs:
        Whether to allow multiple certificates per app port.
    :param marathon_addrs:
        Address for the Marathon instance to find app domains that require
        certificates.
    :param marathon_timeout:
        Amount of time in seconds to wait for response headers to be received
        from Marathon.
    :param sse_timeout:
        Amount of time in seconds to wait for some event data to be received
        from Marathon.
    :param mlb_addrs:
        List of addresses for marathon-lb instances to reload when a new
        certificate is issued.
    :param group:
        The marathon-lb group (``HAPROXY_GROUP``) to consider when finding
        app domains.
    :param reactor: The reactor to use.
    """
    storage_path, certs_path = init_storage_dir(storage_dir)
    acme_url = URL.fromText(_to_unicode(acme_directory))
    key = maybe_key(storage_path)

    return MarathonAcme(
        MarathonClient(marathon_addrs, timeout=marathon_timeout,
                       sse_kwargs={'timeout': sse_timeout},
                       reactor=reactor),
        group,
        DirectoryStore(certs_path),
        MarathonLbClient(mlb_addrs, reactor=reactor),
        create_txacme_client_creator(reactor, acme_url, key),
        reactor,
        acme_email,
        allow_multiple_certs,
    )
def fromString(klass, url):
    """
    Make a L{URLPath} from a L{str} or L{unicode}.

    @param url: A L{str} representation of a URL.
    @type url: L{str} or L{unicode}.

    @return: a new L{URLPath} derived from the given string.
    @rtype: L{URLPath}
    """
    if not isinstance(url, (str, unicode)):
        raise ValueError("'url' must be a str or unicode")
    if isinstance(url, bytes):
        # On Python 2, accepting 'str' (for compatibility) means we might get
        # 'bytes'.  On py3, this will not work with bytes due to the check
        # above.
        return klass.fromBytes(url)
    return klass._fromURL(_URL.fromText(url))
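# A brief usage sketch (illustrative URL only): text input is parsed with
# _URL.fromText(), while on Python 2 a plain byte string takes the fromBytes()
# branch shown above.
home = URLPath.fromString(u"http://example.com/a/b?c=d")
# Passing a non-string raises ValueError, per the isinstance check above:
# URLPath.fromString(1234)  # -> ValueError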
def setUp(self):
    super(TestSignupModule, self).setUp()
    self.mockconfigdir = FilePath('./test_signup').child('TestSignupModule')
    make_dirs(self.mockconfigdir.path)
    self.SIGNUPSPATH = 'mock_signups.csv'
    self.CONFIGFILEPATH = 'init_test_config.json'
    self.EC2SECRETPATH = 'mock_ec2_secret'
    self.S3SECRETPATH = 'mock_s3_secret'
    self.MONITORPUBKEYPATH = 'MONITORKEYS.pub'

    self.MEMAIL = 'MEMAIL'
    self.MKEYINFO = 'MKEYINFO'
    self.MCUSTOMER_ID = u'cus_x14Charactersx'
    self.MSUBSCRIPTION_ID = u'sub_x14Characterx'
    self.MPLAN_ID = 'XX_consumer_iteration_#_GREEKLETTER#_2XXX-XX-XX'
    self.MENCODED_IDS = (
        'on2wex3yge2eg2dbojqwg5dfoj4a-mn2xgx3yge2eg2dbojqwg5dfojzxq')

    FilePath(self.SIGNUPSPATH).setContent('')
    FilePath(self.CONFIGFILEPATH).setContent(CONFIGFILEJSON)
    FilePath(self.EC2SECRETPATH).setContent(MOCKEC2SECRETCONTENTS)
    FilePath(self.S3SECRETPATH).setContent(MOCKS3SECRETCONTENTS)
    FilePath(self.MONITORPUBKEYPATH).setContent(MONITORPUBKEY)

    self.DEPLOYMENT_CONFIGURATION = model.DeploymentConfiguration(
        domain=u"s4.example.com",
        kubernetes_namespace=u"testing",
        subscription_manager_endpoint=URL.fromText(u"http://localhost/"),

        s3_access_key_id=ZEROPRODUCT["s3_access_key_id"],
        s3_secret_key=MOCKS3SECRETCONTENTS,

        introducer_image=u"tahoe-introducer",
        storageserver_image=u"tahoe-storageserver",
    )

    self.SUBSCRIPTION = model.SubscriptionDetails(
        bucketname="lae-" + self.MENCODED_IDS,
        oldsecrets=old_secrets().example(),
        customer_email=self.MEMAIL,
        customer_pgpinfo=self.MKEYINFO,
        product_id=u"filler",
        customer_id=self.MCUSTOMER_ID,
        subscription_id=self.MSUBSCRIPTION_ID,
        introducer_port_number=12345,
        storage_port_number=12346,
    )
def _finish_convergence_service(k8s_client, options, subscription_client):
    k8s = KubeClient(k8s=k8s_client)

    access_key_id = FilePath(
        options["aws-access-key-id-path"]).getContent().strip()
    secret_access_key = FilePath(
        options["aws-secret-access-key-path"]).getContent().strip()

    aws = AWSServiceRegion(creds=AWSCredentials(
        access_key=access_key_id,
        secret_key=secret_access_key,
    ))

    Message.log(
        event=u"convergence-service:key-notification",
        key_id=access_key_id.decode("ascii"),
        secret_key_hash=sha256(secret_access_key).hexdigest().decode("ascii"),
    )

    # XXX I get to leave a ton of fields empty because I happen to know
    # they're not used in this codepath. :/ Maybe this suggests something has
    # gone wrong ...
    config = DeploymentConfiguration(
        domain=options["domain"].decode("ascii"),
        kubernetes_namespace=options["kubernetes-namespace"].decode("ascii"),
        subscription_manager_endpoint=URL.fromText(
            options["endpoint"].decode("ascii")),

        s3_access_key_id=access_key_id.decode("ascii"),
        s3_secret_key=secret_access_key.decode("ascii"),

        introducer_image=options["introducer-image"].decode("ascii"),
        storageserver_image=options["storageserver-image"].decode("ascii"),

        log_gatherer_furl=None,
        stats_gatherer_furl=None,
    )

    return TimerService(
        options["interval"],
        divert_errors_to_log(converge, u"subscription_converger"),
        config,
        subscription_client,
        k8s,
        aws,
    )
def get_things_done():
    """
    Here is where the service part is set up and the action happens.
    """
    responders = yield start_responders()

    store = MemoryStore()

    # We first validate the directory.
    account_key = _get_account_key()
    try:
        client = yield Client.from_url(
            reactor,
            URL.fromText(acme_url.decode('utf-8')),
            key=JWKRSA(key=account_key),
            alg=RS256,
        )
    except Exception as error:
        print('\n\nFailed to connect to ACME directory. %s' % (error,))
        yield reactor.stop()
        defer.returnValue(None)

    service = AcmeIssuingService(
        email='[email protected],[email protected]',
        cert_store=store,
        client=client,
        clock=reactor,
        responders=responders,
        panic=on_panic,
    )

    # Start the service and wait for it to start.
    yield service.start()

    # Wait for the existing certificate from the storage to be available.
    yield service.when_certs_valid()

    # Request a SAN ... if passed via command line.
    yield service.issue_cert(','.join(requested_domains))

    yield service.stopService()

    print('That was all the example.')
def _finish_convergence_service(
    k8s_client, options, subscription_client, reactor,
):
    k8s = KubeClient(k8s=k8s_client)

    access_key_id = FilePath(
        options["aws-access-key-id-path"]).getContent().strip()
    secret_access_key = FilePath(
        options["aws-secret-access-key-path"]).getContent().strip()

    aws = AWSServiceRegion(creds=AWSCredentials(
        access_key=access_key_id,
        secret_key=secret_access_key,
    ))

    Message.log(
        event=u"convergence-service:key-notification",
        key_id=access_key_id.decode("ascii"),
        secret_key_hash=sha256(secret_access_key).hexdigest().decode("ascii"),
    )

    config = DeploymentConfiguration(
        domain=options["domain"].decode("ascii"),
        kubernetes_namespace=options["kubernetes-namespace"].decode("ascii"),
        subscription_manager_endpoint=URL.fromText(
            options["endpoint"].decode("ascii")),

        s3_access_key_id=access_key_id.decode("ascii"),
        s3_secret_key=secret_access_key.decode("ascii"),

        introducer_image=options["introducer-image"].decode("ascii"),
        storageserver_image=options["storageserver-image"].decode("ascii"),

        log_gatherer_furl=options["log-gatherer-furl"],
        stats_gatherer_furl=options["stats-gatherer-furl"],
    )

    return _convergence_service(
        reactor,
        options["interval"],
        config,
        subscription_client,
        k8s,
        aws,
    )
def urlFromBytes(b):
    return URL.fromText(b.decode("utf-8"))
def fetch_title(self, url, hostname_tag=False, friendly_errors=False):
    """Fetch the document at *url* and return a `Deferred` yielding the
    document title or summary as a Unicode string.

    *url* may be a Unicode string IRI, a byte string URI, or a Twisted
    `URL`.

    If *hostname_tag* is true, prefix the extracted title with the
    hostname of the initially requested URI or IRI in the form that was
    originally provided, as well as the hostname of the final ASCII-only
    URI if it differs due to redirects or normalization.

    If *friendly_errors* is true, catch common connection errors and
    return a description of the error as the extracted title instead of
    reraising.  Otherwise, all errors bubble to the caller.
    """
    title = None
    if isinstance(url, unicode):
        url = URL.fromText(url)
    elif isinstance(url, str):
        url = URL.fromText(url.decode('ascii'))
    current = url
    response = None
    for _ in xrange(self.max_soft_redirects):
        last_response = response
        # This encoding should be safe, since asURI() only returns URIs
        # with ASCII code points.
        request = self.agent.request(
            'GET', current.asURI().asText().encode('ascii'))
        if friendly_errors:
            request.addErrback(describe_error)
        response = yield request
        if isinstance(response, basestring):
            # We got an error message from describe_error.  Bail.
            title = response
            break
        response.setPreviousResponse(last_response)
        content_type = cgi.parse_header(
            response.headers.getRawHeaders('Content-Type', [''])[0])[0]
        if content_type in self.extractors:
            extractor = self.extractors[content_type]
            extracted = yield extractor.extract(response)
            if isinstance(extracted, Redirect):
                current = URL.fromText(
                    response.request.absoluteURI.decode('ascii')).click(
                        extracted.location)
                continue
            title = extracted
        # The only case where we'd want to loop again is when the
        # response returned is a soft redirect.
        break
    else:
        if friendly_errors:
            title = u'Encountered too many redirects.'
        else:
            raise ResponseFailed([Failure(InfiniteRedirection(
                599, 'Too many soft redirects',
                location=current.asURI().asText().encode('ascii')))])

    if title is None:
        title = u'{} document'.format(content_type or u'Unknown')
        if response.length is not UNKNOWN_LENGTH:
            title += u' ({})'.format(filesize(response.length))

    if hostname_tag:
        tag = url.host
        if isinstance(response, Response):
            initial = url.host
            final = URL.fromText(
                response.request.absoluteURI.decode('ascii')).host
            if initial != final:
                tag = u'{} \u2192 {}'.format(initial, final)
        title = u'[{}] {}'.format(tag, title)

    returnValue(title)
from twisted.web.client import Agent, HTTPConnectionPool
from twisted.web.http_headers import Headers

from txacme import __version__
from txacme.logging import (
    LOG_ACME_ANSWER_CHALLENGE, LOG_ACME_CONSUME_DIRECTORY,
    LOG_ACME_CREATE_AUTHORIZATION, LOG_ACME_FETCH_CHAIN,
    LOG_ACME_POLL_AUTHORIZATION, LOG_ACME_REGISTER,
    LOG_ACME_REQUEST_CERTIFICATE, LOG_ACME_UPDATE_REGISTRATION,
    LOG_HTTP_PARSE_LINKS, LOG_JWS_ADD_NONCE, LOG_JWS_CHECK_RESPONSE,
    LOG_JWS_GET, LOG_JWS_GET_NONCE, LOG_JWS_HEAD, LOG_JWS_POST,
    LOG_JWS_REQUEST, LOG_JWS_SIGN)
from txacme.util import tap


LETSENCRYPT_DIRECTORY = URL.fromText(
    u'https://acme-v01.api.letsencrypt.org/directory')

LETSENCRYPT_STAGING_DIRECTORY = URL.fromText(
    u'https://acme-staging.api.letsencrypt.org/directory')


# Borrowed from requests, with modifications.
def _parse_header_links(response):
    """
    Parse the links from a Link: header field.

    .. todo:: Links with the same relation collide at the moment.

    :param bytes value: The header value.
def test_wormhole_tahoe_configuration(
        self,
        customer_email, customer_id, subscription_id, old_secrets,
        introducer_port_number, storage_port_number,
):
    """
    The wormhole signup mechanism sends a JSON blob of Tahoe-LAFS
    configuration via a magic wormhole identified by a wormhole code
    produced during signup.
    """
    assume(introducer_port_number != storage_port_number)

    provisioned = []

    def provision_subscription(smclient, subscription):
        p = attr.assoc(
            subscription,
            introducer_port_number=introducer_port_number,
            storage_port_number=storage_port_number,
            oldsecrets=old_secrets,
        )
        provisioned.append(p)
        return succeed(p)

    plan_identifier = u"foobar"
    reactor = Clock()
    server = MemoryWormholeServer()

    provisioner = get_provisioner(
        reactor,
        URL.fromText(u"http://subscription-manager/"),
        provision_subscription,
    )
    signup = get_wormhole_signup(
        reactor,
        provisioner,
        server,
        URL.fromText(u"ws://foo.invalid/"),
        FilePath(self.mktemp()),
    )
    d = signup.signup(
        customer_email, customer_id, subscription_id, plan_identifier)
    wormhole_claim = self.successResultOf(d)

    wh = server.create(
        APPID,
        u"ws://foo.invalid/",
        reactor,
    )
    wh.set_code(wormhole_claim.code)
    d = wh.when_code()

    def foo(x):
        wh.send_message('{"abilities": {"client-v1": {}}}')
        return wh.get_message()
    d.addCallback(foo)

    def bar(arg):
        self.assertEqual(
            loads(arg),
            {"abilities": {"server-v1": {}}},
        )
        return wh.get_message()
    d.addCallback(bar)

    received = self.successResultOf(d)
    received_config = loads(received)
    self.assertThat(
        received_config["introducer"],
        Equals(provisioned[0].external_introducer_furl),
    )
def request(self, method, url, **kwargs):
    method = method.encode('ascii').upper()

    # Join parameters provided in the URL
    # and the ones passed as argument.
    params = kwargs.get('params')
    if params:
        url = _combine_query_params(url, params)

    if isinstance(url, unicode):
        url = URL.fromText(url).asURI().asText().encode('ascii')

    # Convert headers dictionary to
    # twisted raw headers format.
    headers = kwargs.get('headers')
    if headers:
        if isinstance(headers, dict):
            h = Headers({})
            for k, v in headers.items():
                if isinstance(k, unicode):
                    k = k.encode('ascii')

                if isinstance(v, bytes):
                    h.addRawHeader(k, v)
                elif isinstance(v, unicode):
                    h.addRawHeader(k, v.encode('ascii'))
                elif isinstance(v, list):
                    cleanHeaders = []
                    for item in v:
                        if isinstance(item, unicode):
                            cleanHeaders.append(item.encode('ascii'))
                        else:
                            cleanHeaders.append(item)
                    h.setRawHeaders(k, cleanHeaders)
                else:
                    h.setRawHeaders(k, v)

            headers = h
    else:
        headers = Headers({})

    # Here we choose a right producer
    # based on the parameters passed in.
    bodyProducer = None
    data = kwargs.get('data')
    files = kwargs.get('files')
    if files:
        # If the files keyword is present we will issue a
        # multipart/form-data request as it suits better for cases
        # with files and/or large objects.
        files = list(_convert_files(files))
        boundary = str(uuid.uuid4()).encode('ascii')
        headers.setRawHeaders(
            b'content-type', [
                b'multipart/form-data; boundary=' + boundary])
        if data:
            data = _convert_params(data)
        else:
            data = []

        bodyProducer = multipart.MultiPartProducer(
            data + files, boundary=boundary)
    elif data:
        # Otherwise stick to x-www-form-urlencoded format
        # as it's generally faster for smaller requests.
        if isinstance(data, (dict, list, tuple)):
            headers.setRawHeaders(
                b'content-type', [b'application/x-www-form-urlencoded'])
            data = urlencode(data, doseq=True)
        bodyProducer = self._data_to_body_producer(data)

    cookies = kwargs.get('cookies', {})

    if not isinstance(cookies, CookieJar):
        cookies = cookiejar_from_dict(cookies)

    cookies = merge_cookies(self._cookiejar, cookies)
    wrapped_agent = CookieAgent(self._agent, cookies)

    if kwargs.get('allow_redirects', True):
        if kwargs.get('browser_like_redirects', False):
            wrapped_agent = BrowserLikeRedirectAgent(wrapped_agent)
        else:
            wrapped_agent = RedirectAgent(wrapped_agent)

    wrapped_agent = ContentDecoderAgent(wrapped_agent,
                                        [(b'gzip', GzipDecoder)])

    auth = kwargs.get('auth')
    if auth:
        wrapped_agent = add_auth(wrapped_agent, auth)

    d = wrapped_agent.request(
        method, url, headers=headers, bodyProducer=bodyProducer)

    timeout = kwargs.get('timeout')
    if timeout:
        delayedCall = default_reactor(kwargs.get('reactor')).callLater(
            timeout, d.cancel)

        def gotResult(result):
            if delayedCall.active():
                delayedCall.cancel()
            return result

        d.addBoth(gotResult)

    if not kwargs.get('unbuffered', False):
        d.addCallback(_BufferedResponse)

    return d.addCallback(_Response, cookies)
def test_emailed_introducer_furl(
        self,
        customer_email, customer_id, subscription_id, old_secrets,
        introducer_port_number, storage_port_number,
):
    """
    The email signup mechanism sends an activation email including an
    introducer furl which points at the server and port identified by the
    activated subscription detail object.
    """
    assume(introducer_port_number != storage_port_number)

    emails = []

    def provision_subscription(smclient, subscription):
        return succeed(
            attr.assoc(
                subscription,
                introducer_port_number=introducer_port_number,
                storage_port_number=storage_port_number,
                oldsecrets=old_secrets,
            ),
        )

    def send_signup_confirmation(
            customer_email, external_introducer_furl, customer_keyinfo,
            stdout, stderr,
    ):
        emails.append((customer_email, "success", external_introducer_furl))
        return succeed(None)

    def send_notify_failure(
            reason, customer_email, logfilename, stdout, stderr,
    ):
        emails.append((customer_email, "failure", reason))
        return succeed(None)

    plan_identifier = u"foobar"

    reactor = object()
    signup = get_email_signup(
        reactor,
        get_provisioner(
            reactor,
            URL.fromText(u"http://subscription-manager/"),
            provision_subscription,
        ),
        send_signup_confirmation,
        send_notify_failure,
    )
    d = signup.signup(
        customer_email, customer_id, subscription_id, plan_identifier)
    self.successResultOf(d)

    [(recipient, result, rest)] = emails
    self.expectThat(recipient, Equals(customer_email))
    self.expectThat(result, Equals("success"))

    def get_hint_port(furl):
        tub_id, location_hints, name = decode_furl(furl)
        host, port = location_hints[0].split(u":")
        return int(port)

    self.expectThat(
        rest,
        AfterPreprocessing(
            get_hint_port,
            Equals(introducer_port_number),
        ),
    )