def close(self):
    class T:
        def stop(self):
            pass

    def cbClose(ignored):
        fh.close()
        return ignored

    def ebBuckets(p):
        p.printTraceback()
        for r in getattr(p.value, 'reasons', []):
            r.printTraceback()

    # Open in binary mode: FileBodyProducer deals in bytes.
    fh = open(self.fObj.name, 'rb')
    fileReader = FileBodyProducer(fh, readSize=64 * 1024)
    fileReader._task = T()
    d = self._object.upload(fileReader)
    d.addBoth(cbClose)
    d.addErrback(ebBuckets)
    return d

def decrypt_bytes():
    producer = FileBodyProducer(read_handle)
    buff = BytesIO()
    yield producer.startProducing(buff)
    self.buff = buff.getvalue()
    self.len_read += len(self.buff)
    write_bytes()
    finish_decrypt()

def encrypt_blob(filename, key, iv):
    dummy_announcer = DummyHashAnnouncer()
    manager = DiskBlobManager(dummy_announcer, '.', '.')
    yield manager.setup()
    creator = CryptStreamCreator(manager, filename, key, iv_generator(iv))
    # Open in binary mode: the producer must feed raw bytes to the creator.
    with open(filename, 'rb') as infile:
        producer = FileBodyProducer(infile, readSize=2**22)
        yield producer.startProducing(creator)
    yield creator.stop()

def _save_verified_blob(self, writer):
    if self.saved_verified_blob is False:
        writer.write_handle.seek(0)
        out_path = os.path.join(self.blob_dir, self.blob_hash)
        producer = FileBodyProducer(writer.write_handle)
        yield producer.startProducing(open(out_path, 'wb'))
        self.saved_verified_blob = True
        defer.returnValue(True)
    else:
        raise DownloadCanceledError()

def test_put_voucher(self, get_config, voucher):
    """
    When a voucher is ``PUT`` to ``VoucherCollection`` it is passed in to
    the redemption model object for handling and an ``OK`` response is
    returned.
    """
    tempdir = self.useFixture(TempDir())
    config = get_config(tempdir.join(b"tahoe"), b"tub.port")
    root = root_from_config(config, datetime.now)
    agent = RequestTraversalAgent(root)
    producer = FileBodyProducer(
        BytesIO(dumps({u"voucher": voucher})),
        cooperator=uncooperator(),
    )
    requesting = agent.request(
        b"PUT",
        b"http://127.0.0.1/voucher",
        bodyProducer=producer,
    )
    self.addDetail(
        u"requesting result",
        text_content(u"{}".format(vars(requesting.result))),
    )
    self.assertThat(
        requesting,
        succeeded(
            ok_response(),
        ),
    )

def test_put_invalid_body(self, get_config, body):
    """
    If the body of a ``PUT`` to ``VoucherCollection`` does not consist of an
    object with a single *voucher* property then the response is
    *BAD REQUEST*.
    """
    tempdir = self.useFixture(TempDir())
    config = get_config(tempdir.join(b"tahoe"), b"tub.port")
    root = root_from_config(config, datetime.now)
    agent = RequestTraversalAgent(root)
    producer = FileBodyProducer(
        BytesIO(body),
        cooperator=uncooperator(),
    )
    requesting = agent.request(
        b"PUT",
        b"http://127.0.0.1/voucher",
        bodyProducer=producer,
    )
    self.addDetail(
        u"requesting result",
        text_content(u"{}".format(vars(requesting.result))),
    )
    self.assertThat(
        requesting,
        succeeded(
            bad_request_response(),
        ),
    )

def async_send(self, data, headers, success_cb, failure_cb):
    d = self._agent.request(
        'POST', self._url,
        bodyProducer=FileBodyProducer(io.BytesIO(data)),
        headers=Headers(dict((k, [v]) for k, v in headers.items())))

    def on_failure(failure):
        ex = failure.check(ResponseNeverReceived)
        if ex:
            failure_cb([f.value for f in failure.value.reasons])
        else:
            failure_cb(failure.value)

    def on_success(response):
        if response.code == 200:
            success_cb()
        else:
            def on_error_body(body):
                failure_cb(Exception(response.code, response.phrase, body))
            return readBody(response).addCallback(on_error_body)

    d.addCallback(on_success).addErrback(on_failure)

def test_post(self, get_config, voucher, unblinded_tokens):
    """
    When the unblinded token collection receives a **POST**, the unblinded
    tokens in the request body are inserted into the system and an OK
    response is generated.
    """
    tempdir = self.useFixture(TempDir())
    config = get_config(tempdir.join(b"tahoe"), b"tub.port")
    root = root_from_config(config, datetime.now)
    agent = RequestTraversalAgent(root)
    producer = FileBodyProducer(
        BytesIO(dumps({
            u"unblinded-tokens": list(
                token.unblinded_token for token in unblinded_tokens)
        })),
        cooperator=uncooperator(),
    )
    requesting = agent.request(
        b"POST",
        b"http://127.0.0.1/unblinded-token",
        bodyProducer=producer,
    )
    self.assertThat(
        requesting,
        succeeded(
            ok_response(headers=application_json()),
        ),
    )
    stored_tokens = root.controller.store.backup()[u"unblinded-tokens"]
    self.assertThat(
        stored_tokens,
        Equals(list(
            token.unblinded_token for token in unblinded_tokens)),
    )

def getMetrics(self, uuid, dpNames, cf='AVERAGE', rate=False,
               downsample="1h-avg", start=None, end=None, deviceId=None,
               returnSet="EXACT"):
    metrics = []
    if isinstance(dpNames, basestring):
        dpNames = [dpNames]
    for dpName in dpNames:
        # TODO find callers
        name = ensure_prefix(deviceId, dpName)
        metrics.append(dict(
            metric=name,
            aggregator=self._aggMapping.get(cf.lower(), cf.lower()),
            rpn='',
            rate=rate,
            format='%.2lf',
            tags=dict(contextUUID=[uuid]),
            name='%s_%s' % (uuid, dpName)))
    request = dict(
        returnset=returnSet,
        start=start,
        end=end,
        downsample=downsample,
        metrics=metrics)
    body = FileBodyProducer(StringIO(json.dumps(request)))
    d = self.agent.request('POST', self._metric_url, self._headers, body)
    return d

def _put_incoming(self, user, blob_id, scheme, db, request):
    raw_content = request.content.read()
    preamble = self.formatter.preamble(raw_content, blob_id)
    request.content = BytesIO(preamble + ' ' + raw_content)

    def catchBlobExists(failure):
        failure.trap(BlobExists)
        request.setResponseCode(409)
        request.write("Blob already exists: %s" % blob_id)
        request.finish()

    def catchQuotaExceeded(failure):
        failure.trap(QuotaExceeded)
        logger.error("Error 507: Quota exceeded for user: %s" % user)
        request.setResponseCode(507)
        request.write('Quota Exceeded!')
        request.finish()

    producer = FileBodyProducer(request.content)
    d = db.write_blob(user, blob_id, producer, namespace='MX')
    flags = [Flags.PENDING]
    d.addCallback(lambda _: db.set_flags(user, blob_id, flags,
                                         namespace='MX'))
    d.addCallback(lambda _: request.finish())
    d.addErrback(catchBlobExists)
    d.addErrback(catchQuotaExceeded)
    d.addErrback(self._error, request)

def _notify(self, assoc, attempt):
    mxid = assoc["mxid"]
    domain = mxid.split(":")[-1]
    server = yield self._pickServer(domain)

    callbackUrl = "https://%s/_matrix/federation/v1/3pid/onbind" % (
        server,
    )

    logger.info("Making bind callback to: %s", callbackUrl)
    # TODO: Not be woefully insecure
    agent = Agent(reactor, InsecureInterceptableContextFactory())

    reqDeferred = agent.request(
        "POST",
        callbackUrl.encode("utf8"),
        Headers({
            "Content-Type": ["application/json"],
            "User-Agent": ["Sydent"],
        }),
        FileBodyProducer(StringIO(json.dumps(assoc)))
    )
    reqDeferred.addCallback(
        lambda _: logger.info("Successfully notified on bind for %s" %
                              (mxid,))
    )
    reqDeferred.addErrback(
        lambda err: self._notifyErrback(assoc, attempt, err)
    )

async def _perform_http_request(self, body, headers):
    """
    Perform an HTTP request to the FCM server with the body and headers
    specified.

    Args:
        body (nested dict): Body. Will be JSON-encoded.
        headers (Headers): HTTP Headers.

    Returns:
        A tuple of (response, response_text).
    """
    body_producer = FileBodyProducer(BytesIO(json.dumps(body).encode()))

    # we use the semaphore to actually limit the number of concurrent
    # requests, since the HTTPConnectionPool will actually just lead to more
    # requests being created but not pooled – it does not perform limiting.
    await self.connection_semaphore.acquire()
    try:
        response = await self.http_agent.request(
            b"POST",
            GCM_URL,
            headers=Headers(headers),
            bodyProducer=body_producer)
        response_text = (await readBody(response)).decode()
    except Exception as exception:
        raise TemporaryNotificationDispatchException(
            "GCM request failure") from exception
    finally:
        self.connection_semaphore.release()
    return response, response_text

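# Hedged sketch of the limiting described in the comment above, assuming
# connection_semaphore is a twisted.internet.defer.DeferredSemaphore. The
# limited_request helper and MAX_CONCURRENT value are illustrative, not
# part of the project: DeferredSemaphore.run() acquires, invokes the
# callable, and releases once the resulting Deferred fires, which is the
# acquire/try/finally shape above written as a one-liner.
from twisted.internet.defer import DeferredSemaphore

MAX_CONCURRENT = 10
connection_semaphore = DeferredSemaphore(MAX_CONCURRENT)

def limited_request(agent, url):
    # At most MAX_CONCURRENT requests will be in flight at once.
    return connection_semaphore.run(agent.request, b"GET", url)
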
def test_failedReadWhileProducing(self):
    """
    If a read from the input file fails while producing bytes to the
    consumer, the L{Deferred} returned by
    L{MultiPartProducer.startProducing} fires with a L{Failure} wrapping
    that exception.
    """
    class BrokenFile(object):
        def read(self, count):
            raise IOError("Simulated bad thing")

    producer = MultiPartProducer(
        {
            "field": (
                "file name",
                "text/hello-world",
                FileBodyProducer(BrokenFile(), cooperator=self.cooperator))
        },
        cooperator=self.cooperator,
        boundary=b"heyDavid")
    complete = producer.startProducing(BytesIO())
    while self._scheduled:
        self._scheduled.pop(0)()
    self.failureResultOf(complete).trap(IOError)

def login(self):
    self.logged_in = False
    user = yield self.config.get("user")
    passwd = yield self.config.get("pass")
    url = 'https://www.crunchyroll.com/?a=formhandler'
    headers = Headers({
        'Content-Type': ['application/x-www-form-urlencoded'],
        'Referer': ['https://www.crunchyroll.com'],
        'User-Agent': [
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:17.0) Gecko/17.0 Firefox/17.0'
        ]
    })
    data = FileBodyProducer(StringIO(urllib.urlencode({
        'formname': 'RpcApiUser_Login',
        'next_url': '',
        'fail_url': '/login',
        'name': user,
        'password': passwd
    })))
    response = yield self.agent.request("POST", url, headers, data)
    self.logged_in = True

def testCreateCommitProvision(self):
    agent = Agent(reactor)
    header = Headers({'User-Agent': ['OpenNSA Test Client'],
                      'Host': ['localhost']})

    payload = {
        "source": "aruba:topology:ps?vlan=1783",
        "destination": "aruba:topology:bon?vlan=1783",
        "auto_commit": False
    }
    payload_data = json.dumps(payload)

    create_url = 'http://localhost:%i%s' % (self.PORT, rest.PATH)

    producer = FileBodyProducer(StringIO(payload_data))
    resp = yield agent.request('POST', create_url, header, producer)

    self.failUnlessEqual(resp.code, 201, 'Service did not return created')
    if not resp.headers.hasHeader('location'):
        self.fail('No location header in create response')

    conn_url = 'http://localhost:%i%s' % (
        self.PORT, resp.headers.getRawHeaders('location')[0])

    # so... the connection will not necessarily have moved into reserveheld
    # or all sub-connections might not even be in place yet
    # we cannot really commit until we are in created and ReserveHeld
    # the clock doesn't really do anything here (not scheduling related)
    yield task.deferLater(reactor, 0.1, self._createCommitProvisionCB,
                          agent, conn_url, header)

def test_set_pxe_boot(self):
    driver = RedfishPowerDriver()
    context = make_context()
    url = driver.get_url(context)
    node_id = b"1"
    headers = driver.make_auth_headers(**context)
    mock_file_body_producer = self.patch(redfish_module, "FileBodyProducer")
    payload = FileBodyProducer(
        BytesIO(
            json.dumps({
                "Boot": {
                    "BootSourceOverrideEnabled": "Once",
                    "BootSourceOverrideTarget": "Pxe",
                }
            }).encode("utf-8")))
    mock_file_body_producer.return_value = payload
    mock_redfish_request = self.patch(driver, "redfish_request")
    yield driver.set_pxe_boot(url, node_id, headers)
    self.assertThat(
        mock_redfish_request,
        MockCalledOnceWith(
            b"PATCH",
            join(url, b"redfish/v1/Systems/%s" % node_id),
            headers,
            payload,
        ),
    )

def create(self, subscription_id, details):
    """
    Create a new, active subscription.

    This issues a ``POST`` to ``/v1/subscriptions``.

    :param unicode subscription_id: The unique identifier for this new
        subscription.

    :param SubscriptionDetails details: The details of the subscription.

    :return: A ``Deferred`` that fires when the subscription has been
        created.
    """
    if details.subscription_id != subscription_id:
        raise ValueError(
            "subscription_id must equal value in details object; "
            "{} != {}".format(details.subscription_id, subscription_id))
    d = self.agent.request(
        b"POST",
        self._url(u"v1", u"subscriptions"),
        bodyProducer=FileBodyProducer(
            BytesIO(dumps(marshal_subscription(details))),
            cooperator=self.cooperator,
        ),
    )
    d.addCallback(require_code(CREATED))
    d.addCallback(readBody)
    d.addCallback(lambda body: SubscriptionDetails(**loads(body)))
    return d

def postJson(self, uri: str,
             jsonObject: JsonDict) -> Optional["Deferred[Response]"]:
    """
    Sends a POST request over HTTPS.

    :param uri: The URI to send the request to.
    :param jsonObject: The request's body.

    :return: The request's response.
    """
    logger.debug("POSTing request to %s", uri)
    if not self.agent:
        logger.error("HTTPS post attempted but HTTPS is not configured")
        return None

    headers = Headers({
        "Content-Type": ["application/json"],
        "User-Agent": ["Sydent"]
    })

    json_bytes = json.dumps(jsonObject).encode("utf8")

    reqDeferred = self.agent.request(
        b"POST", uri.encode("utf8"), headers,
        FileBodyProducer(BytesIO(json_bytes)))
    return reqDeferred

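# Hedged usage sketch for the helper above (the URI, post_and_read name,
# and the client/logger parameters are illustrative): the returned
# Deferred, when not None, fires with an IResponse whose body can be
# drained with twisted.web.client.readBody.
from twisted.web.client import readBody

def post_and_read(client, logger):
    # client is assumed to be an instance of the class defining postJson.
    d = client.postJson("https://example.com/endpoint", {"key": "value"})
    if d is None:
        return None  # HTTPS was not configured
    d.addCallback(readBody)
    d.addCallback(lambda body: logger.debug("response body: %r", body))
    return d
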
def test_unicodeAttachmentName(self):
    """
    Make sure unicode attachment names are supported.
    """
    output, producer = self.getOutput(
        MultiPartProducer(
            {
                "field": (
                    u'Так себе имя.jpg',
                    "image/jpeg",
                    FileBodyProducer(
                        inputFile=BytesIO(b"my lovely bytes"),
                        cooperator=self.cooperator))
            },
            cooperator=self.cooperator,
            boundary=b"heyDavid"),
        with_producer=True)

    expected = self.newLines(u"""--heyDavid
Content-Disposition: form-data; name="field"; filename="Так себе имя.jpg"
Content-Type: image/jpeg
Content-Length: 15

my lovely bytes
--heyDavid--
""".encode("utf-8"))

    self.assertEqual(len(expected), producer.length)
    self.assertEqual(expected, output)

def test_missingAttachmentName(self):
    """
    Make sure attachments without names are supported.
    """
    output, producer = self.getOutput(
        MultiPartProducer(
            {
                "field": (
                    None,
                    "image/jpeg",
                    FileBodyProducer(
                        inputFile=BytesIO(b"my lovely bytes"),
                        cooperator=self.cooperator,
                    ))
            },
            cooperator=self.cooperator,
            boundary=b"heyDavid"),
        with_producer=True)

    expected = self.newLines(b"""--heyDavid
Content-Disposition: form-data; name="field"
Content-Type: image/jpeg
Content-Length: 15

my lovely bytes
--heyDavid--
""")

    self.assertEqual(len(expected), producer.length)
    self.assertEqual(expected, output)

def test_pauseProducing(self):
    """
    L{MultiPartProducer.pauseProducing} temporarily suspends writing bytes
    from the input file to the given L{IConsumer}.
    """
    inputFile = BytesIO(b"hello, world!")
    consumer = output = BytesIO()

    producer = MultiPartProducer(
        {
            "field": (
                "file name",
                "text/hello-world",
                FileBodyProducer(inputFile, cooperator=self.cooperator))
        },
        cooperator=self.cooperator,
        boundary=b"heyDavid")
    complete = producer.startProducing(consumer)

    self._scheduled.pop(0)()
    currentValue = output.getvalue()
    self.assertTrue(currentValue)
    producer.pauseProducing()

    # Sort of depends on an implementation detail of Cooperator: even
    # though the only task is paused, there's still a scheduled call. If
    # this were to go away because Cooperator became smart enough to cancel
    # this call in this case, that would be fine.
    self._scheduled.pop(0)()

    # Since the producer is paused, no new data should be here.
    self.assertEqual(output.getvalue(), currentValue)
    self.assertNoResult(complete)

def test_resumeProducing(self):
    """
    L{MultiPartProducer.resumeProducing} re-commences writing bytes from
    the input file to the given L{IConsumer} after it was previously paused
    with L{MultiPartProducer.pauseProducing}.
    """
    inputFile = BytesIO(b"hello, world!")
    consumer = output = BytesIO()

    producer = MultiPartProducer(
        {
            "field": (
                "file name",
                "text/hello-world",
                FileBodyProducer(inputFile, cooperator=self.cooperator))
        },
        cooperator=self.cooperator,
        boundary=b"heyDavid")
    producer.startProducing(consumer)

    self._scheduled.pop(0)()
    currentValue = output.getvalue()
    self.assertTrue(currentValue)
    producer.pauseProducing()
    producer.resumeProducing()
    self._scheduled.pop(0)()
    # make sure we started producing new data after resume
    self.assertTrue(len(currentValue) < len(output.getvalue()))

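# A minimal, self-contained sketch of the pause/resume contract the two
# tests above exercise, applied to FileBodyProducer directly and driving
# the scheduler by hand exactly as their _scheduled list does. The
# one-unit-per-tick termination predicate and readSize value here are
# illustrative choices, not anything the tests mandate.
from io import BytesIO

from twisted.internet.task import Cooperator
from twisted.web.client import FileBodyProducer

scheduled = []
# First argument: termination predicate factory; always-True means each
# tick performs exactly one unit of work (one read of readSize bytes).
coop = Cooperator(lambda: lambda: True, scheduled.append)

output = BytesIO()
producer = FileBodyProducer(
    BytesIO(b"hello, world!"), cooperator=coop, readSize=4)
d = producer.startProducing(output)

scheduled.pop(0)()              # one iteration writes the first 4 bytes
assert output.getvalue() == b"hell"
producer.pauseProducing()
scheduled.pop(0)()              # leftover tick runs, but writes nothing
assert output.getvalue() == b"hell"
producer.resumeProducing()
scheduled.pop(0)()              # production continues after resume
assert len(output.getvalue()) > 4
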
def get_page(self, url, *args, **kwds):
    """
    Define our own get_page method so that we can easily override the
    factory when we need to. This was copied from the following:

        * twisted.web.client.getPage
        * twisted.web.client._makeGetterFactory
    """
    contextFactory = None
    scheme, host, port, path = parse(url)
    data = kwds.get('postdata', None)
    self._method = method = kwds.get('method', 'GET')
    self.request_headers = self._headers(kwds.get('headers', {}))
    if (self.body_producer is None) and (data is not None):
        self.body_producer = FileBodyProducer(StringIO(data))
    if self.endpoint.ssl_hostname_verification:
        contextFactory = None
    else:
        contextFactory = WebClientContextFactory()
    agent = _get_agent(scheme, host, self.reactor, contextFactory)
    if scheme == "https":
        self.client.url = url
    d = agent.request(method, url, self.request_headers, self.body_producer)
    d.addCallback(self._handle_response)
    return d

def test_startProducing(self):
    """
    L{MultiPartProducer.startProducing} starts writing bytes from the input
    file to the given L{IConsumer} and returns a L{Deferred} which fires
    when they have all been written.
    """
    consumer = output = BytesIO()

    producer = MultiPartProducer(
        {
            b"field": (
                'file name',
                "text/hello-world",
                FileBodyProducer(
                    BytesIO(b"Hello, World"),
                    cooperator=self.cooperator))
        },
        cooperator=self.cooperator,
        boundary=b"heyDavid")
    complete = producer.startProducing(consumer)

    iterations = 0
    while self._scheduled:
        iterations += 1
        self._scheduled.pop(0)()

    self.assertTrue(iterations > 1)
    self.assertEqual(
        self.newLines(b"""--heyDavid
Content-Disposition: form-data; name="field"; filename="file name"
Content-Type: text/hello-world
Content-Length: 12

Hello, World
--heyDavid--
"""),
        output.getvalue())
    self.assertEqual(None, self.successResultOf(complete))

def assertResponseCode(self, method, path, request_body, expected_code):
    """
    Issue an HTTP request and make an assertion about the response code.

    :param bytes method: The HTTP method to use in the request.
    :param bytes path: The resource path to use in the request.
    :param dict request_body: A JSON-encodable object to encode (as JSON)
        into the request body.  Or ``None`` for no request body.
    :param int expected_code: The status code expected in the response.

    :return: A ``Deferred`` that will fire when the response has been
        received.  It will fire with a failure if the status code is not
        what was expected.  Otherwise it will fire with an ``IResponse``
        provider representing the response.
    """
    if request_body is None:
        headers = None
        body_producer = None
    else:
        headers = Headers({b"content-type": [b"application/json"]})
        body_producer = FileBodyProducer(BytesIO(dumps(request_body)))
    requesting = self.agent.request(method, path, headers, body_producer)

    def check_code(response):
        self.assertEqual(expected_code, response.code)
        return response
    requesting.addCallback(check_code)
    return requesting

def load(self, details):
    """
    Load existing subscription details into the system as an active
    subscription.

    This issues a ``PUT`` to ``/v1/subscriptions/<id>``.

    :param SubscriptionDetails details: The existing subscription details,
        including node secrets.

    :return: A ``Deferred`` that fires when the subscription has been
        loaded.
    """
    d = self.agent.request(
        b"PUT",
        self._url(u"v1", u"subscriptions", details.subscription_id),
        bodyProducer=FileBodyProducer(
            BytesIO(dumps(marshal_subscription(details))),
            cooperator=self.cooperator,
        ),
    )
    d.addCallback(require_code(CREATED))
    d.addCallback(readBody)
    d.addCallback(lambda body: SubscriptionDetails(**loads(body)))
    return d

def postJson(self, uri, jsonObject):
    """
    Sends a POST request over HTTPS.

    :param uri: The URI to send the request to.
    :type uri: unicode
    :param jsonObject: The request's body.
    :type jsonObject: dict[any, any]

    :return: The request's response.
    :rtype: twisted.internet.defer.Deferred[twisted.web.iweb.IResponse]
    """
    logger.debug("POSTing request to %s", uri)
    if not self.agent:
        logger.error("HTTPS post attempted but HTTPS is not configured")
        return

    headers = Headers({
        'Content-Type': ['application/json'],
        'User-Agent': ['Sydent']
    })

    json_bytes = json.dumps(jsonObject).encode("utf8")

    reqDeferred = self.agent.request(
        b'POST', uri.encode('utf8'), headers,
        FileBodyProducer(BytesIO(json_bytes)))
    return reqDeferred

def send(self, method, url, headers=None, data=None, files=None,
         callback=None, errback=None):
    bProducer = None
    if data:
        bProducer = StringProducer(data)
    elif files:
        if len(files) > 1:
            raise Exception('twisted transport currently only accepts one'
                            ' multipart file')
        boundary, body = encodeForm(files[0][0], files[0][1][1],
                                    files[0][1][2])
        if headers is None:
            headers = {}
        headers['Content-Type'] = \
            "multipart/form-data; boundary={}".format(boundary)
        bProducer = FileBodyProducer(StringIO.StringIO(body))
    theaders = None
    if headers:
        theaders = Headers({str(k): [str(v)]
                            for (k, v) in headers.iteritems()})
    d = self.agent.request(method, str(url), theaders, bProducer)
    d.addCallback(self._callback, callback, data, headers)
    d.addErrback(self._errback, errback)
    return d

class BlobEncryptor(object):
    """
    Produces encrypted data from the cleartext data associated with a given
    SoledadDocument using AES-256 cipher in GCM mode.

    The production happens using Twisted's FileBodyProducer, which uses a
    Cooperator to schedule calls and can be paused/resumed. Each call takes
    at most 65536 bytes from the input.

    Both the production input and output are file descriptors, so they can
    be applied to a stream of data.
    """

    def __init__(self, doc_info, content_fd, secret=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self._content_fd = content_fd
        content_fd.seek(0, os.SEEK_END)
        self._content_size = content_fd.tell()
        content_fd.seek(0)
        self._producer = FileBodyProducer(content_fd, readSize=2**16)

        self.sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self._aes = AESWriter(self.sym_key)
        self._aes.authenticate(self._encode_preamble())

    @property
    def iv(self):
        return self._aes.iv

    @property
    def tag(self):
        return self._aes.tag

    def encrypt(self):
        """
        Starts producing encrypted data from the cleartext data.

        :return: A deferred which will be fired when encryption ends and
            whose callback will be invoked with the resulting ciphertext.
        :rtype: twisted.internet.defer.Deferred
        """
        d = self._producer.startProducing(self._aes)
        d.addCallback(lambda _: self._end_crypto_stream())
        return d

    def _encode_preamble(self):
        current_time = int(time.time())
        return PACMAN.pack(
            BLOB_SIGNATURE_MAGIC,
            ENC_SCHEME.symkey,
            ENC_METHOD.aes_256_gcm,
            current_time,
            self.iv,
            str(self.doc_id),
            str(self.rev),
            self._content_size)

    def _end_crypto_stream(self):
        preamble, encrypted = self._aes.end()
        result = BytesIO()
        result.write(base64.urlsafe_b64encode(preamble))
        result.write(' ')
        result.write(base64.urlsafe_b64encode(encrypted + self.tag))
        return defer.succeed(result)

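# A minimal, runnable sketch of the production pattern the class above
# relies on (the HashingSink consumer is illustrative, not part of the
# project): any object with a write() method can act as the sink for
# FileBodyProducer, which hands it at most readSize bytes (here 2**16, the
# 65536-byte chunks the docstring mentions) per cooperator call.
import hashlib
from io import BytesIO

from twisted.internet.task import react
from twisted.web.client import FileBodyProducer


class HashingSink(object):
    """Consumer that hashes whatever is produced into it."""
    def __init__(self):
        self._digest = hashlib.sha256()

    def write(self, data):
        self._digest.update(data)

    def hexdigest(self):
        return self._digest.hexdigest()


def main(reactor):
    sink = HashingSink()
    producer = FileBodyProducer(BytesIO(b"x" * 200000), readSize=2**16)
    d = producer.startProducing(sink)
    d.addCallback(lambda _: print(sink.hexdigest()))
    return d


if __name__ == '__main__':
    react(main)
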
def async_send(self, url, data, headers, success_cb, failure_cb):
    d = self._agent.request(
        b"POST", url,
        bodyProducer=FileBodyProducer(io.BytesIO(data)),
        headers=Headers(dict((k, [v]) for k, v in headers.items())))

    def on_failure(failure):
        ex = failure.check(ResponseNeverReceived)
        if ex:
            # ResponseNeverReceived wraps the actual error(s).
            failure_cb([f.value for f in failure.value.reasons])
        else:
            failure_cb(failure.value)

    def on_success(response):
        """
        Success only means that the request succeeded, *not* that the
        actual submission was successful.
        """
        if response.code == 200:
            success_cb()
        else:
            def on_error_body(body):
                failure_cb(Exception(response.code, response.phrase, body))
            return readBody(response).addCallback(on_error_body)

    d.addCallback(on_success).addErrback(on_failure)

def test_write_cannot_exceed_quota(self, isfile):
    isfile.return_value = False
    backend = _blobs.FilesystemBlobsBackend(blobs_path=self.tempdir)
    backend.get_total_storage = lambda x: defer.succeed(100)
    backend.quota = 90
    with pytest.raises(_blobs.QuotaExceeded):
        producer = FileBodyProducer(io.BytesIO('a' * 100))
        yield backend.write_blob('user', 'blob_id', producer)

def async_send(self, data, headers, success_cb, failure_cb):
    d = self._agent.request(
        b"POST", self._url,
        bodyProducer=FileBodyProducer(io.BytesIO(data)),
        headers=Headers(dict((k, [v]) for k, v in headers.items())))
    d.addCallback(lambda r: success_cb())
    d.addErrback(lambda f: failure_cb(f.value))

def put(self, blob_id, blob_fd, size=None, namespace=''):
    logger.info("Saving blob in local database...")
    insert = 'INSERT INTO blobs (blob_id, namespace, payload) '
    insert += 'VALUES (?, ?, zeroblob(?))'
    values = (blob_id, namespace, size)
    irow = yield self.dbpool.insertAndGetLastRowid(insert, values)
    handle = yield self.dbpool.blob('blobs', 'payload', irow, 1)
    blob_fd.seek(0)
    # XXX I have to copy the buffer here so that I'm able to
    # return a non-closed file to the caller (blobmanager.get)
    # FIXME should remove this duplication!
    # have a look at how treq copes with closing the handle
    # for uploading a file
    producer = FileBodyProducer(blob_fd)
    done = yield producer.startProducing(handle)
    logger.info("Finished saving blob in local database.")
    defer.returnValue(done)

def request(self, api_call, data):
    agent = Agent(self.reactor)
    header = {
        'User-Agent': [self.agent],
        'content-type': ['application/x-www-form-urlencoded']
    }
    data = urlencode(data)
    return agent.request(
        'POST', self._api[api_call],
        Headers(header),
        FileBodyProducer(StringIO(data)))

def close(self):
    self.length = self.len_so_far
    self.blob_hash = self._hashsum.hexdigest()
    if self.blob_hash and self._is_open and self.length > 0:
        # do not save 0 length files (empty tail blob in streams)
        # or if it's been closed already
        self.buffer.seek(0)
        out_path = os.path.join(self.blob_dir, self.blob_hash)
        producer = FileBodyProducer(self.buffer)
        yield producer.startProducing(open(out_path, 'wb'))
    self._is_open = False
    if self.length > 0:
        defer.returnValue(self.blob_hash)
    else:
        # 0 length files (empty tail blob in streams)
        # must return None as their blob_hash for
        # it to be saved properly by EncryptedFileMetadataManagers
        defer.returnValue(None)

class Protocol(CommonProtocol, Fysom):
    message_dispatcher = MessageDispatcher().register(
        Connect, Quit, SendChat, RequestFileTransfer
    )
    async_transitions = {'connect', 'send_file'}

    def __init__(self, client_server_proto, user_name, is_initiator=False,
                 file_receive_path=None):
        CommonProtocol.__init__(self)
        Fysom.__init__(self, initial='not_connected', events=[
            # event / from / to
            ('connect', 'not_connected', 'connected'),
            ('disconnect', '*', 'done'),
            ('accept_connection', 'not_connected', 'connected'),
            ('send_chat', 'connected', 'connected'),
            ('receive_chat', 'connected', 'connected'),
            ('send_file', 'connected', 'sending_file'),
            ('receive_file', 'connected', 'receiving_file'),
            ('send_file_success', 'sending_file', 'connected'),
            ('send_file_failure', 'sending_file', 'connected'),
            ('receive_file_success', 'receiving_file', 'connected'),
            ('receive_file_failure', 'receiving_file', 'connected')
        ])
        self.client_server_proto = client_server_proto
        self.other_user_name = user_name
        self.transfer_file = None
        self.file_producer = None
        self.file_consumer = None
        self.is_initiator = is_initiator
        self.receive_path = file_receive_path
        if not self.receive_path:
            self.receive_path = os.path.abspath(os.getcwd())

    def on_message_received(self, message):
        for msg_cls, action in {
            Connect: lambda m: self.accept_connection(),
            Quit: lambda m: self.disconnect(),
            RequestFileTransfer:
                lambda m: self.receive_file(TransferFile.from_message(m)),
            SendChat: lambda m: self.receive_chat(m.message)
        }.items():
            if isinstance(message, msg_cls):
                action(message)
                return

    def rawDataReceived(self, data):
        assert self.file_consumer is not None
        self.file_consumer.write(data)

    def on_before_connect(self, _):
        def on_response(response):
            if self.check_response_error(response):
                self.log("Connection rejected")
                self.cancel_transition()
                self.disconnect()
            else:
                self.log("Connected")
                self.transition()
        self.send_message(Connect(), on_response)

    def on_accept_connection(self, _):
        self.log("Accepting incoming connection")
        self.send_response({})

    def on_before_send_chat(self, event):
        message = event.args[0]

        def on_response(response):
            if self.check_response_error(response):
                self.log("Received negative chat ack")
            else:
                self.log("Received positive chat ack")
        self.send_message(SendChat(message), on_response)

    def on_receive_chat(self, event):
        self.log("Received chat message: '{message}'", message=event.args[0])
        self.send_response({})

    def open_transfer_file_read(self, transfer):
        try:
            fp = open(transfer.path, 'rb')
            self.file_producer = FileBodyProducer(
                fp, readSize=transfer.block_size)
            self.transfer_file = transfer
        except IOError:
            return False
        return True

    def on_before_send_file(self, event):
        transfer = event.args[0]

        def on_response(response):
            if self.check_response_error(response):
                self.log("Received error after file transfer request")
            elif response.get('result') != 'confirmed':
                self.log("File transfer request denied")
            else:
                self.log("File transfer request accepted, starting")
                if self.open_transfer_file_read(transfer):
                    self.transition()
                    return
            self.cancel_transition()
        self.send_message(transfer.to_message(), on_response).addErrback(
            lambda _: self.cancel_transition())

    def on_enter_sending_file(self, _):
        assert self.transfer_file is not None
        assert self.file_producer is not None
        self.setRawMode()
        d = self.file_producer.startProducing(self.transport)

        def on_success(_):
            self.log("File sent successfully")
            self.send_file_success()

        def on_failure(failure):
            failure.trap(Exception)
            self.log("File send failed: {e}", e=failure)
            self.send_file_failure()
        d.addCallbacks(on_success, on_failure)

    def on_leave_sending_file(self, _):
        self.transfer_file = None
        self.file_producer.stopProducing()
        self.file_producer = None
        self.setLineMode()

    def open_transfer_file_write(self, transfer):
        try:
            fp = open(transfer.path, 'wb')
        except (OSError, IOError) as e:
            self.log("Failed to open file for writing: {e}", e=e)
            return False
        try:
            self.file_consumer = FileConsumer(fp, transfer.size)
            self.transfer_file = transfer
        except (IOError, OSError) as e:
            self.log("Failed allocating {size} byte file for transfer: {e}",
                     size=transfer.size, e=e)
            fp.close()
            return False
        return True

    def on_before_receive_file(self, event):
        transfer = event.args[0]
        path = os.path.join(self.receive_path, transfer.name)

        def generate_unique_path(initial_path):
            filename, ext = os.path.splitext(initial_path)
            for n in itertools.count():
                yield "{0}-{1}{2}".format(filename, n, ext)

        if os.path.exists(path):
            for path in generate_unique_path(path):
                if not os.path.exists(path):
                    break
        transfer = transfer._replace(path=path)
        if not self.open_transfer_file_write(transfer):
            self.write_response({'result': 'rejected'})
            return False
        self.log("Receiving file as {path}", path=path)
        self.send_response({'result': 'confirmed'})

    def on_enter_receiving_file(self, _):
        assert self.transfer_file is not None
        assert self.file_consumer is not None
        self.setRawMode()
        d = self.file_consumer.registerProducer(self, streaming=True)

        def on_success(_):
            self.log("File received successfully")
            self.receive_file_success()

        def on_failure(failure):
            failure.trap(Exception)
            self.log("File receive failed: {e}", e=failure)
            self.receive_file_failure()
        d.addCallbacks(on_success, on_failure)

    def on_leave_receiving_file(self, _):
        self.transfer_file = None
        self.file_consumer.finish()
        self.file_consumer = None
        self.setLineMode()

    def connectionMade(self):
        if self.is_initiator:
            self.connect()

    def connectionLost(self, reason=connectionDone):
        self.cancel_transition()
        self.disconnect()

    def on_enter_done(self, _):
        if self.current == 'receiving_file':
            self.receive_file_failure()
        elif self.current == 'sending_file':
            self.send_file_failure()
        if not self.transport_connected:
            return
        self.send_message(Quit()).addBoth(
            self.transport.loseConnection)

class BlobDecryptor(object):
    """
    Decrypts an encrypted blob associated with a given Document.

    Will raise an exception if the blob doesn't have the expected structure,
    or if the GCM tag doesn't verify.
    """

    def __init__(self, doc_info, ciphertext_fd, result=None,
                 secret=None, armor=True, start_stream=True, tag=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self.fd = ciphertext_fd
        self.armor = armor
        self._producer = None
        self.result = result or BytesIO()
        sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self.size = None
        self.tag = None

        preamble, iv = self._consume_preamble()
        soledad_assert(preamble)
        soledad_assert(iv)

        self._aes = AESWriter(sym_key, iv, self.result, tag=tag or self.tag)
        self._aes.authenticate(preamble)
        if start_stream:
            self._start_stream()

    @property
    def decrypted_content_size(self):
        return self._aes.written

    def _start_stream(self):
        self._producer = FileBodyProducer(self.fd, readSize=2**16)

    def _consume_preamble(self):
        """
        Consume the preamble and write the remaining bytes as ciphertext.

        This function is called during a stream, and the file descriptor
        may hold both parts at that point, so we consume only the preamble
        and keep the remaining bytes.
        """
        self.fd.seek(0)
        try:
            parts = self.fd.getvalue().split(SEPARATOR, 1)
            encoded_preamble = base64.urlsafe_b64decode(parts[0])
            if len(parts) == 2:
                ciphertext = parts[1]
                if self.armor:
                    ciphertext = base64.urlsafe_b64decode(ciphertext)
                self.tag, ciphertext = ciphertext[-16:], ciphertext[:-16]
                self.fd.seek(0)
                self.fd.write(ciphertext)
                self.fd.seek(len(ciphertext))
                self.fd.truncate()
                self.fd.seek(0)
        except (TypeError, ValueError):
            raise InvalidBlob
        try:
            preamble = decode_preamble(encoded_preamble)
        except InvalidPreambleException as e:
            raise InvalidBlob(e)
        if preamble.magic != MAGIC:
            raise InvalidBlob
        # TODO check timestamp. Just as a sanity check, but for instance
        # we can refuse to process something that is in the future or
        # too far in the past (1984 would be nice, hehe)
        if preamble.scheme != ENC_SCHEME.symkey:
            raise EncryptionSchemeNotImplementedException(preamble.scheme)
        if preamble.method != ENC_METHOD.aes_256_gcm:
            method = preamble.method
            raise InvalidBlob('Invalid encryption scheme: %s' % method)
        if preamble.rev != self.rev:
            rev = preamble.rev
            msg = 'Invalid revision. Expected: %s, was: %s' % (self.rev, rev)
            raise InvalidBlob(msg)
        if preamble.doc_id != self.doc_id:
            msg = 'Invalid doc_id. Expected: %s, was: %s' % (
                self.doc_id, preamble.doc_id)
            raise InvalidBlob(msg)
        return encoded_preamble, preamble.iv

    def _end_stream(self):
        try:
            self._aes.end()
        except InvalidTag:
            raise InvalidBlob('Invalid Tag. Blob authentication failed.')
        fd = self.result
        fd.seek(0)
        return self.result

    def decrypt(self):
        """
        Starts producing decrypted data from the ciphertext data.

        :return: A deferred which will be fired when decryption ends and
            whose callback will be invoked with the resulting cleartext.
        :rtype: twisted.internet.defer.Deferred
        """
        d = self.startProducing()
        d.addCallback(lambda _: self._end_stream())
        return d

    def startProducing(self):
        if not self._producer:
            self._start_stream()
        return self._producer.startProducing(self._aes)

    def endStream(self):
        return self._end_stream()

    def write(self, data):
        self._aes.write(data)

    def close(self):
        result = self._aes.end()
        return result

class BlobEncryptor(object):
    """
    Produces encrypted data from the cleartext data associated with a given
    Document using AES-256 cipher in GCM mode.

    The production happens using Twisted's FileBodyProducer, which uses a
    Cooperator to schedule calls and can be paused/resumed. Each call takes
    at most 65536 bytes from the input.

    Both the production input and output are file descriptors, so they can
    be applied to a stream of data.
    """
    # TODO
    # This class needs further work to allow for proper streaming.
    # Right now we HAVE TO WAIT until the end of the stream before encoding
    # the result. It should be possible to do that just encoding the chunks
    # and passing them to a sink, but for that we have to encode the chunks
    # at proper alignment (3 bytes?) with b64 if armor is defined.

    def __init__(self, doc_info, content_fd, secret=None, armor=True,
                 sink=None):
        if not secret:
            raise EncryptionDecryptionError('no secret given')

        self.doc_id = doc_info.doc_id
        self.rev = doc_info.rev
        self.armor = armor
        self._content_fd = content_fd
        self._content_size = self._get_rounded_size(content_fd)
        self._producer = FileBodyProducer(content_fd, readSize=2**16)

        self.sym_key = _get_sym_key_for_doc(doc_info.doc_id, secret)
        self._aes = AESWriter(self.sym_key, _buffer=sink)
        self._aes.authenticate(self._encode_preamble())

    def _get_rounded_size(self, fd):
        """
        Returns a rounded value in order to minimize information leaks due
        to the original size being exposed.
        """
        fd.seek(0, os.SEEK_END)
        size = _ceiling(fd.tell())
        fd.seek(0)
        return size

    @property
    def iv(self):
        return self._aes.iv

    @property
    def tag(self):
        return self._aes.tag

    def encrypt(self):
        """
        Starts producing encrypted data from the cleartext data.

        :return: A deferred which will be fired when encryption ends and
            whose callback will be invoked with the resulting ciphertext.
        :rtype: twisted.internet.defer.Deferred
        """
        # XXX pass a sink to aes?
        d = self._producer.startProducing(self._aes)
        d.addCallback(lambda _: self._end_crypto_stream_and_encode_result())
        return d

    def _encode_preamble(self):
        scheme = ENC_SCHEME.symkey
        method = ENC_METHOD.aes_256_gcm
        content_size = self._content_size
        return Preamble(self.doc_id, self.rev, scheme, method,
                        iv=self.iv, content_size=content_size).encode()

    def _end_crypto_stream_and_encode_result(self):
        # TODO ---- this needs to be refactored to allow PROPER streaming
        # We should write the preamble as soon as possible,
        # Is it possible to write the AES stream as soon as it is encrypted
        # by chunks?
        # FIXME also, it needs to be able to encode chunks with base64 if
        # armor
        raw_preamble, encrypted = self._aes.end()
        result = BytesIO()
        result.write(base64.urlsafe_b64encode(raw_preamble))
        result.write(SEPARATOR)
        if self.armor:
            result.write(base64.urlsafe_b64encode(encrypted + self.tag))
        else:
            result.write(encrypted + self.tag)
        result.seek(0)
        return defer.succeed(result)

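# Side note on the alignment question in the TODO above: base64 maps each
# 3-byte input group to 4 output characters, so chunked armoring works only
# if every chunk except the last has a length that is a multiple of 3. The
# chunked_b64 helper below is illustrative (not part of the project) and
# just demonstrates that invariant.
import base64

def chunked_b64(chunks):
    # ''.join of per-chunk encodings equals encoding the concatenation,
    # provided each non-final chunk length is a multiple of 3.
    return b''.join(base64.urlsafe_b64encode(c) for c in chunks)

data = b"0123456789abcde"
assert chunked_b64([data[:6], data[6:12], data[12:]]) == \
    base64.urlsafe_b64encode(data)
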
def stopProducing(self):
    try:
        FileBodyProducer.stopProducing(self)
    except task.TaskStopped:
        pass

def __init__(self, value):
    self._value = value
    FileBodyProducer.__init__(self, BytesIO(value))

def registerWithConsumer(consumer):
    producer = FileBodyProducer(fobj, **kw)
    d = producer.startProducing(consumer)
    d.addCallback(lambda ign: consumer.unregisterProducer())
    d.addErrback(log.err, 'error producing file body')
    consumer.registerProducer(producer, True)

def __init__(self, string):
    if isinstance(string, UnicodeType):
        string = string.encode(CHARSET)
    FileBodyProducer.__init__(self, StringIO(string))

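# Hedged usage note for the small wrapper subclasses above: FileBodyProducer
# itself only accepts file-like objects, which is why in-memory payloads are
# wrapped in BytesIO/StringIO first. Wrapping at the call site works the
# same way; the sketch below is illustrative.
from io import BytesIO
from twisted.web.client import FileBodyProducer

body = FileBodyProducer(BytesIO(b'{"key": "value"}'))
# body can now be passed as the bodyProducer argument to Agent.request().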