Example #1
    def HandleUploads(self):
        """Receive file uploads from the client."""
        if self.headers.get("Transfer-Encoding") != "chunked":
            raise IOError("Only chunked uploads are allowed.")

        # Extract request parameters.
        client_hmac = self.headers.get("x-grr-hmac")
        if not client_hmac:
            raise IOError("HMAC not provided")

        policy = self.headers.get("x-grr-policy")
        if not policy:
            raise IOError("Policy not provided")

        client_hmac = client_hmac.decode("base64")
        serialized_policy = policy.decode("base64")

        # Ensure the HMAC verifies.
        transfer.GetHMAC().Verify(serialized_policy, client_hmac)

        policy = rdf_client.UploadPolicy.FromSerializedString(
            serialized_policy)
        if rdfvalue.RDFDatetime.Now() > policy.expires:
            raise IOError("Client upload policy is too old.")

        upload_store = file_store.UploadFileStore.GetPlugin(
            config_lib.CONFIG["Frontend.upload_store"])()

        out_fd = upload_store.open_for_writing(policy.client_id,
                                               policy.filename)
        with uploads.DecryptStream(config_lib.CONFIG["PrivateKeys.server_key"],
                                   self._GetClientPublicKey(policy.client_id),
                                   out_fd) as decrypt_fd:
            total_size = 0

            # Handle chunked encoding:
            # https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
            while True:
                line = self.rfile.readline()
                # We do not support chunk extensions; ignore anything after ";".
                chunk_size = int(line.split(";")[0], 16)
                if chunk_size == 0:
                    break

                # Copy the chunk into the file store.
                self._CopyBytes(self.rfile, decrypt_fd, chunk_size)
                total_size += chunk_size

                # Each chunk is terminated by CRLF ("\r\n").
                crlf = self.rfile.read(2)
                if crlf != "\r\n":
                    raise IOError("Unable to parse chunk.")

            # Skip any trailing entity headers (terminated by an empty line).
            while True:
                header = self.rfile.readline()
                if header in ("\r\n", "\n", ""):
                    break

            # The file is all here.
            self.Send("Success: Uploaded %s" % policy.filename)
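
The while-loop above follows the chunked transfer-encoding framing from RFC 2616 section 3.6.1: each chunk is a hexadecimal size line (optionally carrying ";"-prefixed extensions), the chunk bytes, then CRLF, and a zero-size chunk terminates the body. A minimal standalone sketch of that framing, using a hypothetical ReadChunkedBody helper that is not part of GRR, might look like this:

import StringIO


def ReadChunkedBody(fd):
    """Yields the data chunks of an RFC 2616 chunked body read from fd."""
    while True:
        size_line = fd.readline()
        # Chunk extensions after ";" are ignored, mirroring the handler above.
        chunk_size = int(size_line.split(";")[0], 16)
        if chunk_size == 0:
            break
        yield fd.read(chunk_size)
        # Each chunk is terminated by CRLF.
        if fd.read(2) != "\r\n":
            raise IOError("Unable to parse chunk.")


body = StringIO.StringIO("5\r\nHello\r\n6\r\n world\r\n0\r\n\r\n")
print "".join(ReadChunkedBody(body))  # Prints "Hello world".

The handler in Example #1 does the same framing work, but streams each chunk into the decrypting file store writer instead of yielding it.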
Example #2
    def HandleUpload(self, encoding_header, encoded_policy,
                     encoded_client_hmac, data_generator):
        """Handles the upload of a file."""
        if encoding_header != "chunked":
            raise IOError("Only chunked uploads are allowed.")

        # Extract request parameters.
        if not encoded_client_hmac:
            raise IOError("HMAC not provided")

        if not encoded_policy:
            raise IOError("Policy not provided")

        client_hmac = encoded_client_hmac.decode("base64")
        serialized_policy = encoded_policy.decode("base64")

        transfer.GetHMAC().Verify(serialized_policy, client_hmac)

        policy = rdf_client.UploadPolicy.FromSerializedString(
            serialized_policy)
        if rdfvalue.RDFDatetime.Now() > policy.expires:
            raise IOError("Client upload policy is too old.")

        upload_store = file_store.UploadFileStore.GetPlugin(
            config_lib.CONFIG["Frontend.upload_store"])()

        out_fd = upload_store.open_for_writing(policy.client_id,
                                               policy.filename)
        with uploads.DecryptStream(config_lib.CONFIG["PrivateKeys.server_key"],
                                   self._GetClientPublicKey(policy.client_id),
                                   out_fd) as decrypt_fd:
            for data in data_generator:
                decrypt_fd.write(data)
Example #3
    def setUp(self):
        super(UploadTests, self).setUp()
        self.readers_private_key = crypto.RSAPrivateKey().GenerateKey()
        self.writers_private_key = crypto.RSAPrivateKey().GenerateKey()

        self.test_string = "Hello world" * 500

        self.infd = StringIO.StringIO(self.test_string)
        self.outfd = StringIO.StringIO()

        self.encrypt_wrapper = uploads.EncryptStream(
            readers_public_key=self.readers_private_key.GetPublicKey(),
            writers_private_key=self.writers_private_key,
            fd=self.infd,
            chunk_size=1024)

        self.decrypt_wrapper = uploads.DecryptStream(
            readers_private_key=self.readers_private_key,
            writers_public_key=self.writers_private_key.GetPublicKey(),
            outfd=self.outfd)
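
The two wrappers built in this fixture are symmetric: EncryptStream reads plaintext from infd and hands back ciphertext, while DecryptStream accepts ciphertext writes and emits the recovered plaintext into outfd. The round-trip check below is only a sketch, not part of the original suite; it reuses just the calls shown in this fixture and in Example #5, and it assumes that a single large read drains the encrypting wrapper and that close() flushes any buffered plaintext without closing outfd (the handlers above keep using the underlying store fd after the with block).

    def testRoundTrip(self):
        # Hypothetical companion test: whatever EncryptStream produces
        # should decrypt back to the original plaintext when the matching
        # key pair is used.
        encrypted_data = self.encrypt_wrapper.read(1024 * 1024)
        self.decrypt_wrapper.write(encrypted_data)
        # close() is assumed to flush the final chunk into self.outfd.
        self.decrypt_wrapper.close()
        self.assertEqual(self.outfd.getvalue(), self.test_string)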
Example #4
    def HandleUpload(self, encoding_header, encoded_upload_token,
                     data_generator):
        """Handles the upload of a file."""
        if encoding_header != "chunked":
            raise IOError("Only chunked uploads are allowed.")

        # Extract request parameters.
        if not encoded_upload_token:
            raise IOError("Upload token not provided")

        upload_token = rdf_client.UploadToken.FromSerializedString(
            encoded_upload_token.decode("base64"))

        if not upload_token.hmac:
            raise IOError("HMAC not provided")

        if not upload_token.encrypted_policy:
            raise IOError("Policy not provided")

        if not upload_token.iv:
            raise IOError("IV not provided")

        upload_token.VerifyHMAC()

        policy = rdf_client.UploadPolicy.FromEncryptedPolicy(
            upload_token.encrypted_policy, upload_token.iv)

        if rdfvalue.RDFDatetime.Now() > policy.expires:
            raise IOError("Client upload policy is too old.")

        upload_store = file_store.UploadFileStore.GetPlugin(
            config.CONFIG["Frontend.upload_store"])()

        filestore_fd = upload_store.CreateFileStoreFile()
        out_fd = uploads.GunzipWrapper(filestore_fd)
        with uploads.DecryptStream(config.CONFIG["PrivateKeys.server_key"],
                                   self._GetClientPublicKey(policy.client_id),
                                   out_fd) as decrypt_fd:
            for data in data_generator:
                decrypt_fd.write(data)
        return filestore_fd.Finalize()
Example #5
    def testKeyMismatch(self):
        """Checks the performance impact of reusing stolen upload tokens.

    Upload policies are HMAC'd by the server only so they can be
    grabbed from the wire and reused to perform a DOS attack. To limit
    the impact of this attack, we need to bail out as soon as possible
    once we realize we are handed a stream that was not encrypted with
    the client key that is indicated in the policy.
    """
        encrypted_data = self.encrypt_wrapper.read(1024 * 1024 * 100)

        wrong_key = crypto.RSAPrivateKey().GenerateKey()
        decrypt_wrapper = uploads.DecryptStream(
            readers_private_key=self.readers_private_key,
            writers_public_key=wrong_key.GetPublicKey(),
            outfd=self.outfd)

        # We should know after very few bytes that the key is wrong. The
        # first encrypted chunk is the serialized signature, which is 518
        # bytes in this test. Adding crypto headers gives a chunk size of
        # 570, so after 600 bytes we should definitely have bailed out.
        with self.assertRaises(crypto.VerificationError):
            decrypt_wrapper.write(encrypted_data[:600])