Example 1
    def make_buffer_get_request(self,
                                expected_status=200,
                                expected_count=None,
                                **get_params):
        """Make a GET request to the buffer endpoint. Warning: Deletes the local buffer instances before validating."""

        response = self.client.get(reverse('buffers-list'),
                                   get_params,
                                   format='json')

        self.assertEqual(response.status_code, expected_status)

        if expected_status == 200:

            t_id = get_params.get("transfer_session_id")

            if expected_count is None:
                expected_count = Buffer.objects.filter(
                    transfer_session_id=t_id).count()

            # load the returned data from JSON
            data = json.loads(response.content.decode())

            # parse out the results from a paginated set, if needed
            if isinstance(data, dict) and "results" in data:
                data = data["results"]

            # deserialize the records
            serialized_recs = BufferSerializer(data=data, many=True)

            # delete "local" buffer records to avoid uniqueness constraint failures in validation
            Buffer.objects.filter(
                transfer_session_id=t_id,
                model_uuid__in=[d["model_uuid"] for d in data]).delete()

            # ensure the push records validate
            self.assertTrue(serialized_recs.is_valid())

            # check that the correct number of buffer items was returned
            self.assertEqual(expected_count,
                             len(serialized_recs.validated_data))

            # check that each buffer record includes the expected 3 RMCB entries
            for rec in serialized_recs.validated_data:
                self.assertEqual(3, len(rec["rmcb_list"]))

            return serialized_recs
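
A minimal usage sketch for the helper above, assuming a test case whose setUp() has already created a transfer session (self.transfer_session) and buffered records with three RMCBs each; the test name and fixture attribute are illustrative, not part of the original code:

    def test_buffer_list_returns_buffered_records(self):
        # hypothetical test built on make_buffer_get_request; assumes setUp()
        # buffered records for self.transfer_session with 3 RMCBs each
        recs = self.make_buffer_get_request(
            transfer_session_id=self.transfer_session.id)
        # the helper already asserts status, count, and RMCB length; here we
        # only confirm the deserialized records are returned to the caller
        self.assertTrue(len(recs.validated_data) > 0)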
Example 2
    def _pull_records(self, chunk_size=500, callback=None):
        while self.current_transfer_session.records_transferred < self.current_transfer_session.records_total:
            buffers_resp = self.sync_connection._pull_record_chunk(chunk_size, self.current_transfer_session)

            # load the returned data from JSON
            data = buffers_resp.json()

            # parse out the results from a paginated set, if needed
            if isinstance(data, dict) and "results" in data:
                data = data["results"]

            # deserialize the records
            serialized_recs = BufferSerializer(data=data, many=True)

            # validate records
            if serialized_recs.is_valid(raise_exception=True):
                serialized_recs.save()

            # update the number of records transferred
            self.current_transfer_session.records_transferred += chunk_size
            self.current_transfer_session.save()
Example 3
    def _push_records(self, chunk_size=500, callback=None):
        # paginate buffered records so we do not load them all into memory
        buffered_records = Buffer.objects.filter(transfer_session=self.current_transfer_session)
        buffered_pages = Paginator(buffered_records, chunk_size)
        for count in buffered_pages.page_range:

            # serialize and send records to server
            serialized_recs = BufferSerializer(buffered_pages.page(count).object_list, many=True)
            self.sync_connection._push_record_chunk(serialized_recs.data)

            # update records_transferred upon successful request
            self.current_transfer_session.records_transferred += chunk_size
            self.current_transfer_session.save()
Example 4
    def make_buffer_post_request(self, buffers, expected_status=201):
        serialized_recs = BufferSerializer(buffers, many=True)

        # extract the data that is to be posted
        data = serialized_recs.data

        # delete the records from the DB so we don't conflict when we POST
        Buffer.objects.all().delete()
        RecordMaxCounterBuffer.objects.all().delete()

        response = self.client.post(reverse('buffers-list'),
                                    data,
                                    format='json')
        self.assertEqual(response.status_code, expected_status)
        if expected_status == 201:
            # check that the buffer items were created
            self.assertEqual(Buffer.objects.count(), len(buffers))
        else:
            # check that the buffer items were not created
            self.assertEqual(Buffer.objects.count(), 0)
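
A possible caller for this helper, sketched under the assumption that buffered records already exist for a transfer session created elsewhere in the test case; self.transfer_session is an illustrative fixture name:

    def test_buffer_create_succeeds(self):
        # hypothetical test; the pre-created Buffer rows and the
        # self.transfer_session fixture are assumptions, not original code
        buffers = list(Buffer.objects.filter(
            transfer_session=self.transfer_session))
        self.make_buffer_post_request(buffers, expected_status=201)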
Example 5
    def build_buffer_items(self, transfer_session, **kwargs):

        data = {
            "profile": kwargs.get("profile", 'facilitydata'),
            "serialized": kwargs.get("serialized", '{"test": 99}'),
            "deleted": kwargs.get("deleted", False),
            "last_saved_instance": kwargs.get("last_saved_instance", uuid.uuid4().hex),
            "last_saved_counter": kwargs.get("last_saved_counter", 179),
            "partition": kwargs.get("partition", 'partition'),
            "source_id": kwargs.get("source_id", uuid.uuid4().hex),
            "model_name": kwargs.get("model_name", "contentsummarylog"),
            "conflicting_serialized_data": kwargs.get("conflicting_serialized_data", ""),
            "model_uuid": kwargs.get("model_uuid", None),
            "transfer_session": transfer_session,
        }

        for i in range(self.chunk_size):
            data['source_id'] = uuid.uuid4().hex
            data["model_uuid"] = SummaryLog.compute_namespaced_id(
                data["partition"], data["source_id"], data["model_name"])
            Buffer.objects.create(**data)

        buffered_items = Buffer.objects.filter(
            transfer_session=self.syncclient.current_transfer_session)
        serialized_records = BufferSerializer(buffered_items, many=True)
        return json.dumps(serialized_records.data)
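
A sketch of how the returned JSON payload might be checked in a test, assuming self.syncclient and self.chunk_size are set up as in the example above and no other records are buffered for the session (the test name is illustrative):

    def test_build_buffer_items_payload_size(self):
        # hypothetical test; relies on the assumptions named above
        payload = self.build_buffer_items(
            self.syncclient.current_transfer_session)
        self.assertEqual(self.chunk_size, len(json.loads(payload)))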
Example 6
    def _push_records(self, callback=None):
        # paginate buffered records so we do not load them all into memory
        buffered_records = Buffer.objects.filter(
            transfer_session=self.current_transfer_session).order_by("pk")
        buffered_pages = Paginator(buffered_records, self.chunk_size)

        for count in buffered_pages.page_range:
            # serialize and send records to server
            serialized_recs = BufferSerializer(
                buffered_pages.page(count).object_list, many=True)
            self.sync_connection._push_record_chunk(serialized_recs.data)
            # update records_transferred upon successful request
            self.current_transfer_session.records_transferred = min(
                self.current_transfer_session.records_transferred +
                self.chunk_size,
                self.current_transfer_session.records_total,
            )
            self.current_transfer_session.bytes_sent = self.sync_connection.bytes_sent
            self.current_transfer_session.bytes_received = (
                self.sync_connection.bytes_received)
            self.current_transfer_session.save()

            if callback is not None:
                callback()
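
A sketch of how the callback hook might be wired up for progress reporting, assuming a method added to the same sync-client class that owns _push_records() and current_transfer_session (the wrapper and its print-based reporting are illustrative, not part of the original code):

    def push_with_progress(self):
        # hypothetical convenience wrapper around _push_records()
        def report():
            session = self.current_transfer_session
            # session counters are updated by _push_records() before each call
            print("pushed %d of %d records" % (session.records_transferred,
                                               session.records_total))

        self._push_records(callback=report)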