# Example 1 (score: 0)
    def test_instance_id_caching(self, *args):
        """
        Verify that the current-instance cache persists across calls and that
        passing ``clear_cache=True`` forces a fresh instance to be computed.
        """
        with EnvironmentVarGuard() as env:
            env["MORANGO_SYSTEM_ID"] = "oldmagicsysid"

            # Cache cleared: a brand-new instance record must be created.
            first, was_created = InstanceIDModel.get_or_create_current_instance(clear_cache=True)
            self.assertTrue(was_created)

            env["MORANGO_SYSTEM_ID"] = "newmagicsysid"

            # Cache still warm, so the env-var change is not picked up yet.
            cached, was_created = InstanceIDModel.get_or_create_current_instance()
            self.assertFalse(was_created)

            # Clearing the cache makes the new system ID take effect.
            fresh, was_created = InstanceIDModel.get_or_create_current_instance(clear_cache=True)
            self.assertTrue(was_created)

            # The freshly computed instance is itself cached afterwards.
            recached, was_created = InstanceIDModel.get_or_create_current_instance()
            self.assertFalse(was_created)

            self.assertEqual(first.id, cached.id)
            self.assertNotEqual(first.id, fresh.id)
            self.assertEqual(fresh.id, recached.id)
# Example 2 (score: 0)
def _dequeue_into_store(transfersession):
    """
    Takes data from the buffers and merges into the store and record max counters.

    ALGORITHM: Incrementally insert and delete on a case by case basis to ensure subsequent cases
    are not affected by previous cases.

    :param transfersession: the TransferSession whose buffered records should be
        merged into the local store
    """
    with connection.cursor() as cursor:
        # Drop buffered records/counters that are no-ops or reverse fast-forwards.
        DBBackend._dequeuing_delete_rmcb_records(cursor, transfersession.id)
        DBBackend._dequeuing_delete_buffered_records(cursor, transfersession.id)
        # Claim a fresh counter so merge-conflict resolutions are attributed
        # to this device's instance ID.
        current_id = InstanceIDModel.get_current_instance_and_increment_counter()
        DBBackend._dequeuing_merge_conflict_buffer(cursor, current_id, transfersession.id)
        DBBackend._dequeuing_merge_conflict_rmcb(cursor, transfersession.id)
        DBBackend._dequeuing_update_rmcs_last_saved_by(cursor, current_id, transfersession.id)
        DBBackend._dequeuing_delete_mc_rmcb(cursor, transfersession.id)
        DBBackend._dequeuing_delete_mc_buffer(cursor, transfersession.id)
        # What remains in the buffer are clean fast-forwards: insert them,
        # then clear out the processed buffer rows.
        DBBackend._dequeuing_insert_remaining_buffer(cursor, transfersession.id)
        DBBackend._dequeuing_insert_remaining_rmcb(cursor, transfersession.id)
        DBBackend._dequeuing_delete_remaining_rmcb(cursor, transfersession.id)
        DBBackend._dequeuing_delete_remaining_buffer(cursor, transfersession.id)
    if getattr(settings, "MORANGO_DESERIALIZE_AFTER_DEQUEUING", True):
        # we first serialize to avoid deserialization merge conflicts
        # (renamed from `filter` to avoid shadowing the builtin)
        sync_filter = transfersession.get_filter()
        _serialize_into_store(transfersession.sync_session.profile,
                              filter=sync_filter)
        _deserialize_from_store(transfersession.sync_session.profile,
                                filter=sync_filter)
 def setUp(self):
     """Seed the store with ``self.range`` serialized facility records."""
     self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
     self.range = 10
     self.mc = MorangoProfileController("facilitydata")
     for _ in range(self.range):
         self.ident = uuid.uuid4().hex
         StoreModelFacilityFactory(
             pk=self.ident,
             serialized=serialized_facility_factory(self.ident),
         )
# Example 4 (score: 0)
 def setUp(self):
     """Create a sync session, a push transfer session, and a network sync client."""
     # Make sure a current instance ID exists before any sync machinery runs.
     InstanceIDModel.get_or_create_current_instance()
     self.session = SyncSession.objects.create(
         id=uuid.uuid4().hex,
         profile="facilitydata",
         last_activity_timestamp=timezone.now(),
     )
     self.transfer_session = TransferSession.objects.create(
         id=uuid.uuid4().hex,
         sync_session=self.session,
         filter="partition",
         push=True,
         last_activity_timestamp=timezone.now(),
         records_total=3,
     )
     self.chunk_size = 3
     self.conn = NetworkSyncConnection(base_url=self.live_server_url)
     self.syncclient = self.build_client(BaseSyncClient)
# Example 5 (score: 0)
 def retrieve(self, request, pk=None):
     """Return metadata (hash, ID, OS, version, capabilities) for this Morango instance."""
     instance, _ = InstanceIDModel.get_or_create_current_instance()
     return response.Response(
         {
             "instance_hash": instance.get_proquint(),
             "instance_id": instance.id,
             "system_os": platform.system(),
             "version": morango.__version__,
             "capabilities": CAPABILITIES,
         }
     )
# Example 6 (score: 0)
    def test_same_node_id(self):
        """Returning to a previously seen MAC address reactivates the original instance ID."""
        fake_mac_a = 67002173923623  # fake (random) address
        fake_mac_b = 69002173923623  # fake (random) address

        with mock.patch("uuid.getnode", return_value=fake_mac_a):
            instance, _ = InstanceIDModel.get_or_create_current_instance()
            original_id = instance.id

        # Switch to a different (fake) MAC address...
        with mock.patch("uuid.getnode", return_value=fake_mac_b):
            instance, _ = InstanceIDModel.get_or_create_current_instance()

        # ...then back to the first one.
        with mock.patch("uuid.getnode", return_value=fake_mac_a):
            instance, _ = InstanceIDModel.get_or_create_current_instance()

        # Only the original instance ID may be flagged as current.
        self.assertFalse(
            InstanceIDModel.objects.exclude(id=original_id).filter(current=True).exists()
        )
        self.assertTrue(InstanceIDModel.objects.get(id=original_id).current)
 def test_dequeuing_update_rmcs_last_saved_by(self):
     """Dequeuing stamps record max counters with the freshly incremented instance ID."""
     # No record max counters exist for this instance before the pass runs.
     self.assertFalse(
         RecordMaxCounter.objects.filter(instance_id=self.current_id.id).exists()
     )
     transfer_id = self.data["sc"].current_transfer_session.id
     with connection.cursor() as cursor:
         current_id = InstanceIDModel.get_current_instance_and_increment_counter()
         DBBackend._dequeuing_update_rmcs_last_saved_by(cursor, current_id, transfer_id)
     # Afterwards, RMCs attributed to this instance must exist.
     self.assertTrue(
         RecordMaxCounter.objects.filter(instance_id=current_id.id).exists()
     )
 def test_dequeuing_merge_conflict_hard_delete(self):
     """A hard-deleted merge conflict wipes both serialized and conflicting data."""
     store = Store.objects.get(id=self.data["model7"])
     # Both payload fields hold data before the merge-conflict pass runs.
     self.assertEqual(store.serialized, "store")
     self.assertEqual(store.conflicting_serialized_data, "store")
     transfer_id = self.data["sc"].current_transfer_session.id
     with connection.cursor() as cursor:
         current_id = InstanceIDModel.get_current_instance_and_increment_counter()
         DBBackend._dequeuing_merge_conflict_buffer(cursor, current_id, transfer_id)
     store.refresh_from_db()
     # The hard delete blanks out both payloads.
     self.assertEqual(store.serialized, "")
     self.assertEqual(store.conflicting_serialized_data, "")
 def test_dequeuing_merge_conflict_buffer_rmcb_less_rmc(self):
     """Merge-conflict resolution updates last-saved info and appends conflicting data."""
     store = Store.objects.get(id=self.data["model5"])
     # Before the pass: saved by another instance, single conflicting payload.
     self.assertNotEqual(store.last_saved_instance, self.current_id.id)
     self.assertEqual(store.conflicting_serialized_data, "store")
     transfer_id = self.data["sc"].current_transfer_session.id
     with connection.cursor() as cursor:
         current_id = InstanceIDModel.get_current_instance_and_increment_counter()
         DBBackend._dequeuing_merge_conflict_buffer(cursor, current_id, transfer_id)
     store = Store.objects.get(id=self.data["model5"])
     # Resolution is attributed to this instance with its new counter.
     self.assertEqual(store.last_saved_instance, current_id.id)
     self.assertEqual(store.last_saved_counter, current_id.counter)
     self.assertEqual(store.conflicting_serialized_data, "buffer\nstore")
    def test_last_saved_instance_updates(self):
        """Serializing under a new instance ID re-stamps store records with that ID."""
        FacilityModelFactory(name=self.original_name)
        self.mc.serialize_into_store()
        previous_instance_id = Store.objects.first().last_saved_instance

        with EnvironmentVarGuard() as env:
            # Force a brand-new instance ID into the cache.
            env["MORANGO_SYSTEM_ID"] = "new_sys_id"
            new_id, _ = InstanceIDModel.get_or_create_current_instance(clear_cache=True)

            Facility.objects.all().update(name=self.new_name)
            self.mc.serialize_into_store()
            current_instance_id = Store.objects.first().last_saved_instance

        self.assertNotEqual(previous_instance_id, current_instance_id)
        self.assertEqual(current_instance_id, new_id.id)
    def test_new_rmc_for_existing_model(self):
        """A new instance ID gets its own RecordMaxCounter for an existing store record."""
        with EnvironmentVarGuard() as env:
            env["MORANGO_SYSTEM_ID"] = "new_sys_id"
            new_id, _ = InstanceIDModel.get_or_create_current_instance(clear_cache=True)

            Facility.objects.update(name="facility")
            self.mc.serialize_into_store()

        store_record = Store.objects.get(id=self.fac1.id)
        rmc = RecordMaxCounter.objects.get(
            instance_id=new_id.id, store_model_id=self.fac1.id
        )

        # The new RMC must agree with what was stamped onto the store record.
        self.assertEqual(rmc.counter, store_record.last_saved_counter)
        self.assertEqual(rmc.instance_id, store_record.last_saved_instance)
# Example 12 (score: 0)
    def test_consistent_with_0_4_instance_id_calculation(self, *args):
        """
        This test ensures that we don't accidentally make changes that impact how we calculate
        the instance ID, in a way that would cause instance IDs to change when they shouldn't.
        """

        from morango.models.utils import _get_database_path

        # Pin the interpreter version string, since it feeds into the ID
        # calculation. NOTE(review): this mutates the global sys module and is
        # not restored afterwards — confirm no later test depends on it.
        sys.version = "2.7.333"

        # Install a known database ID as the single "current" one.
        DatabaseIDModel.objects.all().update(current=False)
        database_id = DatabaseIDModel.objects.create(
            id="6fe445b75cea11858c00fb97bdee8878", current=True
        ).id

        # Node ID is the first 20 hex chars of SHA1("<database_id>:<mac>").
        node_id = hashlib.sha1(
            "{}:{}".format(database_id, 24359248572014).encode("utf-8")
        ).hexdigest()[:20]

        # The exact parameter dict the 0.4 calculation is expected to produce.
        # "platform"/"hostname" values presumably come from mock decorators
        # applied to this test (hence *args) — TODO confirm against the mocks.
        target = {
            "platform": "Windows 3.1",
            "hostname": "myhost",
            "sysversion": "2.7.333",
            "node_id": node_id,
            "database_id": database_id,
            "db_path": _get_database_path(),
        }

        result = get_0_4_system_parameters(database_id)

        self.assertEqual(target, result)

        calculated_id = _calculate_0_4_uuid(result)

        # Golden value: changing the 0.4 UUID algorithm breaks this assertion.
        self.assertEqual(calculated_id, "4480fda04236975d0895c0048b767647")

        InstanceIDModel.objects.all().delete()

        InstanceIDModel.objects.create(current=True, id=calculated_id, **result)

        # get_or_create must find the pre-created record rather than compute a new ID.
        instance, _ = InstanceIDModel.get_or_create_current_instance()

        self.assertEqual(calculated_id, instance.id)
# Example 13 (score: 0)
    def test_consistent_0_5_instance_id(self, *args):
        """
        If this test fails, it means we've changed the way Instance IDs are calculated in an undesirable way.
        """

        with EnvironmentVarGuard() as env:
            # The 0.5 system ID is derived from this env var when present.
            env["MORANGO_SYSTEM_ID"] = "magicsysid"

            # Install a known database ID as the single "current" one.
            DatabaseIDModel.objects.all().update(current=False)
            database_id = DatabaseIDModel.objects.create(
                id="7fe445b75cea11858c00fb97bdee8878", current=True
            ).id

            # Golden hashes for the 0.5 system-ID and MAC-address components.
            self.assertEqual(get_0_5_system_id(), "54940f560a55bbf7d86b")
            self.assertEqual(get_0_5_mac_address(), "804f4c20d3b2b5a29b95")

            instance, _ = InstanceIDModel.get_or_create_current_instance(clear_cache=True)

            # Golden instance ID derived from the components above.
            self.assertEqual(instance.id, "e45c06595d820f4581e0c82930359592")
    def setUp(self):
        """Build sync-session/transfer-session fixtures plus dummy buffer and store data."""
        settings.MORANGO_DESERIALIZE_AFTER_DEQUEUING = False
        self.data = {}
        DatabaseIDModel.objects.create()
        self.current_id, _ = InstanceIDModel.get_or_create_current_instance()

        # create controllers for app/store/buffer operations
        self.data["mc"] = MorangoProfileController("facilitydata")
        self.data["sc"] = BaseSyncClient(None, "host")
        session = SyncSession.objects.create(
            id=uuid.uuid4().hex,
            profile="",
            last_activity_timestamp=timezone.now(),
        )
        transfer_session = TransferSession.objects.create(
            id=uuid.uuid4().hex,
            sync_session=session,
            push=True,
            last_activity_timestamp=timezone.now(),
        )
        self.data["sc"].current_transfer_session = transfer_session
        # Populate the buffer/store with fixture records tied to this transfer session.
        self.data.update(create_buffer_and_store_dummy_data(transfer_session.id))
# Example 15 (score: 0)
    def test_envvar_overrides(self, *args):
        """MORANGO_SYSTEM_ID / MORANGO_NODE_ID env vars override the derived system and node IDs."""

        with EnvironmentVarGuard() as env:
            env["MORANGO_SYSTEM_ID"] = "magicsysid"
            env["MORANGO_NODE_ID"] = "magicnodeid"

            # Install a known database ID as the single "current" one.
            DatabaseIDModel.objects.all().update(current=False)
            database_id = DatabaseIDModel.objects.create(
                id="7fe445b75cea11858c00fb97bdee8878", current=True
            ).id

            system_id = get_0_5_system_id()
            node_id = get_0_5_mac_address()

            # Golden hashes of the overridden system/node IDs.
            self.assertEqual(system_id, "54940f560a55bbf7d86b")
            self.assertEqual(node_id, "9ed21d0fb4dacfa4009d")

            instance, _ = InstanceIDModel.get_or_create_current_instance(clear_cache=True)

            # Golden instance ID derived from the overridden components.
            self.assertEqual(instance.id, "9033c0cec24d8a8d906dcba416f77625")

            # The instance ID must equal the SHA2-based UUID of (database, system, node).
            expected_id = sha2_uuid(database_id, system_id, node_id)

            self.assertEqual(instance.id, expected_id)
# Example 16 (score: 0)
 def setUp(self):
     """Serialize ten freshly created facilities into the store."""
     InstanceIDModel.get_or_create_current_instance()
     for _ in range(10):
         FacilityModelFactory()
     self.mc = MorangoProfileController('facilitydata')
     self.mc.serialize_into_store()
# Example 17 (score: 0)
def _serialize_into_store(profile, filter=None):
    """
    Takes data from app layer and serializes the models into the store.

    ALGORITHM: On a per syncable model basis, we iterate through each class model and we go through 2 possible cases:

    1. If there is a store record pertaining to that app model, we update the serialized store record with
    the latest changes from the model's fields. We also update the counter's based on this device's current Instance ID.
    2. If there is no store record for this app model, we proceed to create an in memory store model and append to a list to be
    bulk created on a per class model basis.

    :param profile: name of the morango profile whose syncable models should be serialized
    :param filter: optional iterable of partition prefixes; when provided, only app
        models whose partition starts with one of the prefixes are serialized, and
        per-partition database max counters are updated instead of the global one
    """
    # ensure that we write and retrieve the counter in one go for consistency
    current_id = InstanceIDModel.get_current_instance_and_increment_counter()

    with transaction.atomic(using=USING_DB):
        # create Q objects for filtering by prefixes
        prefix_condition = None
        if filter:
            prefix_condition = functools.reduce(
                lambda x, y: x | y,
                [
                    Q(_morango_partition__startswith=prefix)
                    for prefix in filter
                ],
            )

        # filter through all models with the dirty bit turned on
        for model in syncable_models.get_models(profile):
            new_store_records = []
            new_rmc_records = []
            klass_queryset = model.objects.filter(_morango_dirty_bit=True)
            if prefix_condition:
                klass_queryset = klass_queryset.filter(prefix_condition)
            # fetch all existing store records for this batch in a single query
            store_records_dict = Store.objects.in_bulk(
                id_list=klass_queryset.values_list("id", flat=True))
            for app_model in klass_queryset:
                try:
                    # case 1: a store record already exists for this app model
                    store_model = store_records_dict[app_model.id]

                    # if store record dirty and app record dirty, append store serialized to conflicting data
                    if store_model.dirty_bit:
                        store_model.conflicting_serialized_data = (
                            store_model.serialized + "\n" +
                            store_model.conflicting_serialized_data)
                        store_model.dirty_bit = False

                    # set new serialized data on this store model
                    ser_dict = json.loads(store_model.serialized)
                    ser_dict.update(app_model.serialize())
                    store_model.serialized = DjangoJSONEncoder().encode(
                        ser_dict)

                    # create or update instance and counter on the record max counter for this store model
                    RecordMaxCounter.objects.update_or_create(
                        defaults={"counter": current_id.counter},
                        instance_id=current_id.id,
                        store_model_id=store_model.id,
                    )

                    # update last saved bys for this store model
                    store_model.last_saved_instance = current_id.id
                    store_model.last_saved_counter = current_id.counter
                    # update deleted flags in case it was previously deleted
                    store_model.deleted = False
                    store_model.hard_deleted = False

                    # update this model
                    store_model.save()

                except KeyError:
                    # case 2: no store record exists yet, so build one in memory
                    kwargs = {
                        "id":
                        app_model.id,
                        "serialized":
                        DjangoJSONEncoder().encode(app_model.serialize()),
                        "last_saved_instance":
                        current_id.id,
                        "last_saved_counter":
                        current_id.counter,
                        "model_name":
                        app_model.morango_model_name,
                        "profile":
                        app_model.morango_profile,
                        "partition":
                        app_model._morango_partition,
                        "source_id":
                        app_model._morango_source_id,
                    }
                    # check if model has FK pointing to it and add the value to a field on the store
                    self_ref_fk = _self_referential_fk(model)
                    if self_ref_fk:
                        self_ref_fk_value = getattr(app_model, self_ref_fk)
                        kwargs.update(
                            {"_self_ref_fk": self_ref_fk_value or ""})
                    # create store model and record max counter for the app model
                    new_store_records.append(Store(**kwargs))
                    new_rmc_records.append(
                        RecordMaxCounter(
                            store_model_id=app_model.id,
                            instance_id=current_id.id,
                            counter=current_id.counter,
                        ))

            # bulk create store and rmc records for this class
            Store.objects.bulk_create(new_store_records)
            RecordMaxCounter.objects.bulk_create(new_rmc_records)

            # set dirty bit to false for all instances of this model
            klass_queryset.update(update_dirty_bit_to=False)

        # get list of ids of deleted models
        deleted_ids = DeletedModels.objects.filter(
            profile=profile).values_list("id", flat=True)
        # update last_saved_bys and deleted flag of all deleted store model instances
        deleted_store_records = Store.objects.filter(id__in=deleted_ids)
        deleted_store_records.update(
            dirty_bit=False,
            deleted=True,
            last_saved_instance=current_id.id,
            last_saved_counter=current_id.counter,
        )
        # update rmcs counters for deleted models that have our instance id
        RecordMaxCounter.objects.filter(
            instance_id=current_id.id,
            store_model_id__in=deleted_ids).update(counter=current_id.counter)
        # get a list of deleted model ids that don't have an rmc for our instance id
        new_rmc_ids = deleted_store_records.exclude(
            recordmaxcounter__instance_id=current_id.id).values_list("id",
                                                                     flat=True)
        # bulk create these new rmcs
        RecordMaxCounter.objects.bulk_create([
            RecordMaxCounter(
                store_model_id=r_id,
                instance_id=current_id.id,
                counter=current_id.counter,
            ) for r_id in new_rmc_ids
        ])
        # clear deleted models table for this profile
        DeletedModels.objects.filter(profile=profile).delete()

        # handle logic for hard deletion models
        hard_deleted_ids = HardDeletedModels.objects.filter(
            profile=profile).values_list("id", flat=True)
        hard_deleted_store_records = Store.objects.filter(
            id__in=hard_deleted_ids)
        # hard deletion wipes the serialized payload and any conflict data
        hard_deleted_store_records.update(hard_deleted=True,
                                          serialized="{}",
                                          conflicting_serialized_data="")
        HardDeletedModels.objects.filter(profile=profile).delete()

        # update our own database max counters after serialization
        if not filter:
            DatabaseMaxCounter.objects.update_or_create(
                instance_id=current_id.id,
                partition="",
                defaults={"counter": current_id.counter},
            )
        else:
            for f in filter:
                DatabaseMaxCounter.objects.update_or_create(
                    instance_id=current_id.id,
                    partition=f,
                    defaults={"counter": current_id.counter},
                )
# Example 18 (score: 0)
 def test_only_one_current_instance_ID(self):
     """Creating the current instance never leaves more than one record flagged current."""
     with mock.patch("platform.platform", return_value="platform"):
         InstanceIDModel.get_or_create_current_instance()
     current_instances = InstanceIDModel.objects.filter(current=True)
     self.assertEqual(len(current_instances), 1)
# Example 19 (score: 0)
    def test_creating_same_instance_ID_model(self):
        """get_or_create must return the already-stored instance, not create a duplicate."""
        existing = InstanceIDModel.objects.first()
        fetched, _ = InstanceIDModel.get_or_create_current_instance()

        self.assertEqual(existing, fetched)
        self.assertEqual(InstanceIDModel.objects.count(), 1)
# Example 20 (score: 0)
 def setUp(self):
     """Ensure a current InstanceIDModel exists before each test runs."""
     InstanceIDModel.get_or_create_current_instance()
 def setUp(self):
     """Serialize one facility and remember its initial record max counter."""
     self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
     self.mc = MorangoProfileController("facilitydata")
     self.fac1 = FacilityModelFactory(name="school")
     self.mc.serialize_into_store()
     # Snapshot the RMC created by the first serialization for later comparison.
     self.old_rmc = RecordMaxCounter.objects.first()
 def setUp(self):
     """Set up a profile controller plus naming fixtures for rename tests."""
     InstanceIDModel.get_or_create_current_instance()
     # number of fixture records used by tests in this case
     self.range = 10
     self.mc = MorangoProfileController("facilitydata")
     # before/after names used by rename-related tests
     self.original_name = "ralphie"
     self.new_name = "rafael"
# Example 23 (score: 0)
    def create_sync_session(self, client_cert, server_cert, chunk_size=500):
        """
        Negotiate a new sync session with the remote server and mirror it locally.

        :param client_cert: our Certificate, used to sign the server's nonce
        :param server_cert: the Certificate we expect the server to hold
        :param chunk_size: number of records per transfer chunk for the returned client
        :return: a SyncSessionClient wrapping the newly created SyncSession
        :raises CertificateSignatureInvalid: if the server's signature over the
            nonce/ID combo does not verify against ``server_cert``
        """
        # if server cert does not exist locally, retrieve it from server
        if not Certificate.objects.filter(id=server_cert.id).exists():
            cert_chain_response = self._get_certificate_chain(
                params={"ancestors_of": server_cert.id})

            # upon receiving cert chain from server, we attempt to save the chain into our records
            Certificate.save_certificate_chain(cert_chain_response.json(),
                                               expected_last_id=server_cert.id)

        # request the server for a one-time-use nonce
        nonce_resp = self._get_nonce()
        nonce = nonce_resp.json()["id"]

        # if no hostname then url is actually an ip
        url = urlparse(self.base_url)
        hostname = url.hostname or self.base_url
        port = url.port or (80 if url.scheme == "http" else 443)

        # prepare the data to send in the syncsession creation request
        data = {
            "id":
            uuid.uuid4().hex,
            "server_certificate_id":
            server_cert.id,
            "client_certificate_id":
            client_cert.id,
            "profile":
            client_cert.profile,
            "certificate_chain":
            json.dumps(
                CertificateSerializer(
                    client_cert.get_ancestors(include_self=True),
                    many=True).data),
            "connection_path":
            self.base_url,
            "instance":
            json.dumps(
                InstanceIDSerializer(
                    InstanceIDModel.get_or_create_current_instance()[0]).data),
            "nonce":
            nonce,
            "client_ip":
            _get_client_ip_for_server(hostname, port),
            "server_ip":
            _get_server_ip(hostname),
        }

        # sign the nonce/ID combo to attach to the request
        message = "{nonce}:{id}".format(**data)
        data["signature"] = client_cert.sign(message)

        # Sync Session creation request
        session_resp = self._create_sync_session(data)

        # check that the nonce/id were properly signed by the server cert
        if not server_cert.verify(message,
                                  session_resp.json().get("signature")):
            raise CertificateSignatureInvalid()

        # build the data to be used for creating our own syncsession
        # (reuses the id/ips negotiated above so both sides agree)
        data = {
            "id":
            data["id"],
            "start_timestamp":
            timezone.now(),
            "last_activity_timestamp":
            timezone.now(),
            "active":
            True,
            "is_server":
            False,
            "client_certificate":
            client_cert,
            "server_certificate":
            server_cert,
            "profile":
            client_cert.profile,
            "connection_kind":
            "network",
            "connection_path":
            self.base_url,
            "client_ip":
            data["client_ip"],
            "server_ip":
            data["server_ip"],
            "client_instance":
            json.dumps(
                InstanceIDSerializer(
                    InstanceIDModel.get_or_create_current_instance()[0]).data),
            "server_instance":
            session_resp.json().get("server_instance") or "{}",
        }
        sync_session = SyncSession.objects.create(**data)
        return SyncSessionClient(self, sync_session, chunk_size=chunk_size)
    def test_dequeue_into_store(self):
        """
        End-to-end check of _dequeue_into_store: unrelated transfer sessions are
        untouched, reverse fast-forwards are skipped, merge conflicts and fast
        forwards are applied, and all buffers for this session are drained.
        """
        _dequeue_into_store(self.data["sc"].current_transfer_session)
        # ensure a record with different transfer session id is not affected
        self.assertTrue(
            Buffer.objects.filter(
                transfer_session_id=self.data["tfs_id"]).exists())
        self.assertFalse(Store.objects.filter(id=self.data["model6"]).exists())
        self.assertFalse(
            RecordMaxCounter.objects.filter(
                store_model_id=self.data["model6"],
                instance_id__in=self.data["model6_rmcb_ids"],
            ).exists())

        # ensure reverse fast forward records are not modified
        self.assertNotEqual(
            Store.objects.get(id=self.data["model1"]).serialized, "buffer")
        self.assertFalse(
            RecordMaxCounter.objects.filter(
                instance_id=self.data["model1_rmcb_ids"][1]).exists())

        # ensure records with merge conflicts are modified
        self.assertEqual(
            Store.objects.get(
                id=self.data["model2"]).conflicting_serialized_data,
            "buffer\nstore",
        )  # conflicting field is overwritten
        self.assertEqual(
            Store.objects.get(
                id=self.data["model5"]).conflicting_serialized_data,
            "buffer\nstore",
        )
        self.assertTrue(
            RecordMaxCounter.objects.filter(
                instance_id=self.data["model2_rmcb_ids"][1]).exists())
        self.assertTrue(
            RecordMaxCounter.objects.filter(
                instance_id=self.data["model5_rmcb_ids"][1]).exists())
        # merge-conflict resolutions are attributed to this device's instance ID
        self.assertEqual(
            Store.objects.get(id=self.data["model2"]).last_saved_instance,
            InstanceIDModel.get_or_create_current_instance()[0].id,
        )
        self.assertEqual(
            Store.objects.get(id=self.data["model5"]).last_saved_instance,
            InstanceIDModel.get_or_create_current_instance()[0].id,
        )

        # ensure fast forward records are modified
        self.assertEqual(
            Store.objects.get(id=self.data["model3"]).serialized,
            "buffer")  # serialized field is overwritten
        self.assertTrue(
            RecordMaxCounter.objects.filter(
                instance_id=self.data["model3_rmcb_ids"][1]).exists())
        self.assertEqual(
            Store.objects.get(id=self.data["model3"]).last_saved_instance,
            self.data["model3_rmcb_ids"][1],
        )  # last_saved_by is updated
        self.assertEqual(
            RecordMaxCounter.objects.get(
                instance_id=self.data["model3_rmcb_ids"][0],
                store_model_id=self.data["model3"],
            ).counter,
            3,
        )

        # ensure all buffer and rmcb records were deleted for this transfer session id
        self.assertFalse(
            Buffer.objects.filter(transfer_session_id=self.data["sc"].
                                  current_transfer_session.id).exists())
        self.assertFalse(
            RecordMaxCounterBuffer.objects.filter(
                transfer_session_id=self.data["sc"].current_transfer_session.id
            ).exists())
 def setUp(self):
     """Create the current instance ID and a facilitydata profile controller."""
     (self.current_id, _) = InstanceIDModel.get_or_create_current_instance()
     self.mc = MorangoProfileController("facilitydata")
# Example 26 (score: 0)
def create_dummy_store_data():
    """
    Build a two-instance store fixture: serialize facility/user data first under
    one instance ID ("group1"), then under a new instance ID ("group2"), with
    the expected counter positions noted inline.

    :return: dict of fixture objects (controllers, instance IDs, model groups)

    NOTE(review): the "******" usernames appear to have been redacted by the
    tool that scraped this example; the originals were presumably distinct
    values — confirm against the upstream source.
    """
    data = {}
    DatabaseIDModel.objects.create()
    data["group1_id"] = InstanceIDModel.get_or_create_current_instance()[
        0]  # counter is at 0

    # create controllers for app/store/buffer operations
    data["mc"] = MorangoProfileController("facilitydata")
    data["sc"] = BaseSyncClient(None, "host")
    session = SyncSession.objects.create(
        id=uuid.uuid4().hex,
        profile="facilitydata",
        last_activity_timestamp=timezone.now(),
    )
    data["sc"].current_transfer_session = TransferSession.objects.create(
        id=uuid.uuid4().hex,
        sync_session=session,
        push=True,
        last_activity_timestamp=timezone.now(),
    )

    data["mc"].serialize_into_store()  # counter is at 1
    # create group of facilities and first serialization
    data["group1_c1"] = [FacilityFactory() for _ in range(5)]
    data["mc"].serialize_into_store()  # counter is at 2

    # create group of facilities and second serialization
    data["group1_c2"] = [FacilityFactory() for _ in range(5)]

    # create users and logs associated with user
    data["user1"] = MyUser.objects.create(username="******")
    data["user1_sumlogs"] = [
        SummaryLog.objects.create(user=data["user1"]) for _ in range(5)
    ]

    data["mc"].serialize_into_store()  # counter is at 3

    # create new instance id and group of facilities
    with EnvironmentVarGuard() as env:
        env["MORANGO_SYSTEM_ID"] = "new_sys_id"

        data["group2_id"] = InstanceIDModel.get_or_create_current_instance(
            clear_cache=True)[0]  # new counter is at 0

        data["mc"].serialize_into_store()  # new counter is at 1
        data["group2_c1"] = [FacilityFactory() for _ in range(5)]

        # create users and logs associated with user
        data["user2"] = MyUser.objects.create(username="******")
        data["user2_sumlogs"] = [
            SummaryLog.objects.create(user=data["user2"]) for _ in range(5)
        ]
        data["user2_interlogs"] = [
            InteractionLog.objects.create(user=data["user2"]) for _ in range(5)
        ]

        data["user3"] = MyUser.objects.create(username="******")
        data["user3_sumlogs"] = [
            SummaryLog.objects.create(user=data["user3"]) for _ in range(5)
        ]
        data["user3_interlogs"] = [
            InteractionLog.objects.create(user=data["user3"]) for _ in range(5)
        ]

        data["mc"].serialize_into_store()  # new counter is at 2

        # user placed in a partition outside the normal sync filters
        data["user4"] = MyUser.objects.create(
            username="******", _morango_partition="badpartition")
        data["mc"].serialize_into_store()  # new counter is at 3

    return data