def setUp(self):
    """Seed database/instance IDs and build the fixtures each test needs."""
    # Names used by tests that exercise rename behaviour.
    self.original_name = "ralphie"
    self.new_name = "rafael"
    self.range = 10
    # A database ID row must exist before a current instance ID can be derived.
    DatabaseIDModel.objects.create()
    InstanceIDModel.get_or_create_current_instance()
    self.mc = MorangoProfileController('facilitydata')
def check_database_is_migrated():
    """ Use a check that the database instance id model is initialized to check if the database is in a proper state to be used. This must only be run after django initialization. """
    apps.check_apps_ready()
    from django.db import connection
    from morango.models import InstanceIDModel
    try:
        # A successful query proves the schema is present and the DB reachable.
        InstanceIDModel.get_or_create_current_instance()[0]
        connection.close()
        return
    except OperationalError:
        # Schema is missing or unusable; attempt to migrate in place.
        try:
            migrate_databases()
            return
        except Exception as e:
            # Migration also failed; fall through to the exit below.
            logging.error(
                "Tried to migrate the database but another error occurred: {}".
                format(e))
    except Exception as e:
        # Any non-schema error means the database could not be reached at all.
        logging.error(
            "Tried to check that the database was accessible and an error occurred: {}"
            .format(e))
    # Reached only when neither the probe nor the migration succeeded.
    sys.exit(1)
def test_id_changes_id_hash_changes(self):
    """Regenerating the instance ID must change the reported instance hash."""
    previous_hash = self.m_info.data['instance_hash']
    # Fake a platform change so that a new instance ID gets generated.
    with mock.patch('platform.platform', return_value='platform'):
        InstanceIDModel.get_or_create_current_instance()
    refreshed = self.client.get(
        reverse('morangoinfo-detail', kwargs={"pk": 1}), format='json')
    self.assertNotEqual(refreshed.data['instance_hash'], previous_hash)
def create_sync_session(self, client_cert, server_cert):
    """
    Negotiate a new sync session with the remote server and return a
    SyncClient bound to the locally created SyncSession record.

    Raises CertificateSignatureInvalid if the server fails to prove
    possession of the server certificate's private key.
    """
    # if server cert does not exist locally, retrieve it from server
    if not Certificate.objects.filter(id=server_cert.id).exists():
        self._get_certificate_chain(server_cert)

    # request the server for a one-time-use nonce
    nonce_resp = self._request(api_urls.NONCE, method="POST")
    nonce = json.loads(nonce_resp.content.decode())["id"]

    # if no hostname then url is actually an ip
    url = urlparse(self.base_url)
    hostname = url.hostname or self.base_url
    port = url.port or (80 if url.scheme == 'http' else 443)

    # prepare the data to send in the syncsession creation request
    data = {
        "id": uuid.uuid4().hex,
        "server_certificate_id": server_cert.id,
        "client_certificate_id": client_cert.id,
        "profile": client_cert.profile,
        "certificate_chain": json.dumps(CertificateSerializer(client_cert.get_ancestors(include_self=True), many=True).data),
        "connection_path": self.base_url,
        "instance": json.dumps(InstanceIDSerializer(InstanceIDModel.get_or_create_current_instance()[0]).data),
        "nonce": nonce,
        "client_ip": _get_client_ip_for_server(hostname, port),
        "server_ip": _get_server_ip(hostname),
    }

    # sign the nonce/ID combo to attach to the request
    message = "{nonce}:{id}".format(**data)
    data["signature"] = client_cert.sign(message)

    # Sync Session creation request
    session_resp = self._request(api_urls.SYNCSESSION, method="POST", data=data)

    # check that the nonce/id were properly signed by the server cert
    if not server_cert.verify(message, session_resp.json().get("signature")):
        raise CertificateSignatureInvalid()

    # build the data to be used for creating our own syncsession
    data = {
        "id": data['id'],
        "start_timestamp": timezone.now(),
        "last_activity_timestamp": timezone.now(),
        "active": True,
        "is_server": False,
        "client_certificate": client_cert,
        "server_certificate": server_cert,
        "profile": client_cert.profile,
        "connection_kind": "network",
        "connection_path": self.base_url,
        "client_ip": data['client_ip'],
        "server_ip": data['server_ip'],
        "client_instance": json.dumps(InstanceIDSerializer(InstanceIDModel.get_or_create_current_instance()[0]).data),
        # Server may omit its instance info; default to an empty JSON object.
        "server_instance": session_resp.json().get("server_instance") or "{}",
    }
    sync_session = SyncSession.objects.create(**data)
    return SyncClient(self, sync_session)
def perform_ping(self, server):
    """POST anonymized usage/telemetry data about this device to ``server``."""
    instance, _ = InstanceIDModel.get_or_create_current_instance()
    settings_row = DeviceSettings.objects.first()
    language = settings_row.language_id if settings_row else ""
    uptime_minutes = int((datetime.now() - self.started).total_seconds() / 60)
    payload = {
        "instance_id": instance.id,
        "version": kolibri.__version__,
        "mode": os.environ.get("KOLIBRI_RUN_MODE", ""),
        "platform": instance.platform,
        "sysversion": instance.sysversion,
        "database_id": instance.database.id,
        "system_id": instance.system_id,
        "node_id": instance.node_id,
        "language": language,
        "uptime": uptime_minutes,
        # possibly add: channels, user count, dataset ids, high-level stats?
    }
    body = json.dumps(payload)
    logging.info("data: {}".format(body))
    response = requests.post(server, data=body, timeout=60)
    response.raise_for_status()
    return json.loads(response.content or "{}")
def get_initial_syncsession_data_for_request(self):
    """Build and sign the payload used to create a syncsession via the API."""
    # fetch a nonce value to use in creating the syncsession
    nonce_response = self.client.post(reverse('nonces-list'), {}, format='json')
    nonce = json.loads(nonce_response.content.decode())["id"]
    cert_chain = CertificateSerializer(
        self.sub_subset_cert1_with_key.get_ancestors(include_self=True),
        many=True).data
    instance_info = InstanceIDSerializer(
        InstanceIDModel.get_or_create_current_instance()[0]).data
    # prepare the data to send in the syncsession creation request
    data = {
        "id": uuid.uuid4().hex,
        "server_certificate_id": self.root_cert1_with_key.id,
        "client_certificate_id": self.sub_subset_cert1_with_key.id,
        "certificate_chain": json.dumps(cert_chain),
        "connection_path": "http://127.0.0.1:8000",
        "instance": json.dumps(instance_info),
        "nonce": nonce,
    }
    # sign the nonce/ID combo to attach to the request
    data["signature"] = self.sub_subset_cert1_with_key.sign(
        "{nonce}:{id}".format(**data))
    return data
def setUp(self):
    """Create base IDs, serialize one facility, and remember its counter row."""
    DatabaseIDModel.objects.create()
    # Keep the current instance ID around for assertions in the tests.
    self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
    self.mc = MorangoProfileController('facilitydata')
    self.fac1 = FacilityModelFactory(name='school')
    self.mc.serialize_into_store()
    self.old_rmc = RecordMaxCounter.objects.first()
def get(self, request, format=None):
    """Assemble device/server diagnostics and return them as a Response."""
    info = {'version': kolibri.__version__}
    _, urls = get_urls()
    if not urls:
        # Will not return anything when running the debug server, so at least return the current URL
        urls = [request.build_absolute_uri('/')]
    # Prefer externally reachable URLs when any exist.
    non_local = [u for u in urls if '127.0.0.1' not in u and 'localhost' not in u]
    if non_local:
        urls = non_local
    info['urls'] = urls
    if settings.DATABASES['default']['ENGINE'].endswith('sqlite3'):
        # Only sqlite is file backed, so only then is there a path to report.
        info['database_path'] = settings.DATABASES['default']['NAME']
    instance_model = InstanceIDModel.get_or_create_current_instance()[0]
    info['device_name'] = instance_model.hostname
    info['device_id'] = instance_model.id
    info['os'] = instance_model.platform
    info['content_storage_free_space'] = get_free_space()
    # Localized server time; the named timezone is reported separately since
    # the time value only carries the offset.
    info['server_time'] = local_now()
    info['server_timezone'] = settings.TIME_ZONE
    return Response(info)
def perform_ping(started, server=DEFAULT_SERVER_URL):
    """Send a compressed telemetry ping to the pingback server and return the decoded reply."""
    url = urljoin(server, "/api/v1/pingback")
    instance, _ = InstanceIDModel.get_or_create_current_instance()
    language = get_device_setting("language_id", "")
    try:
        timezone = get_current_timezone().zone
    except Exception:
        # Best effort only; an unknown timezone is reported as empty.
        timezone = ""
    uptime_minutes = int((datetime.datetime.now() - started).total_seconds() / 60)
    data = {
        "instance_id": instance.id,
        "version": kolibri.__version__,
        "mode": conf.OPTIONS["Deployment"]["RUN_MODE"],
        "platform": instance.platform,
        "sysversion": instance.sysversion,
        "database_id": instance.database.id,
        "system_id": instance.system_id,
        "node_id": instance.node_id,
        "language": language,
        "timezone": timezone,
        "uptime": uptime_minutes,
        "timestamp": localtime(),
        "installer": installation_type(),
    }
    logger.debug("Pingback data: {}".format(data))
    response = requests.post(url, data=dump_zipped_json(data), timeout=60)
    response.raise_for_status()
    return json.loads(response.content.decode() or "{}")
def test_device_id(self):
    """The deviceinfo endpoint must report the current morango instance id."""
    response = self.client.get(reverse("kolibri:core:deviceinfo"), format="json")
    current_instance = InstanceIDModel.get_or_create_current_instance()[0]
    self.assertEqual(response.data["device_id"], current_instance.id)
def create_dummy_store_data():
    """
    Build a store fixture spanning two instance IDs and return a dict of the
    created objects plus the controllers used to manipulate them.

    The "counter is at N" comments track the per-instance serialization
    counter, which several dequeuing tests depend on.
    """
    data = {}
    DatabaseIDModel.objects.create()
    (data['group1_id'], _) = InstanceIDModel.get_or_create_current_instance()  # counter is at 0
    # create controllers for app/store/buffer operations
    data['mc'] = MorangoProfileController('facilitydata')
    data['sc'] = SyncClient(None, 'host')
    session = SyncSession.objects.create(id=uuid.uuid4().hex, profile="facilitydata", last_activity_timestamp=timezone.now())
    data['sc'].current_transfer_session = TransferSession.objects.create(id=uuid.uuid4().hex, sync_session=session, push=True, last_activity_timestamp=timezone.now())
    data['mc'].serialize_into_store()  # counter is at 1
    # create group of facilities and first serialization
    data['group1_c1'] = [FacilityFactory() for _ in range(5)]
    data['mc'].serialize_into_store()  # counter is at 2
    # create group of facilities and second serialization
    data['group1_c2'] = [FacilityFactory() for _ in range(5)]
    # create users and logs associated with user
    data['user1'] = MyUser.objects.create(username='******')
    data['user1_sumlogs'] = [SummaryLog.objects.create(user=data['user1']) for _ in range(5)]
    data['mc'].serialize_into_store()  # counter is at 3
    # create new instance id and group of facilities
    # NOTE(review): a different platform string forces a fresh instance ID,
    # resetting the serialization counter for records saved afterwards.
    with mock.patch('platform.platform', return_value='plataforma'):
        (data['group2_id'], _) = InstanceIDModel.get_or_create_current_instance()  # new counter is at 0
    data['mc'].serialize_into_store()  # new counter is at 1
    data['group2_c1'] = [FacilityFactory() for _ in range(5)]
    # create users and logs associated with user
    data['user2'] = MyUser.objects.create(username='******')
    data['user2_sumlogs'] = [SummaryLog.objects.create(user=data['user2']) for _ in range(5)]
    data['user2_interlogs'] = [InteractionLog.objects.create(user=data['user2']) for _ in range(5)]
    data['user3'] = MyUser.objects.create(username='******')
    data['user3_sumlogs'] = [SummaryLog.objects.create(user=data['user3']) for _ in range(5)]
    data['user3_interlogs'] = [InteractionLog.objects.create(user=data['user3']) for _ in range(5)]
    data['mc'].serialize_into_store()  # new counter is at 2
    # A record with a bad partition, used to exercise error paths.
    data['user4'] = MyUser.objects.create(username='******', _morango_partition='badpartition')
    data['mc'].serialize_into_store()  # new counter is at 3
    return data
def setUp(self):
    """Wire up a SyncClient with an active push transfer session."""
    sync_session = SyncSession.objects.create(
        id=uuid.uuid4().hex,
        profile="facilitydata",
        last_activity_timestamp=timezone.now())
    xfer_session = TransferSession.objects.create(
        id=uuid.uuid4().hex,
        sync_session=sync_session,
        filter='partition',
        push=True,
        last_activity_timestamp=timezone.now(),
        records_total=3)
    network_conn = NetworkSyncConnection()
    self.syncclient = SyncClient(network_conn, sync_session)
    self.syncclient.current_transfer_session = xfer_session
    self.chunk_size = 3
    InstanceIDModel.get_or_create_current_instance()
def test_info_endpoint(self):
    """The info endpoint must echo app identity, versions, and device facts."""
    instance_model = InstanceIDModel.get_or_create_current_instance()[0]
    response = self.client.get(reverse("kolibri:core:info-list"))
    expectations = {
        "application": "kolibri",
        "kolibri_version": kolibri.__version__,
        "instance_id": instance_model.id,
        "device_name": instance_model.hostname,
        "operating_system": platform.system(),
    }
    for key, expected in expectations.items():
        self.assertEqual(response.data[key], expected)
def ready(self):
    """
    App-registry hook: ensure the database ID and current instance ID rows
    exist, then register the models morango should sync.
    """
    from morango.models import DatabaseIDModel, InstanceIDModel
    from .signals import add_to_deleted_models  # noqa: F401
    # NOTE: Warning: https://docs.djangoproject.com/en/1.10/ref/applications/#django.apps.AppConfig.ready
    # its recommended not to execute queries in this method, but we are producing the same result after the first call, so its OK
    # call this on app load up to get most recent system config settings
    try:
        # Use .exists() instead of evaluating the whole queryset: we only
        # need to know whether any row is present, not fetch every row.
        if not DatabaseIDModel.objects.all().exists():
            DatabaseIDModel.objects.create()
        InstanceIDModel.get_or_create_current_instance()
    # we catch this error in case the database has not been migrated, b/c we can't query it until its been created
    except (OperationalError, ProgrammingError):
        pass
    # add models to be synced by profile
    add_syncable_models()
def setUp(self):
    """Populate the store with ``self.range`` serialized facility records."""
    self.current_id, _ = InstanceIDModel.get_or_create_current_instance()
    self.range = 10
    self.mc = MorangoProfileController('facilitydata')
    for _ in range(self.range):
        # self.ident ends up holding the id of the last created record.
        self.ident = uuid.uuid4().hex
        StoreModelFacilityFactory(
            pk=self.ident,
            serialized=serialized_facility_factory(self.ident))
def test_patch(self):
    """PATCHing the device name must replace the hostname-derived default."""
    device_settings = DeviceSettings.objects.get()
    default_hostname = InstanceIDModel.get_or_create_current_instance()[0].hostname
    # Before the patch, the device name defaults to the instance hostname.
    self.assertEqual(device_settings.name, default_hostname)
    response = self.client.patch(
        reverse("kolibri:core:devicename"), self.device_name, format="json")
    self.assertEqual(response.data, self.device_name)
    device_settings.refresh_from_db()
    self.assertEqual(device_settings.name, self.device_name["name"])
    self.assertNotEqual(device_settings.name, default_hostname)
def test_same_node_id(self):
    """Re-seeing a previously used MAC address must reactivate that instance exclusively."""
    first_mac = 67002173923623  # fake (random) address
    other_mac = 69002173923623  # fake (random) address
    with mock.patch('uuid.getnode', return_value=first_mac):
        id_model, _ = InstanceIDModel.get_or_create_current_instance()
        ident = id_model.id
    with mock.patch('uuid.getnode', return_value=other_mac):
        id_model, _ = InstanceIDModel.get_or_create_current_instance()
    with mock.patch('uuid.getnode', return_value=first_mac):
        id_model, _ = InstanceIDModel.get_or_create_current_instance()
    # Only the original instance may be marked current.
    other_current = InstanceIDModel.objects.exclude(id=ident).filter(current=True)
    self.assertFalse(other_current.exists())
    self.assertTrue(InstanceIDModel.objects.get(id=ident).current)
def retrieve(self, request, pk=None):
    """Return identity and version metadata for this morango instance."""
    id_model, _ = InstanceIDModel.get_or_create_current_instance()
    return response.Response({
        'instance_hash': id_model.get_proquint(),
        'instance_id': id_model.id,
        'system_os': platform.system(),
        'version': morango.__version__,
    })
def check_database_is_migrated():
    """
    Probe the database by touching the instance ID model.

    Must only run after Django initialization. This does not verify that all
    migrations have been applied — Kolibri's version-change detection handles
    running migrations — it only distinguishes "not migrated" from "not
    accessible", raising DatabaseNotMigrated or DatabaseInaccessible.
    """
    from django.db import connection
    from morango.models import InstanceIDModel
    try:
        InstanceIDModel.get_or_create_current_instance()[0]
        connection.close()
        return
    except OperationalError as e:
        # The database exists but its schema isn't usable yet.
        raise DatabaseNotMigrated(db_exception=e)
    except Exception as e:
        # Anything else means the database could not be reached at all.
        raise DatabaseInaccessible(db_exception=e)
def test_creating_different_instance_ID_model(self):
    """A changed platform + MAC must yield a second, current instance ID."""
    # change system state
    with mock.patch('platform.platform', return_value='platform'):
        with mock.patch('uuid.getnode', return_value=9999999999999):  # fake (random) address
            id_model, _ = InstanceIDModel.get_or_create_current_instance()
    self.assertEqual(InstanceIDModel.objects.count(), 2)
    # assert that node id was not added
    self.assertEqual(id_model.node_id, '')
    self.assertEqual(id_model.id, InstanceIDModel.objects.get(current=True).id)
def handle_async(self, *args, **options):
    """
    Run a full facility-data sync against a remote morango server: validate
    the URL, verify the peer is a different device, establish certificates
    and a sync session, then pull and/or push and provision the device.
    """
    # validate url that is passed in
    try:
        URLValidator()((options['base_url']))
    except ValidationError:
        print('Base-url is not valid. Please retry command and enter a valid url.')
        sys.exit(1)
    # call this in case user directly syncs without migrating database
    if not ScopeDefinition.objects.filter():
        call_command("loaddata", "scopedefinitions")
    # ping server at url with info request
    info_url = urljoin(options['base_url'], 'api/morango/v1/morangoinfo/1/')
    try:
        info_resp = requests.get(info_url)
    # NOTE(review): if this is the builtin ConnectionError it will NOT catch
    # requests.exceptions.ConnectionError — confirm which name is imported here.
    except ConnectionError:
        print('Can not connect to server with base-url: {}'.format(options['base_url']))
        sys.exit(1)
    # if instance_ids are equal, this means device is trying to sync with itself, which we don't allow
    if InstanceIDModel.get_or_create_current_instance()[0].id == info_resp.json()['instance_id']:
        print('Device can not sync with itself. Please re-check base-url and try again.')
        sys.exit(1)
    controller = MorangoProfileController('facilitydata')
    with self.start_progress(total=7) as progress_update:
        network_connection = controller.create_network_connection(options['base_url'])
        progress_update(1)
        options['dataset_id'] = self.get_dataset_id(options['base_url'], options['dataset_id'])
        progress_update(1)
        client_cert, server_cert, options['username'] = self.get_client_and_server_certs(options['username'], options['password'], options['dataset_id'], network_connection)
        progress_update(1)
        sync_client = network_connection.create_sync_session(client_cert, server_cert)
        progress_update(1)
        # pull from server and push our own data to server
        if not options['no_pull']:
            sync_client.initiate_pull(Filter(options['dataset_id']))
        if not options['no_push']:
            sync_client.initiate_push(Filter(options['dataset_id']))
        progress_update(1)
        self.create_superuser_and_provision_device(options['username'], options['dataset_id'])
        progress_update(1)
        sync_client.close_sync_session()
        progress_update(1)
def test_dequeuing_merge_conflict_hard_delete(self):
    """A hard-deleted merge conflict must wipe both serialized payloads."""
    store = Store.objects.get(id=self.data['model7'])
    # Sanity-check the pre-dequeue state.
    self.assertEqual("store", store.serialized)
    self.assertEqual("store", store.conflicting_serialized_data)
    with connection.cursor() as cursor:
        current_id = InstanceIDModel.get_current_instance_and_increment_counter()
        DBBackend._dequeuing_merge_conflict_buffer(
            cursor, current_id, self.data['sc'].current_transfer_session.id)
    store.refresh_from_db()
    self.assertEqual("", store.serialized)
    self.assertEqual("", store.conflicting_serialized_data)
def test_new_rmc_for_existing_model(self):
    """Re-serializing under a new instance ID creates a matching counter row."""
    with mock.patch('platform.platform', return_value='Windows'):
        new_id, _ = InstanceIDModel.get_or_create_current_instance()
    Facility.objects.update(name='facility')
    self.mc.serialize_into_store()
    store_record = Store.objects.get(id=self.fac1.id)
    rmc = RecordMaxCounter.objects.get(
        instance_id=new_id.id, store_model_id=self.fac1.id)
    self.assertEqual(store_record.last_saved_counter, rmc.counter)
    self.assertEqual(store_record.last_saved_instance, rmc.instance_id)
def test_dequeuing_update_rmcs_last_saved_by(self):
    """Dequeuing must create RecordMaxCounter rows for the current instance."""
    preexisting = RecordMaxCounter.objects.filter(instance_id=self.current_id.id)
    self.assertFalse(preexisting.exists())
    with connection.cursor() as cursor:
        current_id = InstanceIDModel.get_current_instance_and_increment_counter()
        _dequeuing_update_rmcs_last_saved_by(
            cursor, current_id, self.data['sc'].current_transfer_session.id)
    created = RecordMaxCounter.objects.filter(instance_id=current_id.id)
    self.assertTrue(created.exists())
def list(self, request):
    """Returns metadata information about the device"""
    current_instance = InstanceIDModel.get_or_create_current_instance()[0]
    payload = {
        "application": "kolibri",
        "kolibri_version": kolibri.__version__,
        "instance_id": current_instance.id,
        "device_name": current_instance.hostname,
        "operating_system": platform.system(),
    }
    return Response(payload)
def test_last_saved_instance_updates(self):
    """Serializing after an instance-ID change must stamp the new instance."""
    FacilityModelFactory(name=self.original_name)
    self.mc.serialize_into_store()
    first_instance_id = Store.objects.first().last_saved_instance
    # Fake a platform change so a fresh instance ID is generated.
    with mock.patch('platform.platform', return_value='Windows'):
        new_id, _ = InstanceIDModel.get_or_create_current_instance()
    Facility.objects.all().update(name=self.new_name)
    self.mc.serialize_into_store()
    second_instance_id = Store.objects.first().last_saved_instance
    self.assertNotEqual(first_instance_id, second_instance_id)
    self.assertEqual(second_instance_id, new_id.id)
def test_new_rmc_for_non_existent_model(self):
    """A record created under a new instance ID gets a counter for that instance."""
    with mock.patch('platform.platform', return_value='Windows'):
        new_id, _ = InstanceIDModel.get_or_create_current_instance()
    new_fac = FacilityModelFactory(name='college')
    self.mc.serialize_into_store()
    store_record = Store.objects.get(id=new_fac.id)
    rmc = RecordMaxCounter.objects.get(
        instance_id=new_id.id, store_model_id=new_fac.id)
    self.assertNotEqual(new_id.id, self.current_id.id)
    self.assertEqual(store_record.last_saved_instance, rmc.instance_id)
    self.assertEqual(store_record.last_saved_counter, rmc.counter)
def create_syncsession(self, client_certificate=None, server_certificate=None):
    """Create a SyncSession through the API and return the persisted object."""
    client_certificate = client_certificate or self.sub_subset_cert1_with_key
    server_certificate = server_certificate or self.root_cert1_with_key
    # fetch a nonce value to use in creating the syncsession
    nonce_resp = self.client.post(reverse('nonces-list'), {}, format='json')
    nonce = json.loads(nonce_resp.content.decode())["id"]
    chain = CertificateSerializer(
        client_certificate.get_ancestors(include_self=True), many=True).data
    instance_info = InstanceIDSerializer(
        InstanceIDModel.get_or_create_current_instance()[0]).data
    # prepare the data to send in the syncsession creation request
    data = {
        "id": uuid.uuid4().hex,
        "server_certificate_id": server_certificate.id,
        "client_certificate_id": client_certificate.id,
        "profile": client_certificate.profile,
        "certificate_chain": json.dumps(chain),
        "connection_path": "http://127.0.0.1:8000",
        "instance": json.dumps(instance_info),
        "nonce": nonce,
    }
    # sign the nonce/ID combo to attach to the request
    data["signature"] = client_certificate.sign("{nonce}:{id}".format(**data))
    # make the API call to create the SyncSession
    resp = self.client.post(reverse('syncsessions-list'), data, format='json')
    self.assertEqual(resp.status_code, 201)
    return SyncSession.objects.get(id=data["id"])
def test_dequeuing_merge_conflict_buffer_rmcb_less_rmc(self):
    """Merge-conflict dequeuing must stamp the current instance and join payloads."""
    store = Store.objects.get(id=self.data['model5'])
    # Pre-conditions: record was last saved by some other instance.
    self.assertNotEqual(store.last_saved_instance, self.current_id.id)
    self.assertEqual(store.conflicting_serialized_data, "store")
    with connection.cursor() as cursor:
        current_id = InstanceIDModel.get_current_instance_and_increment_counter()
        _dequeuing_merge_conflict_buffer(
            cursor, current_id, self.data['sc'].current_transfer_session.id)
    refreshed = Store.objects.get(id=self.data['model5'])
    self.assertEqual(refreshed.last_saved_instance, current_id.id)
    self.assertEqual(refreshed.last_saved_counter, current_id.counter)
    self.assertEqual(refreshed.conflicting_serialized_data, "buffer\nstore")
def get(self, request, format=None):
    """Collect server and device diagnostics for the device-info endpoint."""
    _, urls = get_urls()
    if not urls:
        # Will not return anything when running the debug server, so at least return the current URL
        urls = [request.build_absolute_uri(OPTIONS["Deployment"]["URL_PATH_PREFIX"])]
    # Prefer externally reachable URLs when any exist.
    external_urls = [
        url for url in urls if "127.0.0.1" not in url and "localhost" not in url
    ]
    if external_urls:
        urls = external_urls
    db_engine = settings.DATABASES["default"]["ENGINE"]
    if db_engine.endswith("sqlite3"):
        # Return path to .sqlite file (usually in KOLIBRI_HOME folder)
        database_path = settings.DATABASES["default"]["NAME"]
    elif db_engine.endswith("postgresql"):
        database_path = "postgresql"
    else:
        database_path = "unknown"
    instance_model = InstanceIDModel.get_or_create_current_instance()[0]
    info = {
        "version": kolibri.__version__,
        "urls": urls,
        "database_path": database_path,
        "device_id": instance_model.id,
        "os": instance_model.platform,
        "content_storage_free_space": get_free_space(OPTIONS["Paths"]["CONTENT_DIR"]),
        # This returns the localized time for the server
        "server_time": local_now(),
        # Returns the named timezone for the server (the time above only includes the offset)
        "server_timezone": settings.TIME_ZONE,
        "installer": installation_type(),
        "python_version": "{major}.{minor}.{micro}".format(
            major=version_info.major,
            minor=version_info.minor,
            micro=version_info.micro),
    }
    return Response(info)
def run_services(port):
    """
    Start the background services that accompany the HTTP server: the task
    scheduler, the task queue workers, zeroconf registration, and signal
    handlers for clean shutdown. The ordering below is deliberate.
    """
    # Initialize the iceqube scheduler to handle scheduled tasks
    from kolibri.core.tasks.main import scheduler
    # Clear before re-scheduling so jobs are not duplicated across restarts.
    scheduler.clear_scheduler()

    # schedule the pingback job
    from kolibri.core.analytics.utils import schedule_ping
    schedule_ping()

    # schedule the vacuum job
    from kolibri.core.deviceadmin.utils import schedule_vacuum
    schedule_vacuum()

    # This is run every time the server is started to clear all the tasks
    # in the queue
    from kolibri.core.tasks.main import queue
    queue.empty()

    # Initialize the iceqube engine to handle queued tasks
    from kolibri.core.tasks.main import initialize_workers
    workers = initialize_workers()

    scheduler.start_scheduler()

    # Register the Kolibri zeroconf service so it will be discoverable on the network
    from morango.models import InstanceIDModel
    from kolibri.core.discovery.utils.network.search import register_zeroconf_service
    instance, _ = InstanceIDModel.get_or_create_current_instance()
    register_zeroconf_service(port=port, id=instance.id[:4])

    cleanup_func = partial(_cleanup_before_quitting, workers=workers)
    try:
        signal.signal(signal.SIGINT, cleanup_func)
        signal.signal(signal.SIGTERM, cleanup_func)
        logger.info("Added signal handlers for cleaning up on exit")
    except ValueError:
        # signal.signal raises ValueError when not called from the main thread.
        logger.warn("Error adding signal handlers for cleaning up on exit")
def get(self, request, format=None):
    """Report version, reachable URLs, and device/server facts."""
    info = {"version": kolibri.__version__}
    _, urls = get_urls()
    if not urls:
        # Will not return anything when running the debug server, so at least return the current URL
        urls = [request.build_absolute_uri(OPTIONS["Deployment"]["URL_PATH_PREFIX"])]
    # Prefer externally reachable URLs when any exist.
    remote_urls = [
        url for url in urls if "127.0.0.1" not in url and "localhost" not in url
    ]
    info["urls"] = remote_urls or urls
    if settings.DATABASES["default"]["ENGINE"].endswith("sqlite3"):
        # If any other database backend, will not be file backed, so no database path to return
        info["database_path"] = settings.DATABASES["default"]["NAME"]
    device = InstanceIDModel.get_or_create_current_instance()[0]
    info["device_name"] = device.hostname
    info["device_id"] = device.id
    info["os"] = device.platform
    info["content_storage_free_space"] = get_free_space(OPTIONS["Paths"]["CONTENT_DIR"])
    # This returns the localized time for the server
    info["server_time"] = local_now()
    # Returns the named timezone for the server (the time above only includes the offset)
    info["server_timezone"] = settings.TIME_ZONE
    info["installer"] = installation_type()
    return Response(info)
def test_device_id(self):
    """The deviceinfo endpoint must report the current instance id."""
    response = self.client.get(reverse('deviceinfo'), format="json")
    expected_id = InstanceIDModel.get_or_create_current_instance()[0].id
    self.assertEqual(expected_id, response.data['device_id'])
def setUp(self):
    """Build a profile controller and ensure a current instance ID exists."""
    self.controller = MorangoProfileController('facilitydata')
    InstanceIDModel.get_or_create_current_instance()