class GCUpdateTestCase(unittest.TestCase):
    """Functional test: a persisted object is garbage-collected after its
    session is closed (polls the mock dataClay until the object disappears).
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        # A tiny pressure fraction forces the memory manager / GC to act fast.
        Configuration.MEMMGMT_PRESSURE_FRACTION = 0.01
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(5000, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init, finish
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import WebSite, WebPage, URI
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        uri = URI("host/bsc")
        try:
            uri.make_persistent()
        except Exception:
            # FIX: was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
            # still propagate while unexpected errors remain visible in the log.
            traceback.print_exc()

        from dataclay import getRuntime
        getRuntime().close_session()

        # Check if object exists: once the session is closed nothing references
        # the object, so the GC should eventually remove it.
        while self.mock.mock.mock_dataclay.objectExists(str(uri.get_object_id())):
            print("Waiting... ")
            time.sleep(5)

        logger.debug("Test OK!")
class ExceptionsTest(unittest.TestCase):
    """Functional test: a built-in exception (IndexError) raised inside a
    persistent object's method is propagated back to the client.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        from dataclay import getRuntime
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import Person
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        p = Person()
        p.make_persistent()

        # IndexError when we try to access an invalid index.
        # FIX: the original try/except passed even when NO exception was raised
        # at all; assertRaises also fails the test in that case, and replaces
        # the `self.assertTrue(False)` anti-pattern for unexpected exceptions.
        with self.assertRaises(IndexError):
            p.raise_exception()
        print("Expected built-in Exception IndexError")

        logger.debug("Test OK!")
class RemoteCloneTest(unittest.TestCase):
    """Functional test: copying pages from one persistent WebSite to another
    yields the same number of pages on the target.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import WebSite, WebPage, URI
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.

        # Init source
        host = "bsc.es"
        web_site = WebSite(host)
        web_page = WebPage(host + "/page.html")
        web_site.add_web_page(web_page)
        web_site.make_persistent()

        # Init target
        host2 = "bsc2.es"
        web_site2 = WebSite(host2)
        web_site2.make_persistent()

        web_site2.copy_pages(web_site)
        # FIX: assertEqual instead of assertTrue(a == b) — reports both values
        # on failure instead of just "False is not true".
        self.assertEqual(len(web_site2.pages), len(web_site.pages))

        logger.debug("Test OK!")
class CheckMasterLocationTest(unittest.TestCase):
    """Functional test (2 nodes): after replicating an object, both backends
    report the same master location — the one the object was persisted to.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import Person
        from dataclay.commonruntime.Runtime import getRuntime
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        p = Person('foo', 100)
        execution_environments = list(getRuntime().get_execution_environments_info().keys())
        # FIX: assertGreater instead of assertTrue(len(...) > 1) — clearer
        # failure output when the second node did not come up.
        self.assertGreater(len(execution_environments), 1)

        p.make_persistent(backend_id=execution_environments[0])
        p.new_replica(backend_id=execution_environments[1])

        # Both the master and the replica must agree on the master location.
        self.assertEqual(
            p.run_remote(execution_environments[0], 'getMyMasterLocation', None),
            execution_environments[0])
        self.assertEqual(
            p.run_remote(execution_environments[1], 'getMyMasterLocation', None),
            execution_environments[0])

        logger.debug("Test OK!")
class AliasEmptyTest(unittest.TestCase):
    """Functional test: make_persistent with an empty alias must raise."""

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import Person
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        alias = ""

        # Test make_persistent with an empty alias.
        # FIX: replaced the bare-`except:` + manual `worked` flag pattern with
        # assertRaises, which expresses the intent directly and does not catch
        # SystemExit/KeyboardInterrupt.
        object1 = Person(name='Nikola', age=86)
        with self.assertRaises(Exception):
            object1.make_persistent(alias)

        logger.debug("Test OK!")
class GCUpdateTestCase(unittest.TestCase):
    """Functional test: objects persisted in one client process survive a
    dataClay restart and are retrievable by alias from a second process.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        # A tiny pressure fraction forces the memory manager / GC to act fast.
        Configuration.MEMMGMT_PRESSURE_FRACTION = 0.01
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    def client_process1(self, q):
        """First client: persist 10 aliased WebSites, report their ids via q."""
        try:
            from dataclay.api import init, finish
            logger.debug('**Starting init 1**')
            init()

            # Imports. Imports must be located here in order to simulate
            # "import" order in a real scenario. VERY IMPORTANT: Imports must
            # be located AFTER init.
            from model.classes import WebSite, WebPage, URI

            # Test. From now on, the Functional Test itself.
            web_sites_ids_str = list()
            for i in range(0, 10):
                alias = "bsc%s" % str(i)
                web_site = WebSite(alias)
                try:
                    web_site.make_persistent(alias=alias)
                except Exception:
                    # FIX: was a bare `except:` — keep best-effort behavior but
                    # let BaseException propagate.
                    traceback.print_exc()
                web_sites_ids_str.append(str(web_site.get_object_id()))
            finish()
            q.put(["OK", web_sites_ids_str])
        except Exception:
            # FIX: was a bare `except:` that hid the failure reason entirely.
            traceback.print_exc()
            q.put("FAIL")

    def client_process2(self, q, web_sites_ids_str):
        """Second client: after restart, resolve aliases and compare ids."""
        try:
            from dataclay.api import init, finish
            logger.debug('**Starting init 2 **')
            init()

            # Imports. Imports must be located here in order to simulate
            # "import" order in a real scenario. VERY IMPORTANT: Imports must
            # be located AFTER init.
            from model.classes import WebSite, WebPage, URI

            for i in range(0, 10):
                web_site_2 = WebSite.get_by_alias("bsc%s" % str(i))
                self.assertEqual(web_sites_ids_str[i], str(web_site_2.get_object_id()))
            finish()
            q.put("OK")
        except Exception:
            # FIX: was a bare `except:` that hid the failure reason entirely.
            traceback.print_exc()
            q.put("FAIL")

    @pytest.mark.timeout(1000, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        logger.info('**Starting test**')
        q = Queue()

        # Phase 1: persist the objects from a child process.
        p = Process(target=self.client_process1, args=(q, ))
        p.start()
        result = q.get()
        p.join()
        self.assertEqual(result[0], "OK")
        web_sites_ids_str = result[1]

        logger.debug("Restarting dataClay")
        self.mock.mock.restartDataClay()

        # Phase 2: verify the objects from a fresh child process.
        p = Process(target=self.client_process2, args=(q, web_sites_ids_str))
        p.start()
        result = q.get()
        p.join()
        self.assertEqual(result, "OK")
        logger.info("** Test OK!")
class GCUpdateTestCase(unittest.TestCase):
    """Functional test: an update made to a volatile object AFTER the GC has
    had time to flush it to disk is still visible afterwards.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """To force GC, hack memory pressure."""
        # ToDo: the (...).Configuration is extremely ugly and completely useless
        # ToDo: @abarcelo I left it like that because I did not understand previous
        # ToDo: ExecutionEnvironmentHeapManager.GC_MEMORY_... usage
        Configuration.MEMMGMT_PRESSURE_FRACTION = 0.01
        rsrc = resource.RLIMIT_AS
        soft, hard = resource.getrlimit(rsrc)
        logger.debug('Soft limit starts as :%s', soft)
        logger.debug('Hard limit starts as :%s', hard)
        kb = 1024
        mb = 1024 * kb
        gb = 1024 * mb
        # resource.setrlimit(rsrc, (-1, hard))
        soft, hard = resource.getrlimit(rsrc)
        logger.debug('Soft limit changed to :%s', soft)
        """ PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...) """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import WebSite, WebPage, URI
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        host = "bsc.es"
        web_site = WebSite(host + "/foo/bsc.html")
        web_site.make_persistent(alias=web_site.uri.host)

        cur_host = "volatile_web"
        web_page = WebPage(cur_host)
        logger.debug("web page oid = %s", web_page.get_object_id())
        logger.debug("uri oid = %s", web_page.uri.get_object_id())
        web_site.add_web_page(web_page)

        # Sleep enough time to allow GC action.
        print("Waiting for GC action...")
        time.sleep(5)

        # Modify web page.
        web_page.uri.host = "new_volatile_web"

        # Sleep enough time to allow GC action.
        print("Waiting for GC action...")
        time.sleep(5)

        # Get web page.
        # FIX: assertEquals is a deprecated alias removed in Python 3.12 —
        # use assertEqual.
        self.assertEqual(web_page.uri.host, "new_volatile_web")

        logger.debug("Test OK!")
class TypesTest(unittest.TestCase):
    """Functional test: a Player object holding several field types (bool,
    int, str, set, dicts, nested Carrer object) persists correctly.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import Player, Carrer
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        player = Player("Leo", "Messi", 30)
        player.add_role("forward")
        player.add_skill("dribbling")

        season_stats = {"goal": 54, "presence": 52}
        carrer = Carrer()
        carrer.add_stat("2016/2017", season_stats)
        carrer.add_team("2016/2017", "Barcelona FC")
        player.add_carrer(carrer)

        logger.info("PLAYER %s , ROLES: %s AND SKILLS: %s",
                    player.personal_info, player.roles, player.skills)
        logger.info("PLAYER %s , WITH CARRER STATS %s",
                    player.personal_info, player.carrer.stats)
        logger.info("PLAYER %s , PLAYED IN %s",
                    player.personal_info, player.carrer.teams)

        sample_set = {1, 2}
        player.add_test_types(True, 2, "Str", sample_set)

        player.make_persistent()
        # NOTE(review): `is_persistent` is used without parentheses here, while
        # other tests in this file call `is_persistent()` — confirm it is a
        # property in this model; a bound method would always be truthy.
        self.assertTrue(player.is_persistent)
        self.assertTrue(player.carrer.is_persistent)

        logger.info("PLAYER %s , ROLES: %s AND SKILLS: %s",
                    player.personal_info, player.roles, player.skills)
        logger.info("PLAYER %s , WITH CARRER STATS %s",
                    player.personal_info, player.carrer.stats)
        logger.info("PLAYER %s , PLAYED IN %s TYPE %s",
                    player.personal_info, player.carrer.teams,
                    type(player.carrer.teams[0]))
        logger.info("TEST TYPES %s %s, %s %s, %s %s, %s %s",
                    type(player.a), player.a, type(player.b), player.b,
                    type(player.c), player.c, type(player.d), player.d)

        logger.debug("Test OK!")
class MakePersistentTest(unittest.TestCase):
    """Functional test: make_persistent with an attribute that the constructor
    never assigned (but is annotated) — the missing attribute reads as None.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import MissingAttributeConstructor
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.

        # Case 1: the missing attribute is assigned before persisting —
        # everything is expected to round-trip.
        complete_obj = MissingAttributeConstructor("String1")
        complete_obj.assign_missing("String2")
        complete_obj.make_persistent()  # This should work
        self.assertEqual(complete_obj.present, "String1")
        self.assertEqual(complete_obj.missing, "String2")

        # Case 2: persist an object whose annotated attribute was never set.
        incomplete_obj = MissingAttributeConstructor("String3")
        incomplete_obj.make_persistent()
        self.assertEqual(incomplete_obj.present, "String3")
        # This is a design decision of dataClay, not entirely Python compliant:
        # the never-assigned attribute reads as None instead of raising.
        self.assertIsNone(incomplete_obj.missing)

        logger.debug("Test OK!")
class VolatileMapRestartTest(unittest.TestCase):
    """Functional test: a Mapa filled with volatile Node objects in one client
    process is still readable after a dataClay restart from a second process.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    def client_process1(self, q):
        """First client: create/fetch the aliased Mapa and add one Node."""
        try:
            from dataclay.api import init, finish
            logger.info('**Starting init 1**')
            init()

            # Imports. Imports must be located here in order to simulate
            # "import" order in a real scenario. VERY IMPORTANT: Imports must
            # be located AFTER init.
            from model.classes import Mapa, Node
            self.session_initialized = True

            # Test. From now on, the Functional Test itself.
            try:
                m = Mapa.get_by_alias("mapa")
                logger.info("Already in the DB")
            except Exception:
                m = Mapa()
                m.make_persistent(alias="mapa")
                logger.info(
                    "Not found, creating Mapa and making it persistent")

            # Node with current location added by jetson
            # it should be done with get_current_location() in order to get pos1 and pos2
            n = Node(1, 1, 1, 1.5, 1.5)
            n.make_persistent()
            m.add(n)
            print("NODE created and added to Mapa")
            time.sleep(5)
            finish()
            q.put("OK")
        except Exception:
            # FIX: was a bare `except:` that hid the failure reason entirely.
            traceback.print_exc()
            q.put("FAIL")

    def client_process2(self, q):
        """Second client: after restart, read the Mapa contents back."""
        try:
            from dataclay.api import init, finish
            logger.info('**Starting init 2**')
            init()

            # Imports. Imports must be located here in order to simulate
            # "import" order in a real scenario. VERY IMPORTANT: Imports must
            # be located AFTER init.
            from model.classes import Mapa, Node
            self.session_initialized = True

            # Test. From now on, the Functional Test itself.
            m = Mapa.get_by_alias("mapa")
            logger.info("Map obtained ")
            mapa = m.mapa
            logger.info("** Getter of mapa done with num elements: %s" % str(len(mapa)))
            for nid, node in mapa.items():
                logger.info("** Found node %s" % str(nid))
            finish()
            q.put("OK")
        except Exception:
            # FIX: was a bare `except:` that hid the failure reason entirely.
            traceback.print_exc()
            q.put("FAIL")

    @pytest.mark.timeout(500, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        logger.info('**Starting test**')
        q = Queue()

        # Phase 1: populate the map from a child process.
        p = Process(target=self.client_process1, args=(q, ))
        p.start()
        result = q.get()
        p.join()
        self.assertEqual(result, "OK")

        logger.debug("Restarting dataClay")
        self.mock.mock.restartDataClay()

        # Phase 2: verify the map from a fresh child process.
        p = Process(target=self.client_process2, args=(q, ))
        p.start()
        result = q.get()
        p.join()
        self.assertEqual(result, "OK")
        logger.info("** Test OK!")
class InitializationTest(unittest.TestCase):
    """Functional test (2 nodes): execution environments are registered per
    language and per storage location without overlap. Currently skipped
    because the queried API is deprecated.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @unittest.skip(
        "skipping test because get_execution_environments_per_locations_for_ds is deprecated"
    )
    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from dataclay import getRuntime
        from dataclay.communication.grpc.messages.common.common_messages_pb2 import LANG_PYTHON, LANG_JAVA
        from dataclay.commonruntime.Settings import settings
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        logic_module = getRuntime().ready_clients["@LM"]
        session_id = settings.current_session_id
        py_envs = logic_module.get_execution_environments_info(
            session_id, LANG_PYTHON)
        java_envs = logic_module.get_execution_environments_info(
            session_id, LANG_JAVA)

        #### WARNING!!!!! get_execution_environments_per_locations_for_ds DEPRECATED!
        py_envs_per_loc = logic_module.get_execution_environments_per_locations_for_ds(
            LANG_PYTHON)
        java_envs_per_loc = logic_module.get_execution_environments_per_locations_for_ds(
            LANG_JAVA)

        # Check that EEs are correctly initialized and assigned to the right SL:
        # a Python EE must appear only among Python per-location entries, and
        # symmetrically for Java.
        for environment in py_envs:
            self.assertNotIn(environment, java_envs.values())
            self.assertIn(environment, py_envs_per_loc.values())
            self.assertNotIn(environment, java_envs_per_loc.values())
        for environment in java_envs:
            self.assertNotIn(environment, py_envs.values())
            self.assertIn(environment, java_envs_per_loc.values())
            self.assertNotIn(environment, py_envs_per_loc.values())

        logger.debug("Test OK!")
class VerifyAliasTest(unittest.TestCase):
    """Functional test: alias registration on make_persistent and removal on
    delete_alias, checked against metadata and the runtime alias cache.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import Person
        from dataclay import getRuntime
        from dataclay.commonruntime.Settings import settings
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        lm_client = getRuntime().ready_clients["@LM"]
        alias = "test_alias"

        # Test make_persistent
        person = Person(name='Nikola', age=86)
        person.make_persistent(alias)

        # Verify object_id is not null.
        # FIX: assertIsNotNone instead of assertTrue(object_id != None) —
        # identity check for None is the correct idiom and reads better.
        object_id = person.get_object_id()
        self.assertIsNotNone(object_id)

        # Check you can get person by alias without exception.
        Person.get_by_alias(alias=alias)

        # Verify that object is in DB/Cache/Metadata
        # TODO: Missing the check on DB
        metadata = lm_client.get_metadata_by_oid(settings.current_session_id, object_id)
        alias_cache = getRuntime().alias_cache
        self.assertIn(alias, metadata.aliases)
        self.assertIn(alias, alias_cache)

        # Test delete_alias
        Person.delete_alias(alias)
        self.assertRaises(Exception, Person.get_by_alias, alias=alias)

        # Verify that object is not in DB/Cache/Metadata
        metadata = lm_client.get_metadata_by_oid(settings.current_session_id, object_id)
        alias_cache = getRuntime().alias_cache
        self.assertNotIn(alias, metadata.aliases)
        self.assertNotIn(alias, alias_cache)

        logger.debug("Test OK!")
class PickleConsolidate2Test(unittest.TestCase):
    """Functional test: new_version / consolidate_version round-trip — a page
    removed in the version becomes removed in the original after consolidation.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import WebSite, WebPage, URI
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        host = "bsc.es"
        site = WebSite(host)
        page = WebPage(host + "/page.html")

        # Add the page to the site and persist the whole graph.
        site.add_web_page(page)
        self.assertIs(page, site.pages[0])
        site.make_persistent()
        self.assertTrue(site.is_persistent())
        self.assertEqual(len(site.get_all_locations()), 1)
        self.assertTrue(site.uri.is_persistent())
        self.assertEqual(len(site.uri.get_all_locations()), 1)

        # NewVersion for WebSite on the backend currently holding it.
        backend_id = list(site.get_all_locations().keys())[0]
        version_info, unloaded_version_info = site.new_version(backend_id)
        logger.debug(version_info)
        site_version = WebSite.get_object_by_id(version_info.versionOID)

        # The version is a distinct persistent object with its own graph.
        self.assertNotEqual(site.get_object_id(), site_version.get_object_id())
        self.assertTrue(site_version.is_persistent())
        self.assertEqual(len(site_version.get_all_locations()), 1)
        self.assertTrue(site_version.uri.is_persistent())
        self.assertEqual(len(site_version.uri.get_all_locations()), 1)

        page_version = site_version.pages[0]
        self.assertEqual(len(site_version.pages), 1)
        self.assertNotEqual(page.get_object_id(), page_version.get_object_id())
        self.assertNotEqual(page, page_version)

        # Remove the WebPage from the version's pages, then consolidate:
        # the removal must propagate back to the original object.
        site_version.remove_last_web_page()
        self.assertEqual(len(site_version.pages), 0)
        self.assertEqual(len(site.pages), 1)
        site.consolidate_version(unloaded_version_info)
        self.assertEqual(len(site.pages), 0)

        logger.debug("Test OK!")
class ExecuteReplicatedPropertiesTest(unittest.TestCase):
    """Functional test (2 nodes): setters on replicated attributes route
    through the master (inMaster __setUpdate__) and trigger before-methods.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.replication_class import Person
        from dataclay import getRuntime
        from dataclay.exceptions.exceptions import DataClayException
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        ee_ids = list(getRuntime().get_execution_environments_info().keys())
        self.assertEqual(len(ee_ids), 2)
        first_ee = ee_ids[0]

        # When we create a Person object we call the locally set for
        # non-persistent object.__setattr__
        person_a = Person('foo', 100)
        person_b = Person('fat', 200)
        person_a.make_persistent(backend_id=first_ee)
        person_b.make_persistent(backend_id=first_ee)
        self.assertEqual(person_a.get_master_location(), first_ee)

        # name is a replicated attribute so the before method should be called
        # before the setter. When we change the name we call an inMaster setter
        # execute_implementation_aux('__setUpdate__', ...)
        person_a.name = 'aaa'

        # When we change the age we call a remote __setUpdate__ on object
        person_b.age = 78
        self.assertEqual(person_b.age, 78)

        # Assert that the attribute was properly changed
        self.assertEqual(person_a.name, 'aaa')
        # Check that before method was called
        self.assertEqual(person_a.years, 3)

        logger.debug("Test OK!")
class VolatilesSimpleTestCase(unittest.TestCase):
    """Functional test: volatile objects become persistent when attached to a
    persistent object, with and without circular dependencies.
    """

    # DataClayMock object for simulation.
    mock = SimpleMock()

    def setUp(self):
        """PyUnit function called before every test case.

        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """Finish all services started for simulation."""
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init
        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario. VERY IMPORTANT: Imports must be located
        # AFTER init.
        from model.classes import WebSite, WebPage, URI
        self.session_initialized = True

        # Test. From now on, the Functional Test itself.

        # Case 1: recursive makePersistent without circular dependencies.
        site = WebSite("bsc1.es")
        site.make_persistent(alias=site.uri.host)
        volatile_uri = URI("bsc1.es" + "/volatilepage.html")  # Volatile
        site.uri = volatile_uri  # param of setter is volatile
        self.assertTrue(volatile_uri.is_persistent())
        self.assertTrue(site.is_persistent())
        self.assertTrue(site.uri.is_persistent())

        # Case 2: recursive with one circular dependency.
        page = WebPage("bsc2.es" + "/foo/bsc.html")
        page.make_persistent(alias=page.uri.host)
        circular_site = WebSite("fsf.org")
        # added persistent object to a volatile
        circular_site.add_web_page(page)
        # send volatile
        page.add_link(circular_site)
        self.assertTrue(circular_site.is_persistent())
        self.assertTrue(circular_site.uri.is_persistent())
        self.assertTrue(page.is_persistent())
        self.assertTrue(page.uri.is_persistent())

        logger.debug("Test OK!")
class TypesTest(unittest.TestCase):
    """Functional test for new_version / consolidate_version on a Player graph.

    DataClayMock object for simulation.
    """
    # NOTE(review): this file defines several classes named ``TypesTest``; at
    # module level later definitions shadow earlier ones, so unittest discovery
    # only runs the last. Consider renaming them uniquely — verify with the
    # test runner before relying on this test executing.

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import Player, Carrer

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        messi = Player("Leo", "Messi", 30)
        messi.add_role("forward")
        messi.add_skill("dribbling")
        messi_carrer = Carrer()
        stats_2016_2017 = dict()
        stats_2016_2017["goal"] = 54
        stats_2016_2017["presence"] = 52
        messi_carrer.add_stat("2016/2017", stats_2016_2017)
        messi_carrer.add_team("2016/2017", "Barcelona FC")
        messi.add_carrer(messi_carrer)
        messi.add_test_types(True, 2, "Str")
        messi.make_persistent()

        # BUGFIX: is_persistent is a method (called as ``is_persistent()``
        # everywhere else in this file); the original asserted on the bound
        # method object itself, which is always truthy — a vacuous assertion.
        self.assertTrue(messi.is_persistent())
        self.assertTrue(messi.carrer.is_persistent())

        # Test NewVersion
        version_info, unloaded_version_info = messi.new_version(
            list(messi.get_all_locations().keys())[0])
        logger.debug("Version info are:\n%s", version_info)
        versionOID = version_info.versionOID
        messi_version = Player.get_object_by_id(versionOID)
        logger.debug("New version of messi is:\n%s", messi_version)

        # NewVersion ID is different
        self.assertNotEqual(messi.get_object_id(),
                            messi_version.get_object_id())

        # NewVersion fields are the same of the original
        self.assertEqual(messi.a, messi_version.a)
        self.assertEqual(messi.b, messi_version.b)
        self.assertEqual(messi.c, messi_version.c)
        self.assertEqual(messi.skills, messi_version.skills)
        self.assertEqual(messi.roles, messi_version.roles)
        self.assertEqual(messi.personal_info, messi_version.personal_info)
        self.assertEqual(messi.carrer.teams, messi_version.carrer.teams)
        self.assertEqual(messi.carrer.stats, messi_version.carrer.stats)

        # Mutate only the version; the original must stay untouched until
        # consolidation.
        stats_2015_2016 = dict()
        stats_2015_2016["goal"] = 41
        stats_2015_2016["presence"] = 49
        messi_version.carrer.add_team("2015/2016", "Barcelona FC")
        messi_version.carrer.add_stat("2015/2016", stats_2015_2016)
        self.assertNotEqual(messi.carrer.stats, messi_version.carrer.stats)
        self.assertNotEqual(messi.carrer.teams, messi_version.carrer.teams)
        self.assertEqual(len(messi.carrer.teams), 1)
        self.assertEqual(len(messi_version.carrer.teams), 2)
        self.assertEqual(len(messi.carrer.stats), 1)
        self.assertEqual(len(messi_version.carrer.stats), 2)

        # Consolidation folds the version's additions into the original.
        messi.consolidate_version(unloaded_version_info)
        self.assertEqual(len(messi.carrer.teams), 2)
        self.assertEqual(len(messi.carrer.stats), 2)
        logger.debug("Test OK!")
class MakePersistentExtraMethodsTest(unittest.TestCase):
    """Checks that user-defined dunder methods (__str__/__eq__/__hash__)
    keep working on objects after make_persistent.

    DataClayMock object for simulation.
    """

    # Shared mock that simulates a full dataClay deployment in-process.
    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e.
        # Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import" order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import FancyUUMethods

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        o = FancyUUMethods(42, "Hello World")
        # __str__ must give the same rendering before and after persistence.
        self.assertEqual("%s" % o, "Message[42]: Hello World")
        o.make_persistent()
        self.assertEqual("%s" % o, "Message[42]: Hello World")

        # Equality: presumably only the first constructor argument (the id)
        # participates in __eq__/__hash__ — o == p but o != q. TODO confirm
        # against model/classes.py.
        p = FancyUUMethods(42, "Not Hello World")
        q = FancyUUMethods(43, "Hello World")
        p.make_persistent("p_obj")
        q.make_persistent("q_obj")
        self.assertEqual(o, p)
        self.assertNotEqual(o, q)
        self.assertNotEqual(p, q)

        # Same equality relations must hold for alias-retrieved proxies.
        p_bis = FancyUUMethods.get_by_alias("p_obj")
        q_bis = FancyUUMethods.get_by_alias("q_obj")
        self.assertEqual(o, p_bis)
        self.assertNotEqual(o, q_bis)
        self.assertNotEqual(p_bis, q_bis)

        # __hash__ consistency: equal objects collapse in a set.
        s = set()
        s.add(o)
        self.assertEqual(len(s), 1)
        s.add(p_bis)  # this won't add an object because o == p
        self.assertEqual(len(s), 1)
        s.add(q_bis)
        self.assertEqual(len(s), 2)
        self.assertIn(o, s)
        self.assertIn(p, s)  # this works because o == p
        self.assertIn(q, s)
        logger.debug("Test OK!")
class TypesTest(unittest.TestCase):
    """Functional test for new_replica of a Player graph across two backends.

    DataClayMock object for simulation.
    """
    # NOTE(review): duplicate class name — see the other ``TypesTest``
    # definitions in this file; only the last one is discovered by unittest.

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        # Two backends are needed to test replication.
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import Player, Carrer
        from dataclay import getRuntime
        from dataclay.DataClayObjProperties import DCLAY_GETTER_PREFIX

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]

        messi = Player("Leo", "Messi", 30)
        messi.add_role("forward")
        messi.add_skill("dribbling")
        messi_carrer = Carrer()
        stats_2016_2017 = dict()
        stats_2016_2017["goal"] = 54
        stats_2016_2017["presence"] = 52
        messi_carrer.add_stat("2016/2017", stats_2016_2017)
        messi_carrer.add_team("2016/2017", "Barcelona FC")
        messi.add_carrer(messi_carrer)
        messi.add_test_types(True, 2, "Str")
        messi.make_persistent(backend_id=environment1_id)

        # BUGFIX: is_persistent is a method; the original asserted on the bound
        # method object, which is always truthy (vacuous assertion).
        self.assertTrue(messi.is_persistent())
        self.assertTrue(messi.carrer.is_persistent())

        messi.new_replica(backend_id=environment2_id)

        # Updates locations after replication
        messi_locations = messi.get_all_locations()
        messi_carrer_locations = messi.carrer.get_all_locations()

        # Check that object is replicated
        self.assertEqual(len(messi_locations), 2)
        self.assertIn(environment1_id, messi_locations)
        self.assertIn(environment2_id, messi_locations)

        # Check that associated objects are replicated
        self.assertIn(environment2_id, messi_carrer_locations)

        # Read the replica's fields directly on backend 2 via remote getters.
        replicated_messi_carrer = messi.run_remote(
            environment2_id, DCLAY_GETTER_PREFIX + 'carrer', None)
        replicated_messi_teams = replicated_messi_carrer.run_remote(
            environment2_id, DCLAY_GETTER_PREFIX + 'teams', None)
        replicated_messi_stats = replicated_messi_carrer.run_remote(
            environment2_id, DCLAY_GETTER_PREFIX + 'stats', None)
        self.assertEqual(messi.carrer.teams, replicated_messi_teams)
        self.assertEqual(messi.carrer.stats, replicated_messi_stats)
        logger.info("Messi replicated stats are %s", replicated_messi_stats)
        logger.debug("Test OK!")
class addAliasTest(unittest.TestCase):
    """Functional test for adding/removing aliases on an already-persistent
    object (make_persistent with an alias acts as add_alias).

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import Person

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        alias = "my_object"
        alias2 = "my_object2"

        # Test make_persistent (no alias yet)
        person = Person(name='Nikola', age=86)
        person.make_persistent()

        # Verify object_id is not null.
        # IDIOM: use assertIsNotNone/assertEqual instead of assertTrue(x != None)
        # / assertTrue(a == b) — correct None comparison and informative
        # failure messages.
        object_id = person.get_object_id()
        self.assertIsNotNone(object_id)

        # No alias registered yet, so lookup must fail.
        self.assertRaises(Exception, Person.get_by_alias, alias=alias)

        # make_persistent on an already-persistent object adds the alias.
        person.make_persistent(alias)
        # check you can get person by alias without exception
        Person.get_by_alias(alias=alias)

        # Get new_person with alias
        new_person = Person.get_by_alias(alias)

        # Verify result: alias resolves to the same object.
        self.assertEqual(object_id, new_person.get_object_id())

        # Test delete_alias
        Person.delete_alias(alias)
        self.assertRaises(Exception, Person.get_by_alias, alias=alias)

        # A second alias can be added and resolves to the same object.
        person.make_persistent(alias2)
        new_person2 = Person.get_by_alias(alias2)
        self.assertEqual(object_id, new_person2.get_object_id())
        self.assertEqual(new_person2.get_object_id(),
                         new_person.get_object_id())
        # check you can get person by alias without exception
        Person.get_by_alias(alias=alias2)

        # The deleted alias can be re-added afterwards.
        person.make_persistent(alias)
        new_person3 = Person.get_by_alias(alias)
        self.assertEqual(object_id, new_person3.get_object_id())
        self.assertEqual(new_person3.get_object_id(),
                         new_person2.get_object_id())
        logger.debug("Test OK!")
class UpdateWithReplicaTest(unittest.TestCase):
    """Functional test for dc_clone + dc_update on a replicated object:
    an update on the original must be visible in every replica location.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        # Two backends are needed to test replication.
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import WebSite, WebPage, URI
        from dataclay import getRuntime

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]

        host = "bsc.es"
        web_site = WebSite(host)
        web_page = WebPage(host + "/page.html")

        # Verify object_id is not null.
        # IDIOM: assertIsNotNone instead of assertTrue(x != None).
        object_id = web_site.get_object_id()
        self.assertIsNotNone(object_id)

        web_site.add_web_page(web_page)
        web_site.make_persistent(backend_id=environment1_id)
        web_site.new_replica(backend_id=environment2_id)
        ws_locations = web_site.get_all_locations()
        self.assertEqual(len(ws_locations), 2)

        # Clone the web_site
        web_site_copy = web_site.dc_clone()
        self.assertEqual(len(web_site_copy.pages), len(web_site.pages))
        nondefaultvalue = "notfoo"
        web_site_copy.replyme = nondefaultvalue

        # Add a web page to cloned web_site
        web_page2 = WebPage(host + "/page2.html")
        web_site_copy.add_web_page(web_page2)

        # Update original web_site from the (non-persistent) clone.
        web_site.dc_update(web_site_copy)
        self.assertFalse(web_site_copy.is_persistent())

        # Check updates
        self.assertEqual(len(web_site_copy.pages), len(web_site.pages))

        # The updated field must be visible in every replica location.
        # IDIOM: assert directly per location instead of accumulating a
        # ``wrong`` flag — same pass/fail outcome, better failure diagnostics.
        from dataclay.DataClayObjProperties import DCLAY_GETTER_PREFIX
        for ws_location in ws_locations:
            value = web_site.run_remote(ws_location,
                                        DCLAY_GETTER_PREFIX + "replyme", None)
            self.assertEqual(value, nondefaultvalue)
        logger.debug("Test OK!")
class TypesTest(unittest.TestCase):
    """Functional test for nested built-in collections (list/dict/tuple/set)
    as dataClay object fields: persistence, replication and versioning.

    DataClayMock object for simulation.
    """
    # NOTE(review): duplicate class name — see the other ``TypesTest``
    # definitions in this file; only the last one is discovered by unittest.

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @unittest.skip("skipping nested type test")
    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        """
        Imports. Imports must be located here in order to simulate "import"
        order in a real scenario.
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.nested_classes import NestedColl
        from dataclay import getRuntime
        from dataclay.DataClayObjProperties import DCLAY_GETTER_PREFIX

        self.session_initialized = True

        """
        Test. From now on, the Functional Test itself.
        """
        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]

        # Build nested collections mixing all four built-in container types.
        dict_a = dict()
        dict_a["test"] = 1
        set_a = set()
        set_a.add(1)
        main_list = [dict_a, [1, 2, 3, 4, 5], (1, 2, 3, 4, 5), set_a]
        self.assertEqual(type(main_list[0]), dict)
        self.assertEqual(type(main_list[1]), list)
        self.assertEqual(type(main_list[2]), tuple)
        self.assertEqual(type(main_list[3]), set)
        main_dict = dict()
        main_dict["dict"] = dict_a
        main_dict["list"] = [1, 2, 3, 4]
        main_dict["tuple"] = (1, 2, 3, 4)
        main_dict["set"] = set_a
        self.assertEqual(type(main_dict["dict"]), dict)
        self.assertEqual(type(main_dict["list"]), list)
        self.assertEqual(type(main_dict["tuple"]), tuple)
        self.assertEqual(type(main_dict["set"]), set)
        main_tuple = (dict_a, [1, 2, 3, 4, 5], (1, 2, 3, 4, 5), set_a)
        self.assertEqual(type(main_tuple[0]), dict)
        self.assertEqual(type(main_tuple[1]), list)
        self.assertEqual(type(main_tuple[2]), tuple)
        self.assertEqual(type(main_tuple[3]), set)
        main_set = set()
        main_set.add((1, 2, 3, 4))
        main_set.add(1)
        main_set.add("a")
        self.assertIn(1, main_set)
        self.assertIn("a", main_set)
        self.assertIn((1, 2, 3, 4), main_set)

        nested_coll = NestedColl(main_list, main_dict, main_tuple, main_set)

        # Test Persistence: fields must round-trip unchanged.
        nested_coll.make_persistent(backend_id=environment1_id)
        self.assertEqual(main_list, nested_coll.a)
        self.assertEqual(main_dict, nested_coll.b)
        self.assertEqual(main_tuple, nested_coll.c)
        # set compared via set(...) — presumably the stored form may come back
        # as a different sequence type; TODO confirm.
        self.assertEqual(main_set, set(nested_coll.d))

        # Test Replication
        nested_coll.new_replica(backend_id=environment2_id)
        nested_coll_locations = nested_coll.get_all_locations()

        # Check that object is replicated
        self.assertEqual(len(nested_coll_locations), 2)
        self.assertIn(environment1_id, nested_coll_locations)
        self.assertIn(environment2_id, nested_coll_locations)

        # Read each field from the replica on backend 2 via remote getters.
        replicated_list = nested_coll.run_remote(environment2_id,
                                                 DCLAY_GETTER_PREFIX + 'a', None)
        replicated_dict = nested_coll.run_remote(environment2_id,
                                                 DCLAY_GETTER_PREFIX + 'b', None)
        replicated_tuple = nested_coll.run_remote(environment2_id,
                                                  DCLAY_GETTER_PREFIX + 'c', None)
        replicated_set = nested_coll.run_remote(environment2_id,
                                                DCLAY_GETTER_PREFIX + 'd', None)
        self.assertEqual(replicated_list, nested_coll.a)
        self.assertEqual(replicated_dict, nested_coll.b)
        self.assertEqual(replicated_tuple, nested_coll.c)
        self.assertEqual(replicated_set, nested_coll.d)

        # Test Version
        version_info, unloaded_version_info = nested_coll.new_version(
            environment1_id)
        logger.debug("Version info are:\n%s", version_info)
        versionOID = version_info.versionOID
        nested_coll_version = NestedColl.get_object_by_id(versionOID)
        logger.debug("New version of nested_coll is:\n%s", nested_coll_version)

        # NewVersion ID is different
        self.assertNotEqual(nested_coll.get_object_id(),
                            nested_coll_version.get_object_id())

        # NewVersion fields are the same of the original
        self.assertEqual(nested_coll.a, nested_coll_version.a)
        self.assertEqual(nested_coll.b, nested_coll_version.b)
        self.assertEqual(nested_coll.c, nested_coll_version.c)
        self.assertEqual(nested_coll.d, nested_coll_version.d)

        # Change fields and check that they are different from the original one
        dict_b = dict()
        dict_b["version"] = 23
        set_b = set()
        set_b.add(34)
        main_vers_list = [dict_b, [34, 2, 32, 4, 5], (1, 25, 3, 4, 5), set_b]
        main_vers_tuple = (dict_b, [1, 2, 35, 4, 5], (1, 2, 3, 42, 5), set_b)
        main_vers_dict = dict()
        main_vers_dict["vdict"] = dict_b
        main_vers_dict["vlist"] = [1, 2, 3, 4, 3]
        main_vers_dict["vtuple"] = (4, 2, 3, 4, 2)
        main_vers_dict["vset"] = set_b
        main_vers_set = set()
        main_vers_set.add((2, 4, 6, 3))
        main_vers_set.add(3)
        main_vers_set.add("c")
        nested_coll_version.change_fields(main_vers_list, main_vers_dict,
                                          main_vers_tuple, main_vers_set)
        self.assertNotEqual(nested_coll.a, nested_coll_version.a)
        self.assertNotEqual(nested_coll.b, nested_coll_version.b)
        self.assertNotEqual(nested_coll.c, nested_coll_version.c)
        self.assertNotEqual(nested_coll.d, nested_coll_version.d)

        # After consolidation the original must carry the version's values.
        nested_coll.consolidate_version(unloaded_version_info)
        self.assertEqual(main_vers_list, nested_coll.a)
        self.assertEqual(main_vers_dict, nested_coll.b)
        self.assertEqual(main_vers_tuple, nested_coll.c)
        self.assertEqual(main_vers_set, set(nested_coll.d))
        logger.debug("Test OK!")
class GetAllLocationTest(unittest.TestCase):
    """Checks get_all_locations on a single-backend deployment: the object
    lives exactly in the backend it was persisted to.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        """
        Imports. Imports must be located here in order to simulate "import"
        order in a real scenario.
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import WebSite, WebPage, URI
        from dataclay import getRuntime

        self.session_initialized = True

        """
        Test. From now on, the Functional Test itself.
        """
        site = WebSite("bsc.es")

        # Exactly one execution environment is expected in this deployment.
        env_ids = list(getRuntime().get_execution_environments_info().keys())
        self.assertEqual(len(env_ids), 1)
        only_env_id = env_ids[0]

        # MakePersistent in location1
        site.make_persistent(backend_id=only_env_id)
        site_oid = site.get_object_id()
        location_ids = list(site.get_all_locations().keys())
        first_location = location_ids[0]

        # Assert that backend_id of persistent object is the chosen environment
        self.assertTrue(site.is_persistent())
        self.assertIsNotNone(site_oid)
        self.assertEqual(first_location, only_env_id)

        # get_all_locations must contain that environment and nothing else
        self.assertIn(only_env_id, location_ids)
        self.assertEqual(len(location_ids), 1)
        logger.debug("Test OK!")
class MoveDoubleTest(unittest.TestCase):
    """Functional test for move_object across three backends: a recursive
    move must relocate the object and all associated objects.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        # Three backends are needed to test the double move.
        self.mock.setUp(__file__, nodes=3)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    def _assert_only_location(self, expected_env_id, *objects):
        """Assert each object lives in exactly one backend: expected_env_id."""
        for obj in objects:
            locations = obj.get_all_locations()
            self.assertEqual(len(locations), 1)
            self.assertIn(expected_env_id, locations)

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import WebSite, WebPage, URI
        from dataclay import getRuntime

        self.session_initialized = True

        # Test recursive makePersistent without circular dependencies
        host = "bsc.es"
        web_site = WebSite(host)
        web_page = WebPage(host + "/page.html")
        web_site.add_web_page(web_page)

        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]
        environment3_id = environments_ids[2]
        self.assertEqual(len(environments_ids), 3)

        web_site.make_persistent(alias=web_site.uri.host,
                                 backend_id=environment1_id)

        # Check Persistence: the site, the page and both URIs are persistent
        # and live only in the first backend.
        # (DECOMPOSITION: the repeated 4-object location check is factored
        # into _assert_only_location.)
        self.assertTrue(web_site.is_persistent())
        self.assertTrue(web_site.uri.is_persistent())
        self.assertTrue(web_page.is_persistent())
        self.assertTrue(web_page.uri.is_persistent())
        self._assert_only_location(environment1_id, web_site, web_site.uri,
                                   web_page, web_page.uri)

        # Move in the second location (recursive move: associated objects too)
        getRuntime().move_object(web_site, environment1_id, environment2_id,
                                 True)
        self._assert_only_location(environment2_id, web_site, web_site.uri,
                                   web_page, web_page.uri)

        # Move in the third location
        getRuntime().move_object(web_site, environment2_id, environment3_id,
                                 True)
        self._assert_only_location(environment3_id, web_site, web_site.uri,
                                   web_page, web_page.uri)
        logger.debug("Test OK!")
class MakePersistentWithStrMethodTest(unittest.TestCase):
    """Checks __eq__ behaviour across make_persistent, including the side
    effect that comparing a volatile against a persistent object makes the
    volatile persistent.

    NOTE(review): the class name mentions __str__ but the body exercises
    ``HasEqMethod`` / __eq__ — consider renaming; verify intent.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e.
        # Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import" order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import HasEqMethod

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        # Equality presumably depends only on the constructor value — o == p == q,
        # but o != r. TODO confirm against model/classes.py.
        o = HasEqMethod(1)
        p = HasEqMethod(1)
        q = HasEqMethod(1)
        r = HasEqMethod(2)
        self.assertEqual(o, p)
        self.assertEqual(o, q)
        self.assertNotEqual(o, r)

        o.make_persistent()
        # This is triggering something like p == o, which doesn't matter
        self.assertEqual(p, o)
        # This is triggering something like o == q,
        # which will remote call the __eq__ method on a persistent object (o object)
        # which will trigger q to become a volatile
        # which will as a matter of act make q persistent (not exactly, but close)
        self.assertEqual(o, q)

        p.make_persistent("alias_p")
        # q was persistent , so this silently fails, no alias associated
        q.make_persistent("alias_q")
        r.make_persistent("alias_r")
        self.assertEqual(o, p)
        self.assertEqual(o, q)
        self.assertNotEqual(o, r)

        p_bis = HasEqMethod.get_by_alias("alias_p")
        # this fails because q has not been assigned an alias
        q_bis = HasEqMethod.get_by_alias("alias_q")
        r_bis = HasEqMethod.get_by_alias("alias_r")
        self.assertEqual(o, p_bis)
        self.assertEqual(o, q_bis)
        self.assertNotEqual(o, r_bis)
        logger.debug("Test OK!")
class ConsolidateVersionTest(unittest.TestCase):
    """Functional test for consolidate_version: changes made on a version
    become visible on the original object (and via its alias).

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import Person

        self.session_initialized = True

        # Test makePersistent
        person = Person(name='Nikola', age=86)
        person.make_persistent(alias="Tesla")
        self.assertTrue(person.is_persistent())

        # Create newVersion (one per location) and change name and age of it.
        # IDIOM: iterate keys directly instead of .items() — the values were
        # never used.
        for backend_id in person.get_all_locations():
            version_info, unloaded_version_info = person.new_version(backend_id)
            versionOID = version_info.versionOID
            person_version = Person.get_object_by_id(versionOID)
            person_version.name = "Thomas"
            person_version.age = 84

        # Test ConsolidateVersion.
        # NOTE: consolidates only the version from the last loop iteration;
        # with the default single-node mock there is exactly one location.
        person.consolidate_version(unloaded_version_info)

        # Check that fields are consolidated, also when read via the alias.
        self.assertEqual(person.name, "Thomas")
        self.assertEqual(person.age, 84)
        self.assertEqual(Person.get_by_alias("Tesla").name, "Thomas")
        self.assertEqual(Person.get_by_alias("Tesla").age, 84)
        logger.debug("After Consolidate, new name: %s and new age: %s",
                     person.name, person.age)
        logger.debug("Test OK!")
class ExecuteImplementationTest(unittest.TestCase):
    """Checks that methods, setters and getters work both before (local
    execution) and after (remote execution) make_persistent.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import WebSite, WebPage, URI

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        host = "bsc.es"
        host2 = "bsc.en"
        web_site = WebSite(host)
        web_page = WebPage(host + "/main.html")
        web_page2 = WebPage(host2 + "/who.html")
        uri1 = URI(host2 + "/where.html")
        uri2 = URI(host + "/why.html")

        # Verify object_id is not null
        object_id = web_site.get_object_id()
        self.assertIsNotNone(object_id)

        # Execute method locally (object not yet persistent)
        web_site.add_web_page(web_page)
        # IDIOM: assertGreater instead of assertTrue(len(...) > 0) — reports
        # the actual length on failure.
        self.assertGreater(len(web_site.pages), 0)

        # Execute setter and getter locally
        web_site.uri = uri1
        self.assertEqual(web_site.uri.host, host2)

        # Test make_persistent
        web_site.make_persistent()
        self.assertTrue(web_site.is_persistent())

        # Execute method remotely
        web_site.add_web_page(web_page2)
        self.assertEqual(len(web_site.pages), 2)

        # Execute setter and getter remotely (object is persistent now; the
        # setter receives a volatile URI which becomes persistent too)
        web_site.uri = uri2
        self.assertEqual(web_site.uri.host, host)
        logger.debug("Test OK!")
class MakePersistentTest(unittest.TestCase):
    """Functional test for non-recursive make_persistent: associated objects
    are NOT persisted, so touching them afterwards must fail.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        # WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        # FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        # and JVM for each test. In the end, it means only one test in this class.
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        # Imports. Imports must be located here in order to simulate "import"
        # order in a real scenario.
        # VERY IMPORTANT: Imports must be located AFTER init
        from model.classes import WebSite, WebPage, URI

        self.session_initialized = True

        # Test. From now on, the Functional Test itself.
        host = "bsc.es"
        web_site = WebSite(host)
        web_page = WebPage(host + "/page.html")
        web_page2 = WebPage(host + "/page2.html")

        # Verify object_id is not null.
        # IDIOM: assertIsNotNone instead of assertTrue(x != None).
        object_id = web_site.get_object_id()
        self.assertIsNotNone(object_id)

        web_site.add_web_page(web_page)
        self.assertEqual(len(web_site.pages), 1)

        # Test make_persistent (non-recursive: referenced objects stay volatile)
        web_site.make_persistent(recursive=False)
        self.assertTrue(web_site.is_persistent())

        # TODO: Test better this
        # Add another page after persistence but web_site.uri.host is not persistent
        # (See add_we_page method in model/classes.py)
        self.assertRaises(Exception, web_site.add_web_page, web_page2)
        logger.debug("Test OK!")
class RemoteGetterTestCase(unittest.TestCase):
    """Checks that a persistent object on one backend can reference a
    persistent object on another backend, and that volatiles reached
    through it become persistent.

    DataClayMock object for simulation.
    """

    mock = SimpleMock()

    def setUp(self):
        """
        PyUnit function called before every test case.
        Starts DataClay simulation in one Python interpreter and one Java VM.
        This allows us to Debug in a local machine without dockers and without
        a full start of DataClay (jars, configurations, ...)
        """
        self.mock.setUp(__file__, nodes=2)

    def tearDown(self):
        """
        Finish all services started for simulation.
        """
        self.mock.tearDown()

    @pytest.mark.timeout(300, method='thread')
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE
        FUNCTIONAL TESTS FROM EACH OTHER. i.e. Start a new Python Interpreter
        and JVM for each test. In the end, it means only one test in this class.
        """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()

        """
        Imports. Imports must be located here in order to simulate "import"
        order in a real scenario.
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import WebSite, WebPage
        from dataclay import getRuntime

        self.session_initialized = True

        """
        Test. From now on, the Functional Test itself.
        """
        # Pick the two backends of this deployment (dicts preserve insertion
        # order, so values() lines up with keys()).
        execs_info = getRuntime().get_execution_environments_info()
        backend_infos = list(execs_info.values())
        first_backend = backend_infos[0]
        second_backend = backend_infos[1]

        site_host = "bsc.es"
        site = WebSite(site_host)
        site.make_persistent(alias=site.uri.host,
                             backend_id=first_backend.dataClayID)

        # Persist the page on the OTHER backend, then link it from the site.
        page = WebPage(site_host + "/page.html")
        page.make_persistent(backend_id=second_backend.dataClayID)
        site.add_web_page(page)

        self.assertTrue(site.is_persistent())
        self.assertTrue(site.uri.is_persistent())
        self.assertTrue(page.is_persistent())  # volatile is persistent
        self.assertTrue(page.uri.is_persistent())  # volatile is persistent
        logger.debug("Test OK!")