Example #1
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        import time
        import traceback

        from dataclay.api import init, finish

        logger.debug('**Starting init**')
        init()
        
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import WebSite, WebPage, URI
        self.session_initialized = True
    
        """
        Test. From now on, the Functional Test itself. 
        """
        uri = URI("host/bsc")
        try:
            uri.make_persistent()
        except Exception:
            traceback.print_exc()
        
        from dataclay import getRuntime
        getRuntime().close_session()
        
        # Check if object exists
        while self.mock.mock.mock_dataclay.objectExists(str(uri.get_object_id())):
            print("Waiting... ")
            time.sleep(5)
            
        logger.debug("Test OK!")
	    
示例#2
0
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import Person
        from dataclay import getRuntime
        from dataclay.commonruntime.Settings import settings

        self.session_initialized = True
        """
        Test. From now on, the Functional Test itself. 
        """
        lm_client = getRuntime().ready_clients["@LM"]
        alias = "test_alias"

        # Test make_persistent
        person = Person(name='Nikola', age=86)
        person.make_persistent(alias)

        # Verify that the object id is not None
        object_id = person.get_object_id()

        self.assertIsNotNone(object_id)
        # Check that the person can be fetched by alias without raising
        Person.get_by_alias(alias=alias)

        # Verify that object is in DB/Cache/Metadata
        # TODO: Missing the check on DB

        metadata = lm_client.get_metadata_by_oid(settings.current_session_id,
                                                 object_id)
        alias_cache = getRuntime().alias_cache

        self.assertIn(alias, metadata.aliases)
        self.assertIn(alias, alias_cache)

        # Test delete_alias
        Person.delete_alias(alias)
        self.assertRaises(Exception, Person.get_by_alias, alias=alias)

        # Verify that object is not in DB/Cache/Metadata
        metadata = lm_client.get_metadata_by_oid(settings.current_session_id,
                                                 object_id)
        alias_cache = getRuntime().alias_cache

        self.assertNotIn(alias, metadata.aliases)
        self.assertNotIn(alias, alias_cache)

        logger.debug("Test OK!")
Example #3
def unfederate(ext_dataclay_id=None):
    """ Unfederate all objects belonging to/federated with external data clay with id provided 
    or with all any external dataclay if no argument provided. 
    :param ext_dataclay_id: external dataClay id
    :return: None
    :type ext_dataclay_id: uuid 
    :rtype: None
    """
    if ext_dataclay_id is not None:
        return getRuntime().unfederate_all_objects(ext_dataclay_id)
    else:
        return getRuntime().unfederate_all_objects_with_all_dcs()
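A minimal usage sketch for unfederate(); the host and port of the external dataClay are placeholders, and the example assumes that dataClay was previously registered (see register_dataclay below):

    # Unfederate from one specific external dataClay ...
    ext_id = get_dataclay_id("external-dc.example.org", 11034)  # hypothetical host/port
    unfederate(ext_id)
    # ... or from every known external dataClay.
    unfederate()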
Example #4
def federate_all_objects(dest_dataclay_id):
    """ Federate all objects from current dataclay to destination dataclay
    :param dest_dataclay_id destination dataclay id
    :return: None
    :rtype: None
    """
    return getRuntime().federate_all_objects(dest_dataclay_id)
Example #5
def get_backend_id_by_name(name):
    """Return dataClay backend present in the system with name provided."""
    all_backends = getRuntime().get_execution_environments_info()
    for backend in all_backends.values():
        if backend.name == name:
            return backend.dataClayID
    return None
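A short usage sketch for get_backend_id_by_name(); the backend name "DS1" is an assumption, and Person is the model class used in Example #2:

    backend_id = get_backend_id_by_name("DS1")  # assumed backend name
    if backend_id is not None:
        person = Person(name='Nikola', age=86)
        person.make_persistent(backend_id=backend_id)  # place the object on that backend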
Example #6
    def getObject(self, oid):
        obj_id, class_id = oid.split(":")
        obj_id = uuid.UUID(obj_id)
        class_id = uuid.UUID(class_id)
        return getRuntime().get_object_by_id(obj_id,
                                             hint=self.backend_id,
                                             class_id=class_id)
Example #7
def get_external_backend_id_by_name(name, external_dataclay_id):
    """Return dataClay backend present in the system with name provided."""
    all_backends = getRuntime().get_all_execution_environments_at_dataclay(
        external_dataclay_id)
    for backend in all_backends.values():
        if backend.name == name:
            return backend.id
    return None
Example #8
def reinitialize_clients():
    runtime = getRuntime()
    logger.verbose(
        "Performing reinitialization of clients, removing #%d cached ones and recreating LMClient",
        len(runtime.ready_clients))
    runtime.ready_clients = {
        "@LM": LMClient(settings.logicmodule_host, settings.logicmodule_port),
    }
Example #9
def get_backend_id(hostname, port):
    """Return dataClay backend present in the system with name provided."""
    all_backends = getRuntime().get_all_execution_environments_info(
        force_update=True)
    for backend in all_backends.values():
        if backend.hostname == hostname and backend.port == port:
            return backend.id
    return None
Example #10
def migrate_federated_objects(origin_dataclay_id, dest_dataclay_id):
    """ Migrate federated objects from origin dataclay to destination dataclay
    :param origin_dataclay_id: origin dataclay id 
    :param dest_dataclay_id destination dataclay id
    :return: None
    :rtype: None
    """
    return getRuntime().migrate_federated_objects(origin_dataclay_id,
                                                  dest_dataclay_id)
Example #11
def get_dataclay_id(exthostname, extport):
    """ Get external dataClay ID with host and port identified
    :param exthostname: external dataClay host name
    :param extport: external dataClay port
    :return: None
    :type exthostname: string
    :type extport: int
    :rtype: None
    """
    return getRuntime().get_external_dataclay_id(exthostname, extport)
Example #12
def register_dataclay(exthostname, extport):
    """ Register external dataClay for federation
    :param exthostname: external dataClay host name
    :param extport: external dataClay port
    :return: external dataClay ID registered
    :type exthostname: string
    :type extport: int
    :rtype: UUID
    """
    return getRuntime().register_external_dataclay(exthostname, extport)
Example #13
def finish_tracing():
    """
    Finishes tracing if needed 
    """
    if extrae_tracing_is_enabled():
        if int(settings.extrae_starting_task_id
               ) == 0:  # in compss Java runtime will get traces for us
            getRuntime().deactivate_tracing_in_dataclay_services()
            getRuntime().deactivate_tracing()
            getRuntime().get_traces_in_dataclay_services()  # not on workers!
        else:
            getRuntime().deactivate_tracing()
Example #14
def import_models_from_external_dataclay(namespace, ext_dataclay_id) -> None:
    """ Import models in namespace specified from an external dataClay
    :param namespace: external dataClay namespace to import
    :param ext_dataclay_id: external dataClay ID
    :return: None
    :type namespace: string
    :type ext_dataclay_id: UUID
    :rtype: None
    """
    return getRuntime().import_models_from_external_dataclay(
        namespace, ext_dataclay_id)
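Putting the federation helpers above together, a hedged end-to-end sketch; the host, port and namespace are placeholders, not values from the original code:

    ext_id = register_dataclay("external-dc.example.org", 11034)   # assumed host/port
    import_models_from_external_dataclay("my_namespace", ext_id)   # assumed namespace
    federate_all_objects(ext_id)
    # ... and later undo the federation with that dataClay only:
    unfederate(ext_id)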
Example #15
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.replication_class import Person
        from dataclay import getRuntime
        from dataclay.exceptions.exceptions import DataClayException

        self.session_initialized = True
    
        """
        Test. From now on, the Functional Test itself. 
        """
        environments_ids = list(getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]

        self.assertEqual(len(environments_ids), 2) 

        # Creating a Person object uses the local object.__setattr__, since the object is not persistent yet
        p = Person('foo', 100)
        r = Person('fat', 200)

        p.make_persistent(backend_id=environment1_id)
        r.make_persistent(backend_id=environment1_id)
        
        self.assertEqual(p.get_master_location(), environment1_id)

        # 'name' is a replicated attribute, so the "before" method should be called before the setter.
        # Changing the name triggers an in-master setter, execute_implementation_aux('__setUpdate__', ...).
        p.name = 'aaa'

        # Changing the age triggers a remote __setUpdate__ on the object
        r.age = 78
        self.assertEqual(r.age, 78)

        # Assert that the attribute was properly changed
        self.assertEqual(p.name, 'aaa')

        # Check that before method was called
        self.assertEqual(p.years, 3)
        
        logger.debug("Test OK!")
Example #16
def finish():
    global _initialized
    if not _initialized:
        logger.warning("Already finished --ignoring")
        return
    global _connection_initialized
    logger.info("Finishing dataClay API")
    finish_tracing()
    getRuntime().close_session()
    logger.debug(f"Closed session {settings.current_session_id}")
    getRuntime().stop_runtime()
    # Unload stubs
    clean_babel_data()
    sys.path.remove(os.path.join(settings.stubs_folder, 'sources'))
    # unload caches of stubs
    from dataclay.commonruntime.ExecutionGateway import loaded_classes, class_extradata_cache_client, \
        class_extradata_cache_exec_env
    loaded_classes.clear()
    class_extradata_cache_exec_env.clear()
    class_extradata_cache_client.clear()
    # unload settings
    unload_settings()
    _initialized = False
    _connection_initialized = False
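For reference, a minimal client lifecycle sketch around init()/finish(); the WebSite model import is taken from the tests above and, as they stress, it must happen after init():

    from dataclay.api import init, finish

    init()
    from model.classes import WebSite  # model imports AFTER init(), as in the tests above
    web_site = WebSite("bsc.es")
    web_site.make_persistent()
    finish()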
Example #17
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from dataclay import getRuntime
        from dataclay.communication.grpc.messages.common.common_messages_pb2 import LANG_PYTHON, LANG_JAVA
        from dataclay.commonruntime.Settings import settings

        self.session_initialized = True
        """
        Test. From now on, the Functional Test itself. 
        """

        lm_client = getRuntime().ready_clients["@LM"]

        python_ees_info = lm_client.get_execution_environments_info(
            settings.current_session_id, LANG_PYTHON)
        java_ees_info = lm_client.get_execution_environments_info(
            settings.current_session_id, LANG_JAVA)

        #### WARNING!!!!! get_execution_environments_per_locations_for_ds DEPRECATED!
        python_ee_per_loc_for_ds = lm_client.get_execution_environments_per_locations_for_ds(
            LANG_PYTHON)
        java_ee_per_loc_for_ds = lm_client.get_execution_environments_per_locations_for_ds(
            LANG_JAVA)

        # Check that EEs are correctly initialized and assigned to the right SL

        for py_ee in python_ees_info.values():
            self.assertNotIn(py_ee, java_ees_info.values())
            self.assertIn(py_ee, python_ee_per_loc_for_ds.values())
            self.assertNotIn(py_ee, java_ee_per_loc_for_ds.values())

        for java_ee in java_ees_info.values():
            self.assertNotIn(java_ee, python_ees_info.values())
            self.assertIn(java_ee, java_ee_per_loc_for_ds.values())
            self.assertNotIn(java_ee, python_ee_per_loc_for_ds.values())

        logger.debug("Test OK!")
Example #18
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import WebSite, WebPage, URI
        from dataclay import getRuntime        
        self.session_initialized = True
    
        """
        Test. From now on, the Functional Test itself. 
        """

        host = "bsc.es"
        web_site = WebSite(host)

        environments_ids = list(getRuntime().get_execution_environments_info().keys())
        self.assertEqual(len(environments_ids), 4)
        
        environment1_id = environments_ids[0]

        # MakePersistent in location1        
        web_site.make_persistent(backend_id=environment1_id)
        object_id = web_site.get_object_id()
        backend_id = web_site.get_location()

        # Assert that backend_id of persistent object is environment1
        self.assertTrue(web_site.is_persistent())
        self.assertIsNotNone(object_id)
        self.assertEqual(backend_id, environment1_id)
        
        # Create replicas in all EEs
        web_site.new_replica(backend_id=environments_ids[1])
        web_site.new_replica(backend_id=environments_ids[2])
        web_site.new_replica(backend_id=environments_ids[3])
        
        logger.debug("Test OK!")
Example #19
def init_connection(client_file):
    """Initialize the connection client ==> LogicModule.

    Note that the connection can be initialized standalone from here (like the
    dataClay tool performs) or it can be initialized by the full init() call.

    :param client_file: The path to the `client.properties` file. If set to None,
    then this function assumes that the connection settings are already loaded.
    :return: The LogicModule client (also accessible through the global
    commonruntime.ready_clients["@LM"])
    """
    global _connection_initialized
    logger.debug("Initializing dataClay connection with LM")
    runtime = getRuntime()
    if _connection_initialized:
        logger.warning(
            "Runtime already has a client with the LogicModule, reusing that")
        return runtime.ready_clients["@LM"]

    if client_file:
        settings.load_connection(client_file)

    # Once the properties are loaded, we can prepare the LM client
    logger.debug("Initializing dataClay connection with LM %s:%s",
                 settings.logicmodule_host, settings.logicmodule_port)
    client = LMClient(settings.logicmodule_host, settings.logicmodule_port)
    runtime.ready_clients["@LM"] = client

    _connection_initialized = True

    settings.logicmodule_dc_instance_id = client.get_dataclay_id()

    logger.debug("DataclayInstanceID is %s, storing client in cache",
                 settings.logicmodule_dc_instance_id)
    runtime.ready_clients[
        settings.logicmodule_dc_instance_id] = runtime.ready_clients["@LM"]

    # wait for 1 python backend
    while len(get_backends()) < 1:
        logger.info("Waiting for any python backend to be ready ...")
        sleep(2)

    return client
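A hedged standalone usage sketch for init_connection(); the client.properties path is an assumption:

    lm_client = init_connection("./cfgfiles/client.properties")  # path is an assumption
    print(lm_client.get_dataclay_id())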
Example #20
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import WebSite, WebPage
        from dataclay import getRuntime
        self.session_initialized = True
        """
        Test. From now on, the Functional Test itself. 
        """

        execs_info = getRuntime().get_execution_environments_info()

        exec_env_info_1 = execs_info[list(execs_info.keys())[0]]
        exec_env_info_2 = execs_info[list(execs_info.keys())[1]]

        host = "bsc.es"
        web_site = WebSite(host)
        web_site.make_persistent(alias=web_site.uri.host,
                                 backend_id=exec_env_info_1.dataClayID)

        web_page = WebPage(host + "/page.html")
        web_page.make_persistent(backend_id=exec_env_info_2.dataClayID)

        web_site.add_web_page(web_page)

        self.assertTrue(web_site.is_persistent())
        self.assertTrue(web_site.uri.is_persistent())
        self.assertTrue(web_page.is_persistent())  # volatile is persistent
        self.assertTrue(web_page.uri.is_persistent())  # volatile is persistent

        logger.debug("Test OK!")
Example #21
    def getAllObjectsIDs(self):

        res = []

        for obj in self.objectsDKB.get_objects([], False, True, False):

            obj_id = getRuntime().get_object_id_by_alias(obj[4])
            class_id = 'b79a0fd1-ac91-41fe-b5c6-ff0a5b993a83'

#            obj_id = uuid.UUID(obj_id)
#            class_id = uuid.UUID(class_id)
#            res.append(f'{str(obj_id)}:{class_id}')
#            res.append(obj[4])

        print(res)

        return [
            '0bd030a1-b220-451b-91e2-491e440e9824:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '6c2cfffd-305c-4fcc-a17a-617a87e132c8:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '6bbff6d3-1717-4d2d-a3e4-b10323b13114:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '34082340-8c92-4d20-ad89-06e82c73ad84:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '2d7d973c-843c-447a-bc74-7875baafd9c0:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'b1dd4284-151b-4450-9fa3-512e10dbe8d4:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'facbae07-3ace-4f37-8842-456e4cd8690e:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'd4b23a92-00c3-4c2b-88f0-53db534d4996:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'fb480175-5760-47f8-b5e3-cf84086b5a0b:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'a10d0c53-9fba-4bfa-bf5b-1149f2b7d241:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'fd12a19f-d166-47fe-9554-51bcd62cbe32:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'ce7dc91c-d19e-420d-af9f-688b39f4095f:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'b5c76764-6063-47c0-8beb-478f5e54d722:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '342c1146-2a9c-4b10-886e-600a46eb8657:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '68d921fc-2e5d-4aff-882e-34a4ca2ed674:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '9dc4914b-0894-4a21-a9fa-e10648830c42:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'f238215c-785a-4ded-86d7-eda82104ef86:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '326b6e4d-d6cc-421a-88f0-7a8d2afad3ea:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '3023b5ea-8000-40a1-8924-d5548005d294:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            'da54ed26-580a-49f5-83a5-f998aa3e3c19:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c',
            '4aba55b7-d7a7-4b15-b280-7788504d6567:649420f0-98bb-4a7f-b9e9-cfb3a5d1683c'
        ]
Example #22
def get_num_objects():
    """ Get number of objects in dataClay
    :return: number of objects in dataClay
    :rtype: int32
    """
    return getRuntime().get_num_objects()
Example #23
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import WebSite, WebPage, URI
        from dataclay import getRuntime
        self.session_initialized = True
        """
        Test. From now on, the Functional Test itself. 
        """
        # Test recursive makePersistent without circular dependencies
        host = "bsc.es"
        web_site = WebSite(host)
        web_page = WebPage(host + "/page.html")

        web_site.add_web_page(web_page)

        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]
        environment3_id = environments_ids[2]

        self.assertEqual(len(environments_ids), 3)

        web_site.make_persistent(alias=web_site.uri.host,
                                 backend_id=environment1_id)

        ws_locations = web_site.get_all_locations()
        ws_uri_locations = web_site.uri.get_all_locations()
        wp_locations = web_page.get_all_locations()
        wp_uri_locations = web_page.uri.get_all_locations()

        # Check Persistence
        self.assertTrue(web_site.is_persistent())
        self.assertEqual(len(ws_locations), 1)
        self.assertIn(environment1_id, ws_locations)

        self.assertTrue(web_site.uri.is_persistent())
        self.assertEqual(len(ws_uri_locations), 1)
        self.assertIn(environment1_id, ws_uri_locations)

        self.assertTrue(web_page.is_persistent())
        self.assertEqual(len(wp_locations), 1)
        self.assertIn(environment1_id, wp_locations)

        self.assertTrue(web_page.uri.is_persistent())
        self.assertEqual(len(wp_uri_locations), 1)
        self.assertIn(environment1_id, wp_uri_locations)

        # Move to the second location
        getRuntime().move_object(web_site, environment1_id, environment2_id,
                                 True)

        ws_locations = web_site.get_all_locations()
        ws_uri_locations = web_site.uri.get_all_locations()
        wp_locations = web_page.get_all_locations()
        wp_uri_locations = web_page.uri.get_all_locations()

        # Check that the object and associated ones are now in the second location
        self.assertEqual(len(ws_locations), 1)
        self.assertIn(environment2_id, ws_locations)

        self.assertEqual(len(ws_uri_locations), 1)
        self.assertIn(environment2_id, ws_uri_locations)

        self.assertEqual(len(wp_locations), 1)
        self.assertIn(environment2_id, wp_locations)

        self.assertEqual(len(wp_uri_locations), 1)
        self.assertIn(environment2_id, wp_uri_locations)

        # Move to the third location
        getRuntime().move_object(web_site, environment2_id, environment3_id,
                                 True)

        ws_locations = web_site.get_all_locations()
        ws_uri_locations = web_site.uri.get_all_locations()
        wp_locations = web_page.get_all_locations()
        wp_uri_locations = web_page.uri.get_all_locations()

        # Check that the object and associated ones are now in the third location
        self.assertEqual(len(ws_locations), 1)
        self.assertIn(environment3_id, ws_locations)

        self.assertEqual(len(ws_uri_locations), 1)
        self.assertIn(environment3_id, ws_uri_locations)

        self.assertEqual(len(wp_locations), 1)
        self.assertIn(environment3_id, wp_locations)

        self.assertEqual(len(wp_uri_locations), 1)
        self.assertIn(environment3_id, wp_uri_locations)
        logger.debug("Test OK!")
Example #24
def finish_tracing():
    """
    Finishes tracing if needed
    """
    if extrae_tracing_is_enabled():
        extrae_compss = int(settings.extrae_starting_task_id) != 0

        if extrae_compss:
            if get_task_id() == 0:
                getRuntime().deactivate_tracing(False)
                # in COMPSs the Java runtime will get the traces for us
            else:
                getRuntime().deactivate_tracing(False)

        else:
            if get_task_id() == 0:
                getRuntime().deactivate_tracing_in_dataclay_services()
                getRuntime().deactivate_tracing(True)
                getRuntime().get_traces_in_dataclay_services()  # not on workers!
                # Merge the traces
                os.system("mpi2prv -keep-mpits -no-syn -f TRACE.mpits -o ./trace/dctrace.prv")
            else:
                getRuntime().deactivate_tracing(True)
Example #25
def finish():
    global _initialized
    logger.info("Finishing dataClay API")
    finish_tracing()
    getRuntime().stop_runtime()
    _initialized = False
Example #26
def post_network_init():
    """Perform the last part of initialization, now with network."""
    global _initialized
    client = init_connection(None)

    # In all cases, track (done through babelstubs YAML file)
    contracts = track_local_available_classes()

    # Ensure they are in the path (high "priority")
    sys.path.insert(0, os.path.join(settings.stubs_folder, 'sources'))

    if not contracts:
        logger.warning(
            "No contracts available. Calling new_session, but no classes will be available"
        )
    """ Initialize runtime """
    getRuntime().initialize_runtime()

    session_info = client.new_session(
        settings.current_id, settings.current_credential, contracts, [
            client.get_dataset_id(settings.current_id,
                                  settings.current_credential, dataset)
            for dataset in settings.datasets
        ],
        client.get_dataset_id(settings.current_id, settings.current_credential,
                              settings.dataset_for_store), LANG_PYTHON)
    settings.current_session_id = session_info.sessionID

    name = settings.local_backend_name
    if name:
        exec_envs = getRuntime().get_execution_environments_info()
        for k, v in exec_envs.items():
            if exec_envs[k].name == name:
                global LOCAL
                LOCAL = k
                break
        else:
            logger.warning("Backend with name '%s' not found, ignoring", name)

    settings.dataset_id = client.get_dataset_id(settings.current_id,
                                                settings.current_credential,
                                                settings.dataset_for_store)

    # Remember this function is called after a fork in workers also.
    # Activate Extrae if needed.
    #### READ ####
    # Activating tracing with the tracing_enabled property set to True and a starting task id of 0
    # means we are tracing dataClay only. The dataClay client will not increment the current
    # available task ID and will send a 0 to the LM, which understands 0 as "only dataClay tracing"
    # since with COMPSs it is never 0.
    # Activating tracing with tracing_enabled set to True and a starting task id != 0 means we are
    # tracing both COMPSs and dataClay. The client will not initialize pyextrae or increment the
    # task id, since COMPSs already initializes it for us (as a worker).
    # In either case, neither mode needs to add a synchronization event or increment the available
    # task id (only services do). Synchronization events are used to merge LM traces and Python EE
    # traces; incrementing the available task id is useful to send to N EE/DS nodes.
    if settings.tracing_enabled:
        logger.info("Initializing tracing")
        # set current available task id
        set_current_available_task_id(int(settings.extrae_starting_task_id))
        # -- Do NOT add synchronization events if it is a compss worker (default case of activation) --
        getRuntime().activate_tracing()
        # TODO: check if is in master, should always be master since initWorker is the one called from workers only?
        # Fallback: if services have tracing active, the call is ignored
        getRuntime().activate_tracing_in_dataclay_services()

    # The new_session RPC may fail, and in that case we will consider
    # the library as "not initialized". Arriving here means "all ok".
    _initialized = True
Example #27
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.classes import Player, Carrer
        from dataclay import getRuntime
        from dataclay.DataClayObjProperties import DCLAY_GETTER_PREFIX

        self.session_initialized = True
        """
        Test. From now on, the Functional Test itself. 
        """
        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]

        messi = Player("Leo", "Messi", 30)

        messi.add_role("forward")
        messi.add_skill("dribbling")

        messi_carrer = Carrer()

        stats_2016_2017 = dict()
        stats_2016_2017["goal"] = 54
        stats_2016_2017["presence"] = 52

        messi_carrer.add_stat("2016/2017", stats_2016_2017)

        messi_carrer.add_team("2016/2017", "Barcelona FC")

        messi.add_carrer(messi_carrer)

        messi.add_test_types(True, 2, "Str")

        messi.make_persistent(backend_id=environment1_id)

        self.assertTrue(messi.is_persistent())
        self.assertTrue(messi.carrer.is_persistent())

        messi.new_replica(backend_id=environment2_id)

        # Updates locations after replication
        messi_locations = messi.get_all_locations()
        messi_carrer_locations = messi.carrer.get_all_locations()

        # Check that object is replicated
        self.assertEqual(len(messi_locations), 2)
        self.assertIn(environment1_id, messi_locations)
        self.assertIn(environment2_id, messi_locations)

        # Check that associated objects are replicated
        self.assertIn(environment2_id, messi_carrer_locations)

        replicated_messi_carrer = messi.run_remote(
            environment2_id, DCLAY_GETTER_PREFIX + 'carrer', None)
        replicated_messi_teams = replicated_messi_carrer.run_remote(
            environment2_id, DCLAY_GETTER_PREFIX + 'teams', None)
        replicated_messi_stats = replicated_messi_carrer.run_remote(
            environment2_id, DCLAY_GETTER_PREFIX + 'stats', None)

        self.assertEqual(messi.carrer.teams, replicated_messi_teams)
        self.assertEqual(messi.carrer.stats, replicated_messi_stats)
        logger.info("Messi replicated stats are %s", replicated_messi_stats)

        logger.debug("Test OK!")
Example #28
    def test(self):
        """Test. note that all test method names must begin with 'test.'"""
        """WARNING: IT IS HIGHLY RECOMMENDED TO HAVE ONE TEST ONLY TO ISOLATE FUNCTIONAL TESTS FROM EACH OTHER. i.e. 
        Start a new Python Interpreter and JVM for each test. In the end, it means only one test in this class. """
        from dataclay.api import init

        logger.debug('**Starting init**')
        init()
        """ 
        Imports. Imports must be located here in order to simulate "import" order in a real scenario. 
        VERY IMPORTANT: Imports must be located AFTER init
        """
        from model.nested_classes import NestedColl
        from dataclay import getRuntime
        from dataclay.DataClayObjProperties import DCLAY_GETTER_PREFIX

        self.session_initialized = True
        """
        Test. From now on, the Functional Test itself. 
        """
        environments_ids = list(
            getRuntime().get_execution_environments_info().keys())
        environment1_id = environments_ids[0]
        environment2_id = environments_ids[1]
        dict_a = dict()
        dict_a["test"] = 1
        set_a = set()
        set_a.add(1)

        main_list = [dict_a, [1, 2, 3, 4, 5], (1, 2, 3, 4, 5), set_a]
        self.assertEqual(type(main_list[0]), dict)
        self.assertEqual(type(main_list[1]), list)
        self.assertEqual(type(main_list[2]), tuple)
        self.assertEqual(type(main_list[3]), set)

        main_dict = dict()
        main_dict["dict"] = dict_a
        main_dict["list"] = [1, 2, 3, 4]
        main_dict["tuple"] = (1, 2, 3, 4)
        main_dict["set"] = set_a
        self.assertEqual(type(main_dict["dict"]), dict)
        self.assertEqual(type(main_dict["list"]), list)
        self.assertEqual(type(main_dict["tuple"]), tuple)
        self.assertEqual(type(main_dict["set"]), set)

        main_tuple = (dict_a, [1, 2, 3, 4, 5], (1, 2, 3, 4, 5), set_a)
        self.assertEqual(type(main_tuple[0]), dict)
        self.assertEqual(type(main_tuple[1]), list)
        self.assertEqual(type(main_tuple[2]), tuple)
        self.assertEqual(type(main_tuple[3]), set)

        main_set = set()
        main_set.add((1, 2, 3, 4))
        main_set.add(1)
        main_set.add("a")
        self.assertIn(1, main_set)
        self.assertIn("a", main_set)
        self.assertIn((1, 2, 3, 4), main_set)

        nested_coll = NestedColl(main_list, main_dict, main_tuple, main_set)

        # Test Persistence
        nested_coll.make_persistent(backend_id=environment1_id)

        self.assertEqual(main_list, nested_coll.a)
        self.assertEqual(main_dict, nested_coll.b)
        self.assertEqual(main_tuple, nested_coll.c)
        self.assertEqual(main_set, set(nested_coll.d))

        # Test Replication
        nested_coll.new_replica(backend_id=environment2_id)

        nested_coll_locations = nested_coll.get_all_locations()

        # Check that object is replicated
        self.assertEqual(len(nested_coll_locations), 2)
        self.assertIn(environment1_id, nested_coll_locations)
        self.assertIn(environment2_id, nested_coll_locations)

        replicated_list = nested_coll.run_remote(environment2_id,
                                                 DCLAY_GETTER_PREFIX + 'a',
                                                 None)
        replicated_dict = nested_coll.run_remote(environment2_id,
                                                 DCLAY_GETTER_PREFIX + 'b',
                                                 None)
        replicated_tuple = nested_coll.run_remote(environment2_id,
                                                  DCLAY_GETTER_PREFIX + 'c',
                                                  None)
        replicated_set = nested_coll.run_remote(environment2_id,
                                                DCLAY_GETTER_PREFIX + 'd',
                                                None)

        self.assertEqual(replicated_list, nested_coll.a)
        self.assertEqual(replicated_dict, nested_coll.b)
        self.assertEqual(replicated_tuple, nested_coll.c)
        self.assertEqual(replicated_set, nested_coll.d)

        # Test Version
        version_info, unloaded_version_info = nested_coll.new_version(
            environment1_id)
        logger.debug("Version info are:\n%s", version_info)
        versionOID = version_info.versionOID

        nested_coll_version = NestedColl.get_object_by_id(versionOID)
        logger.debug("New version of nested_coll is:\n%s", nested_coll_version)

        # NewVersion ID is different
        self.assertNotEqual(nested_coll.get_object_id(),
                            nested_coll_version.get_object_id())

        # NewVersion fields are the same of the original
        self.assertEqual(nested_coll.a, nested_coll_version.a)
        self.assertEqual(nested_coll.b, nested_coll_version.b)
        self.assertEqual(nested_coll.c, nested_coll_version.c)
        self.assertEqual(nested_coll.d, nested_coll_version.d)

        # Change fields and check that they are different from the original one
        dict_b = dict()
        dict_b["version"] = 23
        set_b = set()
        set_b.add(34)
        main_vers_list = [dict_b, [34, 2, 32, 4, 5], (1, 25, 3, 4, 5), set_b]
        main_vers_tuple = (dict_b, [1, 2, 35, 4, 5], (1, 2, 3, 42, 5), set_b)
        main_vers_dict = dict()
        main_vers_dict["vdict"] = dict_b
        main_vers_dict["vlist"] = [1, 2, 3, 4, 3]
        main_vers_dict["vtuple"] = (4, 2, 3, 4, 2)
        main_vers_dict["vset"] = set_b
        main_vers_set = set()
        main_vers_set.add((2, 4, 6, 3))
        main_vers_set.add(3)
        main_vers_set.add("c")
        nested_coll_version.change_fields(main_vers_list, main_vers_dict,
                                          main_vers_tuple, main_vers_set)

        self.assertNotEqual(nested_coll.a, nested_coll_version.a)
        self.assertNotEqual(nested_coll.b, nested_coll_version.b)
        self.assertNotEqual(nested_coll.c, nested_coll_version.c)
        self.assertNotEqual(nested_coll.d, nested_coll_version.d)

        nested_coll.consolidate_version(unloaded_version_info)

        self.assertEqual(main_vers_list, nested_coll.a)
        self.assertEqual(main_vers_dict, nested_coll.b)
        self.assertEqual(main_vers_tuple, nested_coll.c)
        self.assertEqual(main_vers_set, set(nested_coll.d))

        logger.debug("Test OK!")
Example #29
def get_backends():
    """Return all the dataClay backend present in the system."""
    result = getRuntime().get_execution_environments_names("admin", "admin")
    logger.debug("Got %i python backend/s", len(result))
    return result
Example #30
def get_backends_info():
    """Return all the dataClay BackendInfo present in the system."""
    result = getRuntime().get_execution_environments_info()
    logger.debug("Got %i python backend/s", len(result))
    return result
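A small sketch combining the two helpers above to list the known Python backends; the name/hostname/port fields are assumed to match the BackendInfo attributes used elsewhere in these examples:

    for backend in get_backends_info().values():
        print(backend.name, backend.hostname, backend.port)
    print("Total Python backends:", len(get_backends()))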