    def initialize_profile(self, change_logger_in_dev=True):
        """
        Specific initialization when functioning with storage
        """
        super(WebSettingsProfile, self).initialize_profile()

        if change_logger_in_dev and self.env.is_development():
            self.LOGGER_CONFIG_FILE_NAME = "dev_logger_config.conf"

        ## Make sure DB events are linked.
        from tvb.core.traits import db_events
        db_events.attach_db_events()
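
Every example on this page hinges on db_events.attach_db_events(), which wires persistence-related callbacks into the database layer. The TVB implementation is not shown here; as a rough, generic illustration of the pattern only, SQLAlchemy's event.listen can register such hooks on a mapped class (the model class and callback below are hypothetical):

from sqlalchemy import event

def _after_entity_insert(mapper, connection, target):
    # Hypothetical callback: react to a newly persisted entity.
    print("Stored new entity:", target)

def attach_example_db_events(model_class):
    # Generic sketch of "attaching DB events": register a mapper-level
    # listener on an ORM-mapped class. Not TVB's actual implementation.
    event.listen(model_class, 'after_insert', _after_entity_insert)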
Example #2
    def initialize_profile(self, change_logger_in_dev=True):
        """
        Specific initialization when functioning with storage
        """
        super(WebSettingsProfile, self).initialize_profile()

        if change_logger_in_dev and not self.env.is_distribution():
            self.LOGGER_CONFIG_FILE_NAME = "dev_logger_config.conf"

        ## Make sure DB events are linked.
        from tvb.core.traits import db_events
        db_events.attach_db_events()
Example #3
def initialize(introspected_modules, load_xml_events=True):
    """
    Initialize when Application is starting.
    Check for new algorithms or new DataTypes.
    """
    SettingsService().check_db_url(cfg.DB_URL)

    ## Initialize DB
    is_db_empty = initialize_startup()

    ## Create Projects storage root in case it does not exist.
    initialize_storage()

    ## Populate DB algorithms, by introspection
    event_folders = []
    start_introspection_time = datetime.datetime.now()
    for module in introspected_modules:
        introspector = Introspector(module)
        # Introspection is always done, even if DB was not empty.
        introspector.introspect(True)
        event_path = introspector.get_events_path()
        if event_path:
            event_folders.append(event_path)
    # Now remove any unverified Algo-Groups, categories or Portlets
    invalid_stored_entities = dao.get_non_validated_entities(
        start_introspection_time)
    for entity in invalid_stored_entities:
        dao.remove_entity(entity.__class__, entity.id)

    ## Populate events
    if load_xml_events:
        eventhandler.read_events(event_folders)

    ## Make sure DB events are linked.
    db_events.attach_db_events()

    ## Create default users.
    if is_db_empty:
        dao.store_entity(
            model.User(cfg.SYSTEM_USER_NAME, None, None, True, None))
        UserService().create_user(username=cfg.ADMINISTRATOR_NAME,
                                  password=cfg.ADMINISTRATOR_PASSWORD,
                                  email=cfg.ADMINISTRATOR_EMAIL,
                                  role=ROLE_ADMINISTRATOR)

    ## In case actions related to latest code-changes are needed, make sure they are executed.
    CodeUpdateManager().update_all()
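
For context, a minimal sketch of how the startup routine above might be driven; the list of modules introspected for adapters is hypothetical and deployment-specific:

# Hypothetical invocation of the startup routine above.
ADAPTER_MODULES = ['tvb.adapters']
initialize(ADAPTER_MODULES, load_xml_events=True)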
    def initialize_profile(self, change_logger_in_dev=True):
        """
        Specific initialization when functioning with storage
        """
        super(WebSettingsProfile, self).initialize_profile()

        if change_logger_in_dev and self.env.is_development():
            self.LOGGER_CONFIG_FILE_NAME = "dev_logger_config.conf"

        ## Make sure DB events are linked.
        from tvb.core.traits import db_events
        db_events.attach_db_events()

        from tvb.basic.logger.builder import get_logger
        from tvb.interfaces.web.mplh5 import mplh5_server
        log = get_logger('tvb.interfaces.web.mplh5.mplh5_server')
        mplh5_server.start_server(log)
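
This variant additionally starts the mplh5 figure server under a dedicated logger. The call is not guarded in the original; a purely illustrative defensive variant (not TVB's actual code) would be:

try:
    mplh5_server.start_server(log)
except Exception as exc:
    # Illustrative fallback: log the failure and continue; the rest of the
    # web profile does not depend on the figure server being up.
    log.exception(exc)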
        LOGGER.debug("Importing Algorithm: " + str(algorithm_group.classname) +
                     " for Operation:" + str(curent_operation.id))
        PARAMS = parse_json_parameters(curent_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(algorithm_group)

        ## Uncomment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, curent_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(curent_operation,
                                              adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        LOGGER.error("Could not execute operation " + str(sys.argv[1]))
        LOGGER.exception(excep)
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error=True,
                                                  error_message=str(excep))


if __name__ == '__main__':
    OPERATION_ID = sys.argv[1]
    # Make sure DB events are linked.
    db_events.attach_db_events()
    do_operation_launch(OPERATION_ID)
    sys.exit(0)
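
The module above is meant to run as a standalone script with the operation identifier as its only command-line argument; a minimal sketch of a more defensive entry point (the argument check and usage message are illustrative additions, and the script name is a placeholder):

if __name__ == '__main__':
    if len(sys.argv) < 2:
        # Hypothetical guard: the operation identifier is mandatory.
        sys.exit("Usage: python <launcher_script>.py <operation_id>")
    db_events.attach_db_events()
    do_operation_launch(sys.argv[1])
    sys.exit(0)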