def test_get_header_token(self):
        """Check UserAuthentication.get_header_token() in a Flask test request context."""
        # ensure 'Authorization' header is used if available
        # (the 'Bearer ' prefix is stripped, leaving only the raw token)
        with self.__app.test_request_context(
                '/test', headers={'Authorization': 'Bearer aaa-bbb'}):
            token = UserAuthentication.get_header_token()
        self.assertEqual(token, 'aaa-bbb')

        # ensure 'no_token' is used if 'Authorization' header is not available
        with self.__app.test_request_context('/test'):
            token = UserAuthentication.get_header_token()
        self.assertEqual(token, 'no_token')
Example #2
0
 def _check_and_extract_environment_zip(self, experiment):
     """
     Checks for validity and extracts a zipped environment. First we
     make sure that the zip referenced in the experiment exists in the
     list of user environments, then we unzip it on the fly in the temporary
     simulation directory. After the extraction we also make sure to copy
     the sdf from the experiment folder cause the user may have modified it

     :param experiment: The experiment object.
     :return: Path of the environment sdf inside the temp directory.
     :raises NRPServicesGeneralException: if the referenced zip is not among
         the user's uploaded environment models.
     """
     from hbp_nrp_backend.storage_client_api.StorageClient import StorageClient
     client = StorageClient()
     # fetch the auth token once; every storage call below needs it
     token = UserAuthentication.get_header_token(request)
     environments_list = client.get_custom_models(
         token, self.simulation.ctx_id, 'environments')
     # we use the paths of the uploaded zips to make sure the selected
     # zip is there
     paths_list = [environment['path'] for environment in environments_list]
     # check if the zip is in the user storage
     zipped_model_path = [
         path for path in paths_list
         if experiment.environmentModel.customModelPath in path
     ]
     if zipped_model_path:
         environment_path = os.path.join(
             client.get_temp_directory(),
             os.path.basename(experiment.environmentModel.src))
         storage_env_zip_data = client.get_custom_model(
             token, self.simulation.ctx_id, zipped_model_path[0])
         env_sdf_name = os.path.basename(experiment.environmentModel.src)
         env_path = os.path.join(
             client.get_temp_directory(),
             experiment.environmentModel.customModelPath)
         # zip archives are binary: write in 'wb' mode (text mode 'w' can
         # corrupt the archive on platforms that translate newlines)
         with open(env_path, 'wb') as environment_zip:
             environment_zip.write(storage_env_zip_data)
         with zipfile.ZipFile(env_path) as env_zip_to_extract:
             env_zip_to_extract.extractall(path=client.get_temp_directory())
         # copy back the .sdf from the experiment folder, cause we don't want
         # the one in the zip, cause the user might have made manual changes
         client.clone_file(env_sdf_name, token, self.simulation.experiment_id)
     # if the zip is not there, prompt the user to check his uploaded
     # models
     else:
         raise NRPServicesGeneralException(
             "Could not find selected zip %s in the list of uploaded models. Please make\
                 sure that it has been uploaded correctly" %
             (os.path.dirname(experiment.environmentModel.src)),
             "Zipped model retrieval failed")
     return environment_path
Example #3
0
    def post(self, sim_id):
        """
        Save the current running experiment SDF back to the storage
        :param sim_id: The sim_id
        :param context_id: The context_id of the experiment
        :status 500: Error saving file
        :status 200: Success. File written.
        """
        # pylint: disable=too-many-locals
        simulation = _get_simulation_or_abort(sim_id)

        # make sure the gazebo SDF export service is up before calling it
        try:
            rospy.wait_for_service('/gazebo/export_world_sdf', 3)
        except rospy.ROSException as exc:
            raise NRPServicesUnavailableROSService(str(exc))

        dump_sdf_world = rospy.ServiceProxy('/gazebo/export_world_sdf',
                                            ExportWorldSDF)

        try:
            sdf_string = dump_sdf_world().sdf_dump
            tree = ET.fromstring(sdf_string)
            # Erase all robots from the SDF
            robots = simulation.cle.get_simulation_robots()
            for robot in robots:
                for m in tree.findall(".//model[@name='" + robot.robot_id +
                                      "']"):
                    # NOTE(review): Element.getparent() is an lxml-only API,
                    # not xml.etree — presumably ET is lxml.etree; confirm
                    m.getparent().remove(m)
            sdf_string = ET.tostring(tree, encoding='utf8', method='xml')
        except rospy.ServiceException as exc:
            raise NRPServicesClientErrorException(
                "Service did not process request:" + str(exc))

        client = StorageClient()

        # find the sdf world filename from the .exc
        exp_xml_file_path = client.clone_file(
            'experiment_configuration.exc',
            UserAuthentication.get_header_token(), simulation.experiment_id)

        experiment_file = client.parse_and_check_file_is_valid(
            exp_xml_file_path, exp_conf_api_gen.CreateFromDocument,
            exp_conf_api_gen.ExD_)

        world_file_name = experiment_file.environmentModel.src

        # overwrite the world file referenced by the experiment configuration
        client.create_or_update(UserAuthentication.get_header_token(),
                                simulation.experiment_id, world_file_name,
                                sdf_string, "text/plain")

        return 200
    def _get_brain_info_from_storage(cls, experiment_id, context_id):
        """
        Gathers from the storage the brain script and the populations by
        reading the experiment's BIBI configuration file.

        :param experiment_id: the id of the experiment in which to look for
            the brain information
        :param context_id: the context ID for collab based simulations
        :return: A tuple with the path to the brain file, the cleaned-up
            population list and the raw populations dictionary
        """
        del context_id  # Unused

        token = UserAuthentication.get_header_token()

        # the .exc file tells us which BIBI configuration to fetch ...
        exc_content = cls.storage_client.get_file(
            token, experiment_id, 'experiment_configuration.exc', by_name=True)
        bibi_src = exp_conf_api_gen.CreateFromDocument(exc_content).bibiConf.src

        # ... and the BIBI configuration names the brain script
        bibi_content = cls.storage_client.get_file(
            token, experiment_id, bibi_src, by_name=True)
        bibi_dom = bibi_api_gen.CreateFromDocument(bibi_content)
        brain_name = os.path.basename(bibi_dom.brainModel.file)

        brain_filepath = cls.storage_client.clone_file(
            brain_name, token, experiment_id)

        populations = get_all_neurons_as_dict(bibi_dom.brainModel.populations)
        populations_clean = [
            SimulationResetStorage._get_experiment_population(name, value)
            for (name, value) in populations.iteritems()
        ]

        return brain_filepath, populations_clean, populations
    def reset_from_storage_all(cls, simulation, experiment_id, context_id):
        """
        Reset states machines and transfer functions

        :param: the simulation id
        :param: the experiment id
        :param: the context_id for collab based simulations
        """

        sim_dir = simulation.lifecycle.sim_dir

        # start from a clean simulation directory, then re-clone everything
        # the experiment needs from the storage
        SimUtil.clear_dir(sim_dir)
        cls.storage_client.clone_all_experiment_files(
            UserAuthentication.get_header_token(), experiment_id,
            destination_dir=sim_dir)

        # parse the experiment configuration (.exc) ...
        exc_path = simulation.lifecycle.experiment_path
        with open(exc_path) as exc_file:
            exc = exp_conf_api_gen.CreateFromDocument(exc_file.read())

        # ... and the BIBI configuration it references
        with open(os.path.join(os.path.dirname(exc_path),
                               exc.bibiConf.src)) as bibi_file:
            bibi = bibi_api_gen.CreateFromDocument(bibi_file.read())

        cls.reset_brain(simulation, experiment_id, context_id)
        cls.reset_transfer_functions(simulation, bibi, sim_dir)
        cls.reset_state_machines(simulation, exc, sim_dir)
Example #6
0
    def _parse_env_path(self, environment_path, experiment, using_storage):
        """
        Parses the environment path, depending if we are using a storage model from
        a template experiment(where we have to fetch the model from the storage),
        or we are running a storage experiment where the model is already there.
        Default case is when we are not using a storage model

        :param experiment: The experiment object.
        :param environment_path: Path to the environment configuration.
        :param using_storage: Private or template simulation
        """
        from hbp_nrp_backend.storage_client_api.StorageClient import StorageClient
        client = StorageClient()

        if using_storage:
            # storage experiment: either a zipped custom model, a
            # storage-hosted environment, or the path stays as it is
            if experiment.environmentModel.customModelPath:
                return self._check_and_extract_environment_zip(experiment)
            if 'storage://' in environment_path:
                return self._copy_storage_environment(experiment)
            return environment_path

        # template case: download a storage-hosted environment on demand ...
        if not environment_path and 'storage://' in experiment.environmentModel.src:
            env_name = os.path.basename(experiment.environmentModel.src)
            local_path = os.path.join(client.get_temp_directory(), env_name)
            token = UserAuthentication.get_header_token(request)
            folder_uuid = client.get_folder_uuid_by_name(
                token, self.simulation.ctx_id, 'environments')
            with open(local_path, "w") as env_file:
                env_file.write(
                    client.get_file(token, folder_uuid, env_name, byname=True))
            return local_path

        # ... otherwise resolve it relative to the local models path
        return os.path.join(self.models_path,
                            str(experiment.environmentModel.src))
    def put(self, sim_id):
        """
        Save the simulation CSV recorders' content to the storage.

        :param sim_id: The simulation ID
        :status 500: Error when saving recorder files
        :status 404: {0}
        :status 401: {1}
        :status 200: Success. Files saved into storage
        """

        simulation = _get_simulation_or_abort(sim_id)
        # only the simulation owner may export its recordings
        if not UserAuthentication.matches_x_user_name_header(
                request, simulation.owner):
            raise NRPServicesWrongUserException()

        csv_files = simulation.cle.get_simulation_CSV_recorders_files()

        # Done here in order to avoid circular dependencies introduced by the
        # way we __init__ the rest_server module.
        from hbp_nrp_backend.storage_client_api.StorageClient \
            import StorageClient

        client = StorageClient()

        # one time-stamped subfolder per save, e.g. 'csv_records_<timestamp>'
        # (str.join instead of the deprecated Python-2-only string.join)
        time_string = get_date_and_time_string()
        subfolder_name = '_'.join(['csv_records', time_string])

        # fetch the auth token once; every storage call below needs it
        token = UserAuthentication.get_header_token(request)

        folder_uuid = client.create_folder(
            token, simulation.experiment_id, subfolder_name)['uuid']

        if csv_files:
            for csv_file in csv_files:
                with open(csv_file.temporary_path) as csvfile:
                    client.create_or_update(
                        token, folder_uuid, csv_file.name, csvfile.read(),
                        'text/plain')
        return 200
Example #8
0
    def _copy_storage_environment(self, experiment):
        """
        Copies a storage environment from the storage environment models
        to the running simulation temporary folder

        :param experiment: The experiment object.
        """
        from hbp_nrp_backend.storage_client_api.StorageClient import StorageClient
        client = StorageClient()
        env_name = os.path.basename(experiment.environmentModel.src)
        destination = os.path.join(client.get_temp_directory(), env_name)
        token = UserAuthentication.get_header_token(request)
        with open(destination, "w") as env_file:
            # resolve the user's 'environments' folder, then fetch the
            # environment file from it by name
            folder_uuid = client.get_folder_uuid_by_name(
                token, self.simulation.ctx_id, 'environments')
            env_file.write(
                client.get_file(token, folder_uuid, env_name, byname=True))
        return destination
Example #9
0
    def prepare_record_for_playback(self):
        """
        Copy the record from user storage to tmp and unzip it.

        :> experimentID: The experiment
        :> storagePath: The storage path

        :tmp path: The local path ready to be played
        """

        client = StorageClient()
        # destination mirrors the storage-relative playback path under sim_dir
        file_clone_destination = os.path.join(self.sim_dir,
                                              self.simulation.playback_path)
        dest_path = os.path.dirname(file_clone_destination)

        try:
            if not os.path.exists(dest_path):
                os.makedirs(dest_path)
            # NOTE(review): the recording is fetched zipped, yet the file is
            # opened in text mode "w" — reliable on POSIX only; confirm
            with open(file_clone_destination, "w") as file_clone:

                # recordings live under '<experiment_id>/recordings' in the
                # storage; the folder name must be URL-quoted for the API
                file_contents = client.get_file(
                    UserAuthentication.get_header_token(),
                    urllib.quote_plus(self.simulation.experiment_id +
                                      '/recordings'),
                    os.path.basename(
                        self.simulation.playback_path),  # zip name
                    by_name=True,
                    zipped=True)

                file_clone.write(file_contents)

            ZipUtil.extractall(file_clone_destination, dest_path, True)

            # Update sim object's playback path with folder name
            self.simulation.playback_path = os.path.join(
                dest_path, ZipUtil.get_rootname(file_clone_destination))

            # the zip itself is no longer needed once extracted
            os.remove(file_clone_destination)

        except Exception as ex:
            # any failure invalidates the simulation directory as a whole
            SimUtil.delete_simulation_dir()
            raise NRPServicesClientErrorException(
                'Copying recording to backend tmp failed with {}'.format(
                    str(ex)),
                error_code=404)
    def _get_sdf_world_from_storage(cls, experiment_id, context_id):
        """
        Download from the storage an sdf world file as a string.
        The file belongs to the experiment identified by experiment_id

        :param experiment_id: the ID of the experiment in which to look for
            the world sdf
        :param context_id: the context ID for collab based simulations
        :return: The content of the world sdf file
        """
        del context_id  # Unused

        token = UserAuthentication.get_header_token()

        # the .exc file names the sdf world file we have to fetch
        exc_content = cls.storage_client.get_file(
            token, experiment_id, 'experiment_configuration.exc', by_name=True)
        world_src = exp_conf_api_gen.CreateFromDocument(
            exc_content).environmentModel.src

        return cls.storage_client.get_file(
            token, experiment_id, world_src, by_name=True)
Example #11
0
    def _prepare_custom_environment(self, exc):
        """
        Download and extracts zipped environment defined in the exc

        :param exc: The exc DOM object
        """

        # pylint: disable=too-many-locals
        # fetch the zipped environment model from the storage
        model = Model(exc.environmentModel.model, ResourceType.ENVIRONMENT)
        zip_data = self.__storageClient.get_model(
            UserAuthentication.get_header_token(), self.simulation.ctx_id,
            model)

        # if the zip is not there, prompt the user to check his uploaded models
        if not zip_data:
            raise NRPServicesGeneralException(
                "Could not find selected zip {} in the list of uploaded custom models. Please make "
                "sure that it has been uploaded correctly".format(
                    os.path.dirname(exc.environmentModel.model)),
                "Zipped model retrieval failed")

        # unpack the archive into the simulation's assets directory
        ZipUtil.extractall(zip_abs_path=io.BytesIO(zip_data),
                           extract_to=os.path.join(self._sim_dir, 'assets'),
                           overwrite=True)
Example #12
0
    def initialize(self, state_change):
        """
        Initializes the simulation

        :param state_change: The state change that caused the simulation to be initialized
        """
        # TODO: fix dependencies so these import are not necessary
        # anymore
        from hbp_nrp_backend.storage_client_api.StorageClient import StorageClient
        simulation = self.simulation
        try:
            # simulation.private is only set for storage-based experiments
            using_storage = simulation.private is not None
            if using_storage:
                # clone the whole experiment from the storage into a local
                # working folder and pick the relevant paths out of it
                client = StorageClient()
                clone_folder, experiment_paths = client.clone_all_experiment_files(
                    UserAuthentication.get_header_token(request),
                    simulation.experiment_id)
                self.__experiment_path = experiment_paths['experiment_conf']
                self.__simulation_root_folder = clone_folder

                environment_path = experiment_paths['environment_conf']
            else:
                # template experiment: resolve the paths locally
                self.__experiment_path = os.path.join(
                    self.__experiment_path, simulation.experiment_conf)
                self.__simulation_root_folder = os.path.dirname(
                    self.__experiment_path)
                environment_path = simulation.environment_conf
            experiment, environment_path = self._parse_exp_and_initialize_paths(
                self.__experiment_path, environment_path, using_storage)

            # schedule the automatic simulation shutdown
            simulation.kill_datetime = datetime.datetime.now(timezone) \
                + datetime.timedelta(seconds=experiment.timeout)
            logger.info("simulation timeout initialized")

            simulation_factory_client = ROSCLESimulationFactoryClient()
            simulation_factory_client.create_new_simulation(
                environment_path, self.__experiment_path,
                simulation.gzserver_host, simulation.reservation,
                simulation.brain_processes, simulation.sim_id,
                str(simulation.kill_datetime), simulation.playback_path,
                UserAuthentication.get_header_token(request),
                self.simulation.ctx_id)
            # playback simulations get a playback client, live ones a CLE one
            if not simulation.playback_path:
                simulation.cle = ROSCLEClient(simulation.sim_id)
            else:
                simulation.cle = PlaybackClient(simulation.sim_id)
            logger.info("simulation initialized")

        except IOError as e:
            raise NRPServicesGeneralException(
                "Error while accessing simulation models (" + repr(e.message) +
                ")", "Models error")
        except rospy.ROSException as e:
            raise NRPServicesGeneralException(
                "Error while communicating with the CLE (" + repr(e.message) +
                ")", "CLE error")
        except rospy.ServiceException as e:
            raise NRPServicesGeneralException(
                "Error starting the simulation. (" + repr(e.message) + ")",
                "rospy.ServiceException",
                data=e.message)
Example #13
0
    def put(self, experiment_id):
        """
        Save state machines to the storage

        :param path experiment_id: The experiment_id id of the experiment
        :param body source_code: Source code of the state machine as string.
        :status 500: The experiment xml either could not be found or read
        :status 200: Success. File written.
        """
        # Done here in order to avoid circular dependencies introduced by the
        # way we __init__ the rest_server module.
        body = request.get_json(force=True)
        if 'state_machines' not in body:
            raise NRPServicesClientErrorException(
                "State machine code should be sent in "
                "the body under the 'state_machines' key")

        from hbp_nrp_backend.storage_client_api.StorageClient \
            import StorageClient

        client = StorageClient()

        exp_xml_file_path = client.clone_file(
            'experiment_configuration.exc',
            UserAuthentication.get_header_token(request), experiment_id)

        if not exp_xml_file_path:
            return {
                "message": "Failed to clone experiment configuration file"
            }, 500

        experiment = client.parse_and_check_file_is_valid(
            exp_xml_file_path, exp_conf_api_gen.CreateFromDocument,
            exp_conf_api_gen.ExD_)

        if not experiment:
            return {
                "message": "Failed to parse experiment configuration file"
            }, 500

        # one storage upload thread per state machine, all joined below
        threads = []
        for sm_name in body['state_machines']:
            sm_node = exp_conf_api_gen.SMACHStateMachine()
            sm_node.id = os.path.splitext(sm_name)[0]
            # stored file always carries the .exd extension
            sm_node.src = sm_name if sm_name.endswith(
                ".exd") else sm_name + ".exd"
            # NOTE(review): a fresh ExperimentControl replaces
            # experiment.experimentControl on every iteration, so only the
            # last state machine ends up referenced in the saved .exc —
            # confirm whether all nodes should be appended to one control
            exp_control = exp_conf_api_gen.ExperimentControl()
            exp_control.stateMachine.append(sm_node)
            experiment.experimentControl = exp_control
            t = Thread(target=client.create_or_update,
                       kwargs={
                           'token':
                           UserAuthentication.get_header_token(request),
                           'experiment':
                           experiment_id,
                           'filename':
                           sm_node.src,
                           'content':
                           body['state_machines'][sm_name],
                           'content_type':
                           "application/hbp-neurorobotics.sm+python"
                       })
            t.start()
            threads.append(t)

        # also upload the updated experiment configuration itself
        t = Thread(target=client.create_or_update,
                   kwargs={
                       'token': UserAuthentication.get_header_token(request),
                       'experiment': experiment_id,
                       'filename': 'experiment_configuration.exc',
                       'content': experiment.toxml("utf-8"),
                       'content_type': "application/hbp-neurorobotics+xml"
                   })
        t.start()
        threads.append(t)
        # wait for every upload before reporting success
        for thread in threads:
            thread.join()
        return {"message": "Success. Files written to the storage"}, 200
    def put(self, experiment_id):
        """
        Save transfer functions of an experiment to the storage.

        :param path experiment_id: The experiment_id of the experiment where the transfer functions
         will be saved
        :<json body json array of string transfer_functions: the transfer functions as python
        :status 500: BIBI configuration file not found
        :status 500: Error saving file
        :status 404: The experiment_id with the given expreiment ID was not found
        :status 404: The request body is malformed
        :status 200: Success. File written.
        """
        # pylint: disable=too-many-locals
        # Done here in order to avoid circular dependencies introduced by the
        # way we __init__ the rest_server module
        from hbp_nrp_backend.storage_client_api.StorageClient \
            import StorageClient

        body = request.get_json(force=True)
        if 'transfer_functions' not in body:
            raise NRPServicesClientErrorException(
                "Transfer functions code should be sent in "
                "the body under the 'transfer_functions' key")

        client = StorageClient()

        # the .exc names the BIBI configuration file of the experiment
        experiment_file = client.get_file(
            UserAuthentication.get_header_token(request),
            experiment_id,
            'experiment_configuration.exc',
            byname=True)

        bibi_filename = exp_conf_api_gen.CreateFromDocument(
            experiment_file).bibiConf.src

        bibi_file_path = client.clone_file(
            bibi_filename, UserAuthentication.get_header_token(request),
            experiment_id)

        bibi = client.parse_and_check_file_is_valid(
            bibi_file_path, bibi_api_gen.CreateFromDocument,
            bibi_api_gen.BIBIConfiguration)
        # Remove all transfer functions from BIBI. Then we save them in a
        # separate python file.
        del bibi.transferFunction[:]
        # one storage upload thread per transfer function, all joined below
        threads = []
        for transfer_function in body['transfer_functions']:
            transfer_function_name = get_tf_name(transfer_function)
            if transfer_function_name is not None:
                # reference the new .py file from the BIBI configuration
                transfer_function_node = bibi_api_gen.PythonTransferFunction()
                transfer_function_node.src = transfer_function_name + ".py"
                bibi.transferFunction.append(transfer_function_node)

                t = Thread(target=client.create_or_update,
                           kwargs={
                               'token':
                               UserAuthentication.get_header_token(request),
                               'experiment':
                               experiment_id,
                               'filename':
                               transfer_function_name + ".py",
                               'content':
                               transfer_function,
                               'content_type':
                               'text/plain'
                           })
                t.start()
                threads.append(t)

        # we need to prettify the parsed bibi
        pretty_bibi = xml.dom.minidom.parseString(
            bibi.toxml("utf-8")).toprettyxml()
        t = Thread(target=client.create_or_update,
                   kwargs={
                       'token': UserAuthentication.get_header_token(request),
                       'experiment': experiment_id,
                       'filename': bibi_filename,
                       'content': pretty_bibi,
                       'content_type': 'text/plain'
                   })
        t.start()
        threads.append(t)
        # wait for every upload before reporting success
        for x in threads:
            x.join()
        return 200
    def post(self, experiment_id):
        """
        Save the current running experiment SDF back to the storage
        :param experiment_id: The experiment ID
        :param context_id: The context_id of the experiment
        :status 500: Error saving file
        :status 200: Success. File written.
        """
        # pylint: disable=too-many-locals
        body = request.get_json(force=True)
        context_id = body.get('context_id', None)
        # Done here in order to avoid circular dependencies introduced by the
        # way we __init__ the rest_server module.
        from hbp_nrp_backend.storage_client_api.StorageClient \
            import StorageClient
        # make sure the gazebo SDF export service is up before calling it
        try:
            rospy.wait_for_service('/gazebo/export_world_sdf', 3)
        except rospy.ROSException as exc:
            raise NRPServicesUnavailableROSService(str(exc))

        dump_sdf_world = rospy.ServiceProxy('/gazebo/export_world_sdf',
                                            ExportWorldSDF)
        robot_pose = []

        try:
            sdf_string = dump_sdf_world().sdf_dump
            tree = ET.fromstring(sdf_string)
            # best effort: remember the robot pose so it can be written back
            # into the experiment configuration below
            try:
                robot_pose = tree.findall(
                    ".//state/model[@name='robot']/pose")[0].text.split()
            # pylint: disable=bare-except
            except:
                logger.error("Can't retrieve robot position.")
            # Erase all robots from the SDF
            for m in tree.findall(".//model[@name='robot']"):
                m.getparent().remove(m)
            sdf_string = ET.tostring(tree, encoding='utf8', method='xml')
        except rospy.ServiceException as exc:
            raise NRPServicesClientErrorException(
                "Service did not process request:" + str(exc))

        client = StorageClient()

        # find the sdf world filename from the .exc
        exp_xml_file_path = client.clone_file(
            'experiment_configuration.exc',
            UserAuthentication.get_header_token(request), experiment_id)

        experiment_file = client.parse_and_check_file_is_valid(
            exp_xml_file_path, exp_conf_api_gen.CreateFromDocument,
            exp_conf_api_gen.ExD_)

        world_file_name = experiment_file.environmentModel.src

        # storage-hosted worlds go to the shared 'environments' folder,
        # experiment-local worlds back into the experiment itself
        if 'storage://' in world_file_name:
            world_file_name = os.path.basename(world_file_name)
            client.create_or_update(
                UserAuthentication.get_header_token(request),
                client.get_folder_uuid_by_name(
                    UserAuthentication.get_header_token(request), context_id,
                    'environments'), world_file_name, sdf_string, "text/plain")
        else:
            client.create_or_update(
                UserAuthentication.get_header_token(request), experiment_id,
                world_file_name, sdf_string, "text/plain")

        # Save the robot position in the ExDConf file
        # (value comparison with ==; 'is 6' relied on CPython int caching)
        if len(robot_pose) == 6:  # We need 6 elements (from Gazebo)
            experiment_file.environmentModel.robotPose.x = robot_pose[0]
            experiment_file.environmentModel.robotPose.y = robot_pose[1]
            experiment_file.environmentModel.robotPose.z = robot_pose[2]
            # gazebo reports roll/pitch/yaw; the .exc stores a quaternion
            quaternion = tf.transformations.quaternion_from_euler(
                float(robot_pose[3]), float(robot_pose[4]),
                float(robot_pose[5]))
            experiment_file.environmentModel.robotPose.ux = quaternion[0]
            experiment_file.environmentModel.robotPose.uy = quaternion[1]
            experiment_file.environmentModel.robotPose.uz = quaternion[2]
            experiment_file.environmentModel.robotPose.theta = quaternion[3]

            client.create_or_update(
                UserAuthentication.get_header_token(request),
                experiment_id, 'experiment_configuration.exc',
                experiment_file.toxml("utf-8"), "text/plain")

        else:
            # lazy %s formatting: robot_pose is a list, so the previous
            # string concatenation raised TypeError instead of logging
            logger.error("Malformed robot position tag in SDF: %s", robot_pose)

        return 200
Example #16
0
    def post(self):
        """
        Creates a new simulation which is neither 'initialized' nor 'started'.

        :< json int brainProcesses: Number of brain processes to use (overrides ExD Configuration)
        :< json string experimentID: The experiment ID of the experiment
        :> json string gzserverHost: The host where gzserver will be run: local for using the same
                                     machine of the backend, lugano to use a dedicated instance on
                                     the Lugano viz cluster
        :< json string reservation: the name of the cluster reservation subsequently used to
                                    allocate a job
        :< json string state: The initial state of the simulation
        :< json boolean private: Defines whether the simulation is based on a private experiment
        :< json string playbackPath: Path to simulation recording to play (optional)
        :< json string ctxId: The context id of the collab if we are running a collab based
                              simulation

        :> json string owner: The simulation owner (Unified Portal user name or 'hbp-default')
        :> json integer simulationID: The id of the simulation (needed for further REST calls)
        :> json string creationDate: Date of creation of this simulation
        :> json string creationUniqueID: The simulation unique creation ID that is used by the
                                         Frontend to identify this simulation

        :status 400: Experiment configuration is not valid
        :status 401: gzserverHost is not valid
        :status 409: Another simulation is already running on the server
        :status 201: Simulation created successfully
        """
        # Use context manager to lock access to simulations while a new simulation
        # is created, so concurrent POSTs cannot both pass the "already running"
        # check or race on the next sim_id.
        with SimulationService.comm_lock:
            body = request.get_json(force=True)
            sim_id = len(simulations)
            if 'experimentID' not in body:
                raise NRPServicesClientErrorException(
                    'Experiment ID not given.')

            if ('gzserverHost' in body) and (body.get('gzserverHost')
                                             not in ['local', 'lugano']):
                raise NRPServicesClientErrorException(
                    'Invalid gazebo server host.', error_code=401)

            # Only one simulation may be active per backend instance; anything
            # not yet 'stopped' or 'failed' counts as active.
            if any(s.state not in ['stopped', 'failed'] for s in simulations):
                raise NRPServicesClientErrorException(
                    'Another simulation is already running on the server.',
                    error_code=409)

            if 'brainProcesses' in body and \
                    (not isinstance(body.get('brainProcesses'), int)
                     or body.get('brainProcesses') < 1):
                raise NRPServicesClientErrorException(
                    'Invalid number of brain processes.')

            # Collect request parameters, applying defaults for optional fields
            sim_gzserver_host = body.get('gzserverHost', 'local')
            sim_reservation = body.get('reservation', None)
            sim_experiment_id = body.get('experimentID', None)
            sim_state = body.get('state', 'created')
            playback_path = body.get('playbackPath', None)
            sim_owner = UserAuthentication.get_user()
            sim_brain_processes = body.get('brainProcesses', 1)
            private = body.get('private', False)
            ctx_id = body.get('ctxId', None)
            token = UserAuthentication.get_header_token()

            sim = Simulation(sim_id,
                             sim_experiment_id,
                             sim_owner,
                             sim_gzserver_host,
                             sim_reservation,
                             sim_brain_processes,
                             sim_state,
                             playback_path=playback_path,
                             private=private,
                             ctx_id=ctx_id,
                             token=token)

            # TODO: remove me. I probably am not used anywhere
            sim.creationUniqueID = body.get('creationUniqueID',
                                            str(time.time() + random.random()))

            simulations.append(sim)

        # Trigger the state machine transition outside the lock; the simulation
        # is already registered so other requests can see it.
        sim.state = "initialized"

        return marshal(simulations[sim_id], Simulation.resource_fields), 201, {
            'location': api.url_for(SimulationControl, sim_id=sim_id),
            'gzserverHost': sim_gzserver_host
        }
Example #17
0
    def initialize(self, state_change):
        """
        Initializes the simulation: clones the experiment files from the
        storage, parses the experiment configuration, computes the timeout,
        and asks the ROS simulation factory to create the CLE (or playback)
        simulation.

        :param state_change: The state change that caused the simulation to be initialized
        :raises NRPServicesGeneralException: if the experiment is not private,
            on I/O errors while accessing the models, or on ROS communication
            or service errors
        """

        simulation = self.simulation
        # Playback runs reuse an existing recording; only live simulations
        # get a fresh temporary simulation directory.
        if not simulation.playback_path:
            self._sim_dir = SimUtil.init_simulation_dir()

        try:
            if not simulation.private:
                raise NRPServicesGeneralException(
                    "Only private experiments are supported", "CLE error", 500)

            # Clone all experiment files into the simulation directory;
            # recordings are excluded unless this is a playback simulation.
            self.__storageClient.clone_all_experiment_files(
                token=UserAuthentication.get_header_token(),
                experiment=simulation.experiment_id,
                destination_dir=self._sim_dir,
                exclude=['recordings/']
                if not simulation.playback_path else [])

            # divine knowledge about the exc name: the cloned experiment is
            # always assumed to contain 'experiment_configuration.exc'
            self.__experiment_path = os.path.join(
                self._sim_dir, 'experiment_configuration.exc')

            with open(self.__experiment_path) as exd_file:
                exc = exp_conf_api_gen.CreateFromDocument(exd_file.read())

            self._load_state_machines(exc)
            if exc.environmentModel.model:  # i.e., custom zipped environment
                self._prepare_custom_environment(exc)

            # A 'simulation' timeout counts simulated time; anything else is
            # treated as real (wall-clock) time.
            simulation.timeout_type = (
                TimeoutType.SIMULATION if exc.timeout.time
                == TimeoutType.SIMULATION else TimeoutType.REAL)

            timeout = exc.timeout.value()

            # Real-time timeouts become an absolute kill deadline; simulated
            # time is enforced by the CLE itself, so no deadline is set here.
            if simulation.timeout_type == TimeoutType.REAL:
                timeout = datetime.datetime.now(timezone) + datetime.timedelta(
                    seconds=timeout)
                simulation.kill_datetime = timeout
            else:
                simulation.kill_datetime = None

            logger.info("simulation timeout initialized")

            # Delegate the actual simulation creation to the ROS factory, then
            # attach the matching client (live CLE vs. playback).
            simulation_factory_client = ROSCLESimulationFactoryClient()
            simulation_factory_client.create_new_simulation(
                self.__experiment_path, simulation.gzserver_host,
                simulation.reservation, simulation.brain_processes,
                simulation.sim_id, str(timeout),
                simulation.timeout_type, simulation.playback_path,
                UserAuthentication.get_header_token(), self.simulation.ctx_id,
                self.simulation.experiment_id)
            if not simulation.playback_path:
                simulation.cle = ROSCLEClient(simulation.sim_id)
            else:
                simulation.cle = PlaybackClient(simulation.sim_id)
            logger.info("simulation initialized")

        # NOTE(review): e.message is Python 2 only — confirm this module still
        # targets a Python 2 runtime before porting.
        except IOError as e:
            raise NRPServicesGeneralException(
                "Error while accessing simulation models (" + repr(e.message) +
                ")", "Models error")
        except rospy.ROSException as e:
            raise NRPServicesGeneralException(
                "Error while communicating with the CLE (" + repr(e.message) +
                ")", "CLE error")
        except rospy.ServiceException as e:
            raise NRPServicesGeneralException(
                "Error starting the simulation. (" + repr(e.message) + ")",
                "rospy.ServiceException",
                data=e.message)
        # pylint: disable=broad-except
        except Exception as e:
            raise NRPServicesGeneralException(
                "Error starting the simulation. (" + repr(e) + ")",
                "Unknown exception occured",
                data=e.message)
    def put(self, experiment_id):
        """
        Save a brain model PyNN of an experiment to the storage.

        :param path experiment_id: The experiment id

        :< json body json string data: PyNN script of the model
        :< json body json string additional_populations: neuron populations
        :< json body json string context_id: the context_id of the simulation

        :status 500: {0}
        :status 404: {1}
        :status 400: The request body is malformed
        :status 200: Success. File written.
        """
        from hbp_nrp_backend.storage_client_api.StorageClient \
            import StorageClient
        body = request.get_json(force=True)
        if 'data' not in body:
            raise NRPServicesClientErrorException(
                "Neural network python code should be sent in the body under the 'data' key"
            )
        context_id = body.get('context_id', None)

        # no need to rewrite a get_header function since the user
        # authentication already has one
        # Read the request data
        content_type = UserAuthentication.get_header(request, 'Content-type',
                                                     'text/plain')
        data = body['data']
        brain_populations = body.get('additional_populations')

        # The same token is needed for every storage call below; resolve it once.
        token = UserAuthentication.get_header_token(request)

        # Instantiate the storage client
        client = StorageClient()

        # find the bibi filename from the .exc
        experiment_file = client.get_file(
            token,
            experiment_id,
            'experiment_configuration.exc',
            byname=True)

        bibi_filename = exp_conf_api_gen.CreateFromDocument(
            experiment_file).bibiConf.src

        # find the brain filename from the bibi
        bibi_file = client.get_file(
            token,
            experiment_id,
            bibi_filename,
            byname=True)
        bibi_file_obj = bibi_api_gen.CreateFromDocument(bibi_file)
        brain_filename = bibi_file_obj.brainModel.file

        if 'storage://' in brain_filename:
            # Brain lives in the shared user storage: write it into the user's
            # 'brains' folder, resolved through the context id.
            client.create_or_update(
                token,
                client.get_folder_uuid_by_name(token, context_id, 'brains'),
                os.path.basename(brain_filename), data, content_type)
        else:
            # Brain is part of the experiment folder itself.
            client.create_or_update(
                token, experiment_id,
                os.path.basename(brain_filename), data, content_type)

        # remove all the populations
        del bibi_file_obj.brainModel.populations[:]

        if brain_populations is not None:
            self.parsePopulations(brain_populations, bibi_file_obj)

        # replace the bibi contents in the storage to match the new brain
        # definition
        client.create_or_update(
            token, experiment_id,
            bibi_filename,
            xml.dom.minidom.parseString(
                bibi_file_obj.toxml("utf-8")).toprettyxml(), "text/plain")

        return 200