def _get_image_paths_for_analysis(analysis, shared_folder_map):
    """
    Consolidates all paths to images, as local paths, of the given analysis

    :param analysis: The :class:`~server.model.analysis_model.AnalysisModel` instance for which the images should be gathered
    :param shared_folder_map: A dict containing a mapping from SMB URLs to local mount points

    :return: A tuple ``(raw_images, segmented_images)`` of two lists containing the full local file
        paths of all raw and segmented images belonging to the given
        :class:`~server.model.analysis_model.AnalysisModel` instance

    :raises InvalidPathError: if a stored image location cannot be resolved to a local path
    """
    raw_images = []
    segmented_images = []
    for snapshot in analysis.timestamp.snapshots:
        # Assume that all images of the same snapshot reside in the same directory,
        # so the directory is resolved only once per snapshot and image type
        raw_image_dir = None
        segmented_image_dir = None
        for image in snapshot.images:
            if image.type == 'raw':
                if raw_image_dir is None:
                    raw_image_dir = _resolve_image_dir(image, shared_folder_map)
                raw_images.append(os.path.join(raw_image_dir, image.filename))
            elif image.type == 'segmented':
                if segmented_image_dir is None:
                    segmented_image_dir = _resolve_image_dir(image, shared_folder_map)
                segmented_images.append(os.path.join(segmented_image_dir, image.filename))
    return raw_images, segmented_images


def _resolve_image_dir(image, shared_folder_map):
    """
    Resolves the directory containing the given image as a local path.

    :param image: An image model instance whose ``path`` is an SMB URL
    :param shared_folder_map: A dict containing a mapping from SMB URLs to local mount points

    :return: The local directory (dirname) containing the image

    :raises InvalidPathError: if the stored image location cannot be resolved
    """
    local_path = get_local_path_from_smb(image.path, shared_folder_map)
    # Check the resolved path *before* taking its dirname: the previous code called
    # os.path.dirname(None) on resolution failure, which raised a TypeError and made
    # the intended InvalidPathError unreachable (dirname never returns None)
    if local_path is None:
        raise InvalidPathError(image.path, 'The stored image location could not be resolved')
    return os.path.dirname(local_path)
 def purge(self):
     """
     Removes all artifacts belonging to this analysis: the purge is cascaded to
     every postprocessing, the exported report folder is deleted from local
     storage and all segmented image records of the timestamp are removed
     from the database session.
     """
     folder_map = current_app.config['SHARED_FOLDER_MAP']
     # Cascade the purge to every postprocessing first
     for pp in self.postprocessings:
         pp.purge()
     # Delete the exported report directory from local storage
     report_dir = get_local_path_from_smb(self.report_path, folder_map)
     shutil.rmtree(report_dir)
     # Drop all segmented image records belonging to this analysis' timestamp
     segmented_images = self.timestamp.snapshots.images.where(ImageModel.type == 'segmented')
     for img in segmented_images:
         db.session.delete(img)
def _get_local_image_paths(images, shared_folder_map):
    """
    Consolidates the full local file paths of all raw images in the given collection

    :param images: An iterable of image model instances; only those with type ``'raw'`` are considered
    :param shared_folder_map: A dict containing a mapping from SMB URLs to local mount points

    :return: A list of full local file paths for all raw images

    :raises InvalidPathError: if a stored image location cannot be resolved to a local path
    """
    image_paths = []
    for image in images:
        if image.type != 'raw':
            continue
        local_image_path = get_local_path_from_smb(image.path, shared_folder_map)
        # Check the resolved path *before* taking its dirname: the previous code called
        # os.path.dirname(None) on resolution failure, which raised a TypeError and made
        # the intended InvalidPathError unreachable (dirname never returns None)
        if local_image_path is None:
            raise InvalidPathError(image.path, 'The stored image location could not be resolved')
        image_paths.append(os.path.join(os.path.dirname(local_image_path), image.filename))
    return image_paths
    def generator(name, analysis, postprocessings, raw_image_paths=None, segmented_image_paths=None):
        """
        Creates a generator which yields chunks of a zip file containing the relevant result files.

        :param name: The name of the zip file
        :param analysis: The analysis instance which results should be provided
        :param postprocessings: An iterable of postprocessing instances whose results (and notes)
            should be included in the archive
        :param raw_image_paths: Optional list of local file paths of raw images, placed under
            ``<name>/images/original`` in the archive
        :param segmented_image_paths: Optional list of local file paths of segmented images, placed
            under ``<name>/images/segmented`` in the archive

        :return: A generator which yields chunks of the resulting zip file
        """
        # Avoid mutable default arguments (previously `list()` defaults shared across calls);
        # fall back to fresh empty lists instead
        if raw_image_paths is None:
            raw_image_paths = []
        if segmented_image_paths is None:
            segmented_image_paths = []

        def _traverse_and_write(path, root, arcname):
            # Recursively add every file below 'path' to the archive, preserving its
            # location relative to 'root' under the 'arcname' prefix
            for traversal_root, dirs, files in os.walk(path):
                for filename in files:
                    file_path = os.path.join(traversal_root, filename)
                    z.write(file_path, os.path.join(arcname, os.path.relpath(file_path, root)))

        z = zipstream.ZipFile(mode='w', compression=zipstream.ZIP_DEFLATED)
        analysis_result_root = get_local_path_from_smb(analysis.export_path, shared_folder_map)
        root = os.path.abspath(os.path.join(analysis_result_root, os.pardir))
        _traverse_and_write(analysis_result_root, root, name)

        for postprocess in postprocessings:
            if postprocess.result_path is not None:
                local_path = get_local_path_from_smb(postprocess.result_path, shared_folder_map)
                arcpath = os.path.join(name, os.path.relpath(local_path, root))
                # Include the user supplied note alongside the postprocessing results
                z.writestr(os.path.join(arcpath, 'note.txt'), postprocess.note)
                _traverse_and_write(local_path, root, name)

        image_basepath = os.path.join(name, 'images')
        for image_path in raw_image_paths:
            arcpath = os.path.join(image_basepath, 'original', os.path.basename(image_path))
            z.write(image_path, arcpath)
        for image_path in segmented_image_paths:
            arcpath = os.path.join(image_basepath, 'segmented', os.path.basename(image_path))
            z.write(image_path, arcpath)

        for chunk in z:
            yield chunk
# --- Beispiel #5 (example separator from scrape; score: 0) ---
 def purge(self):
     """
     Deletes this snapshot together with its image records; raw image files are
     also removed from local storage.

     :return: True if the snapshot was deleted, False if its timestamp is already
         completed and deletion is therefore not allowed
     """
     # Only allow to delete a snapshot from an uncompleted timestamp
     if self.timestamp.completed:
         return False  # TODO throw exceptions instead of returning true or false
     folder_map = current_app.config['SHARED_FOLDER_MAP']
     raw_dir = None
     for img in self.images:
         if img.type == 'raw':
             # Resolve the local directory only once; assumes all raw images of
             # this snapshot reside in the same directory
             if raw_dir is None:
                 raw_dir = get_local_path_from_smb(img.path, folder_map)
             os.remove(os.path.join(raw_dir, img.filename))
         db.session.delete(img)
     db.session.delete(self)
     db.session.commit()
     return True
def _get_postproccessing_result_paths(analysis, shared_folder_map):
    """
    Consolidates all paths, as local paths, to folders where postprocessing results of the given analysis are stored into a list

    :param analysis: The :class:`~server.model.analysis_model.AnalysisModel` instance
        for which all postprocess result paths should be gathered
    :param shared_folder_map: A dict containing a mapping from SMB URLs to local mount points

    :return: A list of all result paths (as local paths)

    :raises InvalidPathError: if a stored result path cannot be resolved to a local path
    """
    result_paths = []
    # Postprocessings without a result path are skipped entirely
    with_results = (pp for pp in analysis.postprocessings if pp.result_path is not None)
    for postprocess in with_results:
        resolved = get_local_path_from_smb(postprocess.result_path, shared_folder_map)
        if resolved is None:
            raise InvalidPathError(postprocess.result_path,
                                   'The path to the postprocessing results could not be resolved')
        result_paths.append(resolved)
    return result_paths
# --- Beispiel #7 (example separator from scrape; score: 0) ---
def invoke_iap_export(timestamp_id, output_path, username, shared_folder_map, task_key, analysis_iap_id=None):
    """
    This Methods represents an RQ Job workload. It should be enqueued into the RQ Analysis Queue and processed by an according worker

    Handles the invokation of data export of an IAP analysis on the IAP server and fetches the result information afterwards.
    The received information is then entered into the database accordingly

    :param timestamp_id: The ID of the :class:`~server.models.timestamp_model.TimestampModel` instance to which the data belongs
    :param output_path: The path, as SMB URL, where the data should be exported to
    :param username: The username of the user invoking this job
    :param shared_folder_map: A dict containing a mapping between SMB URLs and local paths representing the corresponding mount points
    :param task_key: The redis key of the :class:`AnalysisTask` to which this job belongs
    :param analysis_iap_id: The IAP ID of the analysis on the IAP server. If None, the ID is taken
        from the result of the job this job depends on

    :return: a dict containing the 'analysis_id' for which the data has been exported
        and the 'path' to which the results have been exported. (All nested inside the 'response' key)
    """
    print('EXECUTE EXPORT')
    job = get_current_job()
    log_store = get_log_store()
    task = AnalysisTask.from_key(get_redis_connection(), task_key)
    channel = get_grpc_channel()
    iap_stub = phenopipe_iap_pb2_grpc.PhenopipeIapStub(channel)
    pipe_stub = phenopipe_pb2_grpc.PhenopipeStub(channel)

    if analysis_iap_id is None:
        # Fall back to the analysis ID produced by the preceding analysis job
        analysis_iap_id = job.dependency.result['response']['result_id']
    log_store.put(job.id, 'Started Export Job', 0)
    task.update_message('Started Export Job')
    try:
        response = iap_stub.ExportExperiment(
            phenopipe_iap_pb2.ExportRequest(experiment_id=analysis_iap_id, destination_path=output_path)
        )
        remote_job_id = response.job_id
        request = phenopipe_pb2.WatchJobRequest(
            job_id=remote_job_id
        )
        # Stream progress messages of the remote export job and forward them to the log store
        status = pipe_stub.WatchJob(request)
        for msg in status:
            # NOTE(review): 'string-escape' is a Python 2 only codec -- confirm the targeted
            # interpreter version before porting this module to Python 3
            decoded_message = msg.message.decode('string-escape')
            print(decoded_message)
            log_store.put(job.id, decoded_message, msg.progress)

        response = iap_stub.FetchExportResult(
            phenopipe_pb2.FetchJobResultRequest(job_id=remote_job_id)
        )
        session = get_session()
        analysis = session.query(AnalysisModel) \
            .filter(AnalysisModel.timestamp_id == timestamp_id) \
            .filter(AnalysisModel.iap_id == analysis_iap_id) \
            .one()

        log_store.put(job.id, 'Received Results. Started to parse and add information', 90)
        task.update_message('Received Results. Started to parse and add information')
        image_path = get_local_path_from_smb(response.image_path, shared_folder_map)
        print('Image Path: {}'.format(image_path))
        # TODO handle DB errors
        for image_name in os.listdir(image_path):
            print('Image Name: {}'.format(image_name))
            # Extract information from filename; presumably the format is
            # '<snapshot_id>_<rest>_<angle>.<ext>' -- verify against the IAP export naming scheme
            snapshot_id, _, new_filename = image_name.partition('_')
            _, _, angle = os.path.splitext(image_name)[0].rpartition('_')

            img = ImageModel(snapshot_id, response.image_path, new_filename, angle, 'segmented')
            session.add(img)
            # rename file and remove the snapshot id
            os.rename(os.path.join(image_path, image_name), os.path.join(image_path, new_filename))
        analysis.export_path = response.path
        analysis.exported_at = datetime.utcnow()
        session.commit()
        log_store.put(job.id, 'Finished Export Job', 100)
        task.update_message('Finished Export Job')
        return create_return_object(JobType.iap_export, timestamp_id,
                                    {'analysis_id': analysis.id, 'path': response.path})
    except grpc.RpcError as e:
        # Record the failure in the log store and task state, then re-raise so the
        # RQ worker marks the job as failed
        log_store.put(job.id, e.details(), 0)
        task.update_message('Export Job Failed')
        print(e.details())
        raise
# --- Beispiel #8 (example separator from scrape; score: 0) ---
 def purge(self):
     """
     Removes the result folder of this postprocessing from local storage.
     """
     folder_map = current_app.config['SHARED_FOLDER_MAP']
     result_dir = get_local_path_from_smb(self.result_path, folder_map)
     shutil.rmtree(result_dir)