Example #1
def create_dataset(template_specifications, visit_ages=None, dataset_filenames=None, subject_ids=None, dimension=None):
    """
    Creates a longitudinal dataset object from XML parameters.
    """
    deformable_objects_dataset = []
    if dataset_filenames is not None:
        for i in range(len(dataset_filenames)):
            deformable_objects_subject = []
            for j in range(len(dataset_filenames[i])):
                object_list = []
                reader = DeformableObjectReader()
                for object_id in template_specifications.keys():
                    if object_id not in dataset_filenames[i][j]:
                        raise RuntimeError('The template object with id ' + object_id + ' is not found for the visit '
                                           + str(j) + ' of subject ' + str(i) + '. Check the dataset xml.')
                    else:
                        object_type = template_specifications[object_id]['deformable_object_type']
                        object_list.append(reader.create_object(dataset_filenames[i][j][object_id], object_type,
                                                                dimension))
                deformable_objects_subject.append(DeformableMultiObject(object_list))
            deformable_objects_dataset.append(deformable_objects_subject)

    longitudinal_dataset = LongitudinalDataset(
        subject_ids, times=visit_ages, deformable_objects=deformable_objects_dataset)

    return longitudinal_dataset
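
For reference, a minimal sketch of how create_dataset might be called. The object id ('brain'), the 'image' object type, the file names and the ages are illustrative placeholders, and the reader and dataset classes used above are assumed to be importable from the surrounding package.

# Illustrative placeholders: object id, object type, file names and ages.
template_specifications = {
    'brain': {'deformable_object_type': 'image'}
}
# One outer entry per subject, one inner entry per visit; each visit maps
# object ids to the corresponding file on disk.
dataset_filenames = [
    [{'brain': 'subject0_visit0.nii'}, {'brain': 'subject0_visit1.nii'}],
    [{'brain': 'subject1_visit0.nii'}],
]
visit_ages = [[70.1, 72.3], [65.0]]
subject_ids = ['s0', 's1']

dataset = create_dataset(template_specifications, visit_ages=visit_ages,
                         dataset_filenames=dataset_filenames,
                         subject_ids=subject_ids, dimension=3)
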
Example #2
def create_scalar_dataset(group, observations, timepoints):
    """
    Builds a longitudinal dataset of scalar observations from the given data.
    """

    times = []
    subject_ids = []
    scalars = []

    # Group timepoints and observations by distinct subject id.
    for subject_id in group:
        if subject_id not in subject_ids:
            subject_ids.append(subject_id)
            times_subject = []
            scalars_subject = []
            for i in range(len(observations)):
                if group[i] == subject_id:
                    times_subject.append(timepoints[i])
                    scalars_subject.append(observations[i])
            assert len(times_subject) > 0, subject_id
            assert len(times_subject) == len(scalars_subject)
            times.append(np.array(times_subject))
            scalars.append(Variable(torch.from_numpy(np.array(scalars_subject)).type(Settings().tensor_scalar_type)))

    longitudinal_dataset = LongitudinalDataset()
    longitudinal_dataset.times = times
    longitudinal_dataset.subject_ids = subject_ids
    longitudinal_dataset.deformable_objects = scalars
    longitudinal_dataset.number_of_subjects = len(subject_ids)
    longitudinal_dataset.total_number_of_observations = len(timepoints)

    return longitudinal_dataset
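
A minimal sketch of the expected inputs, assuming group, timepoints and observations are parallel sequences with one entry per observation (all values are placeholders):

import numpy as np

group = ['s0', 's0', 's1']              # subject id of each observation
timepoints = [70.1, 72.3, 65.0]         # age at each observation
observations = [np.array([1.0, 0.5]),   # scalar values of each observation
                np.array([1.2, 0.6]),
                np.array([0.9, 0.4])]

dataset = create_scalar_dataset(group, observations, timepoints)
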
Example #3
def create_image_dataset(group, observations, timepoints):
    """
    Builds a longitudinal dataset of images from the given data.
    """
    times = []
    subject_ids = []
    images = []

    for subject_id in group:
        if subject_id not in subject_ids:
            subject_ids.append(subject_id)
            times_subject = []
            images_subject = []
            for i in range(len(observations)):
                if group[i] == subject_id:
                    times_subject.append(timepoints[i])
                    images_subject.append(observations[i])
            assert len(times_subject) > 0, subject_id
            assert len(times_subject) == len(images_subject)
            times.append(np.array(times_subject))
            images.append(images_subject)

    longitudinal_dataset = LongitudinalDataset()
    longitudinal_dataset.times = times
    longitudinal_dataset.subject_ids = subject_ids
    longitudinal_dataset.deformable_objects = images
    longitudinal_dataset.number_of_subjects = len(subject_ids)
    longitudinal_dataset.total_number_of_observations = len(timepoints)
    longitudinal_dataset.check_image_shapes()
    longitudinal_dataset.order_observations()

    return longitudinal_dataset
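
The image variant takes the same kind of parallel inputs, except that observations should already contain loaded image objects, for instance built with the DeformableObjectReader used in the other examples. The file names and the 'image' type string below are placeholders:

reader = DeformableObjectReader()
group = ['s0', 's0', 's1']
timepoints = [70.1, 72.3, 65.0]
observations = [reader.create_object(f, 'image')   # placeholder files and type
                for f in ['s0_v0.nii', 's0_v1.nii', 's1_v0.nii']]

dataset = create_image_dataset(group, observations, timepoints)
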
Example #4
def read_and_create_image_dataset(dataset_filenames, visit_ages, subject_ids, template_specifications):
    """
    Builds a longitudinal dataset of images (non-deformable images). Loads everything into memory.
    TODO: assert on the format of the images.
    """
    deformable_objects_dataset = []

    for i in range(len(dataset_filenames)):
        deformable_objects_subject = []
        for j in range(len(dataset_filenames[i])):
            # A single template object per visit is assumed here: only the last
            # object created in the inner loop is appended below.
            for object_id in template_specifications.keys():
                if object_id not in dataset_filenames[i][j]:
                    raise RuntimeError('The template object with id ' + object_id + ' is not found for the visit '
                                       + str(j) + ' of subject ' + str(i) + '. Check the dataset xml.')
                else:
                    object_type = template_specifications[object_id]['deformable_object_type']
                    reader = DeformableObjectReader()
                    deformable_object_visit = reader.create_object(dataset_filenames[i][j][object_id], object_type)
                    deformable_object_visit.update()
            deformable_objects_subject.append(deformable_object_visit)
        if len(deformable_objects_subject) <= 1:
            msg = "Subject {} has only {} observation(s).".format(str(i), len(deformable_objects_subject))
            warnings.warn(msg)
        deformable_objects_dataset.append(deformable_objects_subject)

    longitudinal_dataset = LongitudinalDataset()
    longitudinal_dataset.times = [np.array(elt) for elt in visit_ages]
    longitudinal_dataset.subject_ids = subject_ids
    longitudinal_dataset.deformable_objects = deformable_objects_dataset
    longitudinal_dataset.update()
    longitudinal_dataset.check_image_shapes()
    longitudinal_dataset.order_observations()

    return longitudinal_dataset
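
A minimal sketch of a call, using the same nested dataset_filenames layout as in Example #1 (object id, file names and ages are placeholders):

template_specifications = {'img': {'deformable_object_type': 'image'}}
dataset_filenames = [
    [{'img': 's0_v0.nii'}, {'img': 's0_v1.nii'}],
    [{'img': 's1_v0.nii'}, {'img': 's1_v1.nii'}],
]
visit_ages = [[70.1, 72.3], [65.0, 66.8]]
subject_ids = ['s0', 's1']

dataset = read_and_create_image_dataset(dataset_filenames, visit_ages,
                                        subject_ids, template_specifications)
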
Example #5
                    mean_observation_time_window,
                    math.sqrt(model.get_time_shift_variance()))

                time_between_two_consecutive_visits = observation_time_window / float(
                    number_of_visits - 1)
                age_at_baseline = normal(model.get_reference_time(), math.sqrt(model.get_time_shift_variance())) \
                                  - 0.5 * observation_time_window

                ages = [
                    age_at_baseline + j * time_between_two_consecutive_visits
                    for j in range(number_of_visits)
                ]
                visit_ages.append(ages)

        subject_ids = ['s' + str(i) for i in range(number_of_subjects)]
        dataset = LongitudinalDataset(subject_ids, times=visit_ages)

        logger.info(
            '>> %d subjects will be generated, with %.2f visits on average, covering an average period of %.2f years.'
            % (number_of_subjects,
               float(dataset.total_number_of_observations) / float(number_of_subjects),
               np.mean(np.array([ages[-1] - ages[0] for ages in dataset.times]))))
        """
        Generate individual RER.
        """

        # Complementary xml parameters.
        t0 = xml_parameters.t0
        tmin = xml_parameters.tmin