def delivery_from_tel_plan_contents(tel_contents):
    pattern = get_control_point_pattern()
    all_controlpoint_results = re.findall(pattern, tel_contents)

    mu = np.cumsum([float(result[3]) for result in all_controlpoint_results])

    iec_gantry_angle = [float(result[1]) for result in all_controlpoint_results]
    bipolar_gantry_angle = pymedphys._utilities.transforms.convert_IEC_angle_to_bipolar(  # pylint: disable = protected-access
        iec_gantry_angle
    )

    iec_coll_angle = [float(result[2]) for result in all_controlpoint_results]
    bipolar_coll_angle = pymedphys._utilities.transforms.convert_IEC_angle_to_bipolar(  # pylint: disable = protected-access
        iec_coll_angle
    )

    mlcs = [convert_mlc_string(result[0]) for result in all_controlpoint_results]

    jaw_gap = np.array([float(result[4]) for result in all_controlpoint_results])
    jaw_field_centre = np.array(
        [float(result[5]) for result in all_controlpoint_results]
    )
    jaw_a = jaw_field_centre + jaw_gap / 2
    jaw_b = -(jaw_field_centre - jaw_gap / 2)
    jaws = np.vstack([jaw_a, jaw_b]).T

    return mu, bipolar_gantry_angle, bipolar_coll_angle, mlcs, jaws
def calculate_expanded_mask(contours, dcm_ct, expansion):
    dx, dy, Cx, Cy, Ox, Oy = get_image_transformation_parameters(dcm_ct)

    ct_size = np.shape(dcm_ct.pixel_array)
    new_ct_size = np.array(ct_size) * expansion

    expanded_mask = np.zeros(new_ct_size)

    for xyz in contours:
        x = np.array(xyz[0::3])
        y = np.array(xyz[1::3])
        z = xyz[2::3]

        if len(set(z)) != 1:
            raise ValueError("Expected only one z value for a given contour")

        r = ((y - Cy) / dy * Oy) * expansion + (expansion - 1) * 0.5
        c = ((x - Cx) / dx * Ox) * expansion + (expansion - 1) * 0.5

        expanded_mask = np.logical_or(
            expanded_mask,
            skimage.draw.polygon2mask(new_ct_size, np.array(list(zip(r, c)))),
        )

    return expanded_mask
def from_lists(self, x, y, meta={}):
    """ import x and y lists

    Parameters
    ----------
    x : list
        List of float x values
    y : list
        List of float y values
    meta : dict, optional

    Returns
    -------
    Profile

    Examples
    --------
    ``profile = Profile().from_lists(x_list, data_list)``

    """
    self.x = np.array(x)
    self.y = np.array(y)
    self.__init__(x=x, y=y, meta=meta)
    return Profile(x=x, y=y, meta=meta)
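
# A minimal usage sketch of the list-based constructor above. It assumes the
# Profile class defined elsewhere in this module is in scope; the variable
# names and values are illustrative only.
x_list = [-2.0, -1.0, 0.0, 1.0, 2.0]
data_list = [0.1, 0.9, 1.0, 0.9, 0.1]
profile = Profile().from_lists(x_list, data_list, meta={"depth": 50})
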
def random_uncompared_logfiles(index, config, compared_hashes):
    index_set = set(index.keys())
    comparison_set = set(compared_hashes)

    not_yet_compared = np.array(list(index_set.difference(comparison_set)))

    field_types = np.array(
        [
            index[file_hash]["delivery_details"]["field_type"]
            for file_hash in not_yet_compared
        ]
    )

    file_hashes_vmat = not_yet_compared[field_types == "VMAT"]

    vmat_filepaths = np.array(
        [
            os.path.join(
                config["linac_logfile_data_directory"],
                "indexed",
                index[file_hash]["filepath"],
            )
            for file_hash in file_hashes_vmat
        ]
    )

    shuffle_index = np.arange(len(vmat_filepaths))
    np.random.shuffle(shuffle_index)

    return file_hashes_vmat[shuffle_index], vmat_filepaths[shuffle_index]
def visual_circle_and_ellipse(insert_x, insert_y, width, length, circle_centre):
    t = np.linspace(0, 2 * np.pi)
    circle = {
        "x": width / 2 * np.sin(t) + circle_centre[0],
        "y": width / 2 * np.cos(t) + circle_centre[1],
    }

    x_shift, y_shift, rotation_angle = visual_alignment_of_equivalent_ellipse(
        insert_x, insert_y, width, length, None
    )

    rotation_matrix = np.array(
        [
            [np.cos(rotation_angle), -np.sin(rotation_angle)],
            [np.sin(rotation_angle), np.cos(rotation_angle)],
        ]
    )

    ellipse = np.array([length / 2 * np.sin(t), width / 2 * np.cos(t)]).T

    rotated_ellipse = ellipse @ rotation_matrix

    translated_ellipse = rotated_ellipse + np.array([y_shift, x_shift])
    ellipse = {"x": translated_ellipse[:, 1], "y": translated_ellipse[:, 0]}

    return circle, ellipse
def _matches_fraction(
    self, dicom_dataset, fraction_number, gantry_tol=3, meterset_tol=0.5
):
    filtered = self._filter_cps()
    dicom_metersets = get_fraction_group_beam_sequence_and_meterset(
        dicom_dataset, fraction_number
    )[1]

    dicom_fraction = convert_to_one_fraction_group(dicom_dataset, fraction_number)

    gantry_angles = get_gantry_angles_from_dicom(dicom_fraction)

    delivery_metersets = filtered._metersets(  # pylint: disable = protected-access
        gantry_angles, gantry_tol
    )

    try:
        maximum_diff = np.max(
            np.abs(np.array(dicom_metersets) - np.array(delivery_metersets))
        )
    except ValueError:
        maximum_diff = np.inf

    return maximum_diff <= meterset_tol
def dicom_dose_interpolate(interp_coords, dicom_dose_dataset):
    """Interpolates across a DICOM dose dataset.

    Parameters
    ----------
    interp_coords : tuple(z, y, x)
        A tuple of coordinates in DICOM order, z axis first, then y, then x,
        where x, y, and z are DICOM axes.
    dicom_dose_dataset : pydicom.Dataset
        An RT DICOM Dose object
    """
    interp_z = np.array(interp_coords[0], copy=False)[:, None, None]
    interp_y = np.array(interp_coords[1], copy=False)[None, :, None]
    interp_x = np.array(interp_coords[2], copy=False)[None, None, :]

    coords, dicom_dose_dataset = zyx_and_dose_from_dataset(dicom_dose_dataset)
    interpolation = scipy.interpolate.RegularGridInterpolator(
        coords, dicom_dose_dataset
    )

    try:
        result = interpolation((interp_z, interp_y, interp_x))
    except ValueError:
        print(f"coords: {coords}")
        raise

    return result
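
# A hedged usage sketch for dicom_dose_interpolate. The filename and the
# interpolation grid below are illustrative assumptions only; the sketch
# assumes pydicom is available and that the file read in is an RT Dose object
# sharing the same patient coordinate system as the requested grid.
import pydicom  # assumed to be available alongside numpy as np

dose_dataset = pydicom.dcmread("rtdose.dcm")  # hypothetical example file
interp_z = np.arange(-100.0, 100.0, 2.0)
interp_y = np.arange(-150.0, 150.0, 2.0)
interp_x = np.arange(-150.0, 150.0, 2.0)
interpolated_dose = dicom_dose_interpolate(
    (interp_z, interp_y, interp_x), dose_dataset
)
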
def _fraction_number(self, dicom_template, gantry_tol=3, meterset_tol=0.5):
    fractions = dicom_template.FractionGroupSequence

    if len(fractions) == 1:
        return fractions[0].FractionGroupNumber

    fraction_numbers = [fraction.FractionGroupNumber for fraction in fractions]

    fraction_matches = np.array(
        [
            self._matches_fraction(
                dicom_template,
                fraction_number,
                gantry_tol=gantry_tol,
                meterset_tol=meterset_tol,
            )
            for fraction_number in fraction_numbers
        ]
    )

    if np.sum(fraction_matches) < 1:
        raise ValueError(
            "A fraction group was not able to be found with the metersets "
            "and gantry angles defined by the tolerances provided. "
            "Please manually define the fraction group number."
        )

    if np.sum(fraction_matches) > 1:
        raise ValueError(
            "More than one fraction group was found that had metersets "
            "and gantry angles within the tolerances provided. "
            "Please manually define the fraction group number."
        )

    fraction_number = np.array(fraction_numbers)[fraction_matches]

    return fraction_number
def calculate_anti_aliased_mask(contours, dcm_ct, expansion=5):
    transformation_params = get_image_transformation_parameters(dcm_ct)
    dx, dy, Cx, Cy, Ox, Oy = transformation_params

    x_grid, y_grid, ct_size = get_grid(
        dcm_ct, transformation_params=transformation_params
    )

    new_ct_size = np.array(ct_size) * expansion
    expanded_mask = np.zeros(new_ct_size)

    for xyz in contours:
        x = np.array(xyz[0::3])
        y = np.array(xyz[1::3])
        z = xyz[2::3]

        assert len(set(z)) == 1

        r = ((y - Cy) / dy * Oy) * expansion + (expansion - 1) * 0.5
        c = ((x - Cx) / dx * Ox) * expansion + (expansion - 1) * 0.5

        expanded_mask = np.logical_or(
            expanded_mask,
            skimage.draw.polygon2mask(new_ct_size, np.array(list(zip(r, c)))),
        )

    mask = reduce_expanded_mask(expanded_mask, ct_size[0], expansion)
    mask = 2 * mask - 1

    return x_grid, y_grid, mask
def find_data_index(file_contents):
    """Searches through the mephysto file for where BEGIN_DATA and END_DATA
    appear. This region contains only the raw data in column format.

    Returns a list of ranges which span each data region.
    """
    # Find the positions of BEGIN_DATA
    # Demo of match -- https://regex101.com/r/lR4pS2/5
    begin_data_index = np.array(
        [
            i
            for i, item in enumerate(file_contents)
            if re.search(r"^\t\tBEGIN_DATA$", item)
        ]
    ).astype(int)

    # Find the positions of END_DATA
    # Demo of match -- https://regex101.com/r/lR4pS2/6
    end_data_index = np.array(
        [
            i
            for i, item in enumerate(file_contents)
            if re.search(r"^\t\tEND_DATA$", item)
        ]
    ).astype(int)

    # Convert the indices into the range type allowing for easy looping
    data_index = [
        range(begin_data_index[i] + 1, end_data_index[i])
        for i in range(len(begin_data_index))
    ]

    return data_index
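
# A small runnable illustration of find_data_index. The list below is a
# made-up stand-in for the lines of a mephysto file; real files contain far
# more metadata between these markers.
example_contents = [
    "\tBEGIN_SCAN  1",
    "\t\tBEGIN_DATA",
    "\t\t-10.0\t0.21",
    "\t\t0.0\t1.00",
    "\t\t10.0\t0.22",
    "\t\tEND_DATA",
    "\tEND_SCAN  1",
]
# Yields [range(2, 5)], i.e. the line indices holding the raw data columns.
print(find_data_index(example_contents))
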
def find_scan_index(file_contents):
    """Searches through the mephysto file for where BEGIN_SCAN and END_SCAN
    appear. This region contains all of the information for a given scan
    within the mephysto file.

    Returns a list of ranges which span each scan region.
    """
    # Find the positions of all BEGIN_SCAN
    # Demo of match -- https://regex101.com/r/lR4pS2/2
    # Demo of ignoring "BEGIN_SCAN_DATA" -- https://regex101.com/r/lR4pS2/4
    # Basic guide to regular expressions (regex):
    #     http://www.regular-expressions.info/quickstart.html
    begin_scan_index = np.array(
        [
            i
            for i, item in enumerate(file_contents)
            if re.search(r"^\tBEGIN_SCAN\s\s\d+$", item)
        ]
    ).astype(int)

    # Find the positions of all END_SCAN
    # Regex demo here -- https://regex101.com/r/lR4pS2/3
    end_scan_index = np.array(
        [
            i
            for i, item in enumerate(file_contents)
            if re.search(r"^\tEND_SCAN\s\s\d+$", item)
        ]
    ).astype(int)

    # Convert the indices into the range type allowing for easy looping
    scan_index = [
        range(begin_scan_index[i] + 1, end_scan_index[i])
        for i in range(len(begin_scan_index))
    ]

    return scan_index
def calc_min_distance(cube_definition, contours):
    vertices = cube_vertices(cube_definition)

    vectors = cube_vectors(cube_definition)
    unit_vectors = [vector / np.linalg.norm(vector) for vector in vectors]

    plane_norms = np.array(
        [
            unit_vectors[1],
            -unit_vectors[0],
            -unit_vectors[1],
            unit_vectors[0],
            unit_vectors[2],
            -unit_vectors[2],
        ]
    )

    plane_points = np.array(
        [vertices[0], vertices[1], vertices[2], vertices[0], vertices[0], vertices[3]]
    )

    plane_origin_dist = -np.sum(plane_points * plane_norms, axis=1)

    distance_to_planes = np.dot(plane_norms, contours) + plane_origin_dist[:, None]

    min_dist_squared = np.min(distance_to_planes ** 2, axis=0)

    return min_dist_squared
def __init__(self, x=np.array([]), y=np.array([]), meta={}):
    """ create profile

    Parameters
    ----------
    x : np.array, optional
    y : np.array, optional
    meta : dict, optional

    Notes
    -----
    Normally created empty, then filled using a method, which
    returns a new Profile.

    """
    self.x = np.array(x)
    self.y = np.array(y)
    self.meta = meta
    if len(self.x) < 2:
        self.interp = None
    else:
        self.interp = interpolate.interp1d(
            self.x, self.y, bounds_error=False, fill_value=0.0
        )
def pull_coords_from_contour_sequence(contour_sequence):
    contours_by_slice_raw = [item.ContourData for item in contour_sequence]

    x = [np.array(item[0::3]) for item in contours_by_slice_raw]
    y = [np.array(item[1::3]) for item in contours_by_slice_raw]
    z = [np.array(item[2::3]) for item in contours_by_slice_raw]

    return x, y, z
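
# A small runnable illustration of pull_coords_from_contour_sequence. The
# SimpleNamespace objects below stand in for pydicom contour items, which only
# need a ContourData attribute of flattened (x, y, z) triplets.
from types import SimpleNamespace

fake_contour_sequence = [
    SimpleNamespace(ContourData=[0.0, 0.0, 5.0, 10.0, 0.0, 5.0, 10.0, 10.0, 5.0]),
    SimpleNamespace(ContourData=[0.0, 0.0, 10.0, 10.0, 0.0, 10.0, 10.0, 10.0, 10.0]),
]
x, y, z = pull_coords_from_contour_sequence(fake_contour_sequence)
# x[0] == array([ 0., 10., 10.]), z[0] == array([5., 5., 5.])
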
def field(x, y):
    # Evaluated as a closure: ``centre``, ``theta``, ``width_profile`` and
    # ``length_profile`` are taken from the enclosing scope.
    x = np.array(x, copy=False)
    y = np.array(y, copy=False)
    x_shifted = x - centre[0]
    y_shifted = y - centre[1]

    x_rotated, y_rotated = rotate_coords(x_shifted, y_shifted, theta)

    return width_profile(x_rotated) * length_profile(y_rotated)
def create_dose_function(net_od, dose):
    net_od = np.array(net_od, copy=False)
    dose = np.array(dose, copy=False)

    to_minimise = create_to_minimise(net_od, dose)
    result = basinhopping(to_minimise, [np.max(dose) / np.max(net_od), 1, 1])

    return create_cal_fit(*result.x)
def as_array(self, only_coords: bool = True):
    """Return the point as a numpy array."""
    if only_coords:
        return np.array([getattr(self, item) for item in self._coord_list])
    else:
        return np.array(
            [
                getattr(self, item)
                for item in self._attr_list
                if (getattr(self, item) is not None)
            ]
        )
def calculate_coordinates_shell_1d(distance):
    """Output the two points that are of the defined distance in
    one-dimension.
    """
    if distance == 0:
        x_coords = np.array([0])
    else:
        x_coords = np.array([distance, -distance])

    return (x_coords,)
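
# A quick runnable check of calculate_coordinates_shell_1d: a non-zero
# distance returns the pair of offsets either side of the origin, while zero
# collapses to a single point.
(shell,) = calculate_coordinates_shell_1d(3)
print(shell)  # [ 3 -3]
print(calculate_coordinates_shell_1d(0))  # (array([0]),)
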
def from_snc_profiler(self, file_name, axis):
    """ import profile from SNC Profiler file

    Parameters
    ----------
    file_name : string
        file name with path, .prs
    axis : string
        'tvs' or 'rad'

    Returns
    -------
    Profile

    Raises
    ------
    TypeError
        if axis invalid

    """
    with open(file_name) as profiler_file:
        munge = "\n".join(profiler_file.readlines())
        munge = munge.replace("\t", "").replace(": ", ":")
        munge = munge.replace(" Time:", "\nTime:")  # BREAK 2-ITEM ROWS
        munge = munge.replace(" Revision:", "\nRevision:")
        munge = munge.replace("Energy:", "\nEnergy:")
        munge = munge.replace("Dose:", "\nDose:")
        munge = munge.replace("Collimator Angle:", "\nCollimator Angle:")
        munge = munge.split("TYPE")[0].split("\n")  # DISCARD NON-METADATA
        munge = [i.split(":", 1) for i in munge if i and ":" in i]
        munge = [i for i in munge if i[1]]  # DISCARD EMPTY ITEMS
        meta = dict(munge)

    with open(file_name) as profiler_file:
        for row in profiler_file.readlines():
            if row[:11] == "Calibration" and "File" not in row:
                calibs = np.array(row.split())[1:].astype(float)
            elif row[:5] == "Data:":
                counts = np.array(row.split()[5:145]).astype(float)
            elif row[:15] == "Dose Per Count:":
                dose_per_count = float(row.split()[-1])
    dose = counts * dose_per_count * calibs

    x_vals = [-11.2 + 0.4 * i for i in range(57)]
    x_prof = list(zip(x_vals, dose[:57]))
    y_vals = [-16.4 + 0.4 * i for i in range(83)]
    y_prof = list(zip(y_vals, dose[57:]))

    if axis == "tvs":
        return Profile().from_tuples(x_prof, meta=meta)
    elif axis == "rad":
        return Profile().from_tuples(y_prof, meta=meta)
    else:
        raise TypeError("axis must be 'tvs' or 'rad'")
def calculate_deformability(x_test, y_test, x_data, y_data, z_data):
    """Return the result of the deformability test.

    This function takes an array of test points and loops over
    ``_single_calculate_deformability``.

    The deformability test applies a shift to the spline to determine whether
    or not sufficient information for modelling is available. For further
    details on the deformability test see the *Methods: Defining valid
    prediction regions of the spline* section within
    <http://dx.doi.org/10.1016/j.ejmp.2015.11.002>.

    Parameters
    ----------
    x_test : np.ndarray
        The x coordinate of the point(s) to test
    y_test : np.ndarray
        The y coordinate of the point(s) to test
    x_data : np.ndarray
        The x coordinate of the model data to test
    y_data : np.ndarray
        The y coordinate of the model data to test
    z_data : np.ndarray
        The z coordinate of the model data to test

    Returns
    -------
    deformability : float
        The resulting deformability between 0 and 1 representing the ratio of
        deviation the spline model underwent at the point in question by
        introducing an outlier at the point in question.
    """
    dim = np.shape(x_test)

    if np.size(dim) == 0:
        deformability = _single_calculate_deformability(
            x_test, y_test, x_data, y_data, z_data
        )

    elif np.size(dim) == 1:
        deformability = np.array(
            [
                _single_calculate_deformability(
                    x_test[i], y_test[i], x_data, y_data, z_data
                )
                for i in range(dim[0])
            ]
        )

    else:
        deformability = np.array(
            [
                [
                    _single_calculate_deformability(
                        x_test[i, j], y_test[i, j], x_data, y_data, z_data
                    )
                    for j in range(dim[1])
                ]
                for i in range(dim[0])
            ]
        )

    return deformability
def delivery_from_tel_plan_contents(tel_contents):
    pattern = get_control_point_pattern()
    all_controlpoint_results = re.findall(pattern, tel_contents)

    mu = np.cumsum([float(result[4]) for result in all_controlpoint_results]).tolist()

    iec_gantry_angle = [float(result[2]) for result in all_controlpoint_results]
    bipolar_gantry_angle = pymedphys._utilities.transforms.convert_IEC_angle_to_bipolar(  # pylint: disable = protected-access
        iec_gantry_angle
    ).tolist()

    iec_coll_angle = [float(result[3]) for result in all_controlpoint_results]
    bipolar_coll_angle = pymedphys._utilities.transforms.convert_IEC_angle_to_bipolar(  # pylint: disable = protected-access
        iec_coll_angle
    ).tolist()

    mlcs = [convert_mlc_string(result[0]) for result in all_controlpoint_results]

    jaw_gap = np.array([float(result[5]) for result in all_controlpoint_results])
    jaw_field_centre = np.array(
        [float(result[6]) for result in all_controlpoint_results]
    )
    jaw_a = jaw_field_centre + jaw_gap / 2
    jaw_b = -(jaw_field_centre - jaw_gap / 2)
    jaws = np.vstack([jaw_a, jaw_b]).T.tolist()

    for i in range(len(mu) - 1, -1, -1):
        result = all_controlpoint_results[i]

        if result[1] == "2,2":  # A nasty hack to attempt to find static fields
            if i == 0:
                mu = [0] + mu
            else:
                mu = mu[0:i] + [mu[i - 1]] + mu[i::]

            bipolar_gantry_angle = (
                bipolar_gantry_angle[0:i]
                + [bipolar_gantry_angle[i]]
                + bipolar_gantry_angle[i::]
            )
            bipolar_coll_angle = (
                bipolar_coll_angle[0:i]
                + [bipolar_coll_angle[i]]
                + bipolar_coll_angle[i::]
            )
            mlcs = mlcs[0:i] + [mlcs[i]] + mlcs[i::]
            jaws = jaws[0:i] + [jaws[i]] + jaws[i::]
        elif result[1] != "1,1":
            raise ValueError(
                "Detection for static or dynamic control points has fallen down"
            )

    return mu, bipolar_gantry_angle, bipolar_coll_angle, mlcs, jaws
def test_if_in_range(point_test, point_start, point_end):
    # True when the projection of point_test onto the segment direction lies
    # between the projections of the two end points.
    point_test = np.array(point_test)
    point_start = np.array(point_start)
    point_end = np.array(point_end)

    vector = point_end - point_start
    dot = np.dot(point_test, vector)
    item = [dot, np.dot(vector, point_start), np.dot(vector, point_end)]
    item.sort()

    return item[1] == dot
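
# A quick runnable check of test_if_in_range using simple 2D points: the
# origin projects inside the segment from (-1, -1) to (2, 2), while (5, 5)
# projects beyond its end.
print(test_if_in_range([0, 0], [-1, -1], [2, 2]))  # True
print(test_if_in_range([5, 5], [-1, -1], [2, 2]))  # False
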
def group_consecutive_logfiles(file_hashes, index):
    times = np.array([index[key]["local_time"] for key in file_hashes]).astype(
        np.datetime64
    )

    sort_reference = np.argsort(times)
    file_hashes = file_hashes[sort_reference]
    times = times[sort_reference]

    hours_4 = np.array(60 * 60 * 4).astype(np.timedelta64)
    split_locations = np.where(np.diff(times) >= hours_4)[0] + 1

    return np.split(file_hashes, split_locations)
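
# A hedged runnable sketch of group_consecutive_logfiles. The hashes and
# timestamps below are fabricated illustrations of the index structure; only
# the "local_time" key is required by this function.
fake_index = {
    "hash_a": {"local_time": "2020-01-01 10:00:00"},
    "hash_b": {"local_time": "2020-01-01 10:20:00"},
    "hash_c": {"local_time": "2020-01-01 18:00:00"},
}
groups = group_consecutive_logfiles(np.array(list(fake_index)), fake_index)
# Expected: two groups, ["hash_a", "hash_b"] and ["hash_c"], since the third
# logfile is more than four hours after the second.
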
def apply_transform(xx, yy, transform):
    xx = np.array(xx, copy=False)
    yy = np.array(yy, copy=False)

    xx_flat = np.ravel(xx)
    transformed = transform @ np.vstack([xx_flat, np.ravel(yy), np.ones_like(xx_flat)])

    xx_transformed = transformed[0]
    yy_transformed = transformed[1]

    xx_transformed.shape = xx.shape
    yy_transformed.shape = yy.shape

    return xx_transformed, yy_transformed
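
# A quick runnable check of apply_transform with a homogeneous 2D translation
# matrix: every (x, y) point is shifted by (+5, -2) while the grid shape is
# preserved.
translation = np.array([[1.0, 0.0, 5.0], [0.0, 1.0, -2.0], [0.0, 0.0, 1.0]])
xx, yy = np.meshgrid([0.0, 1.0, 2.0], [0.0, 1.0])
xx_shifted, yy_shifted = apply_transform(xx, yy, translation)
# xx_shifted spans 5..7 and yy_shifted spans -2..-1, each keeping the 2x3 shape.
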
def decode_msq_mlc(raw_bytes):
    """Convert MLCs from Mosaiq SQL byte format to cm floats."""
    raw_bytes = mosaiq_mlc_missing_byte_workaround(raw_bytes)

    length = check_all_items_equal_length(raw_bytes, "mlc bytes")

    if length % 2 == 1:
        raise ValueError(
            "There should be an even number of bytes within an MLC record."
        )

    mlc_pos = (
        np.array(
            [
                [
                    struct.unpack("<h", control_point[2 * i : 2 * i + 2])
                    for i in range(len(control_point) // 2)
                ]
                for control_point in raw_bytes
            ]
        )
        / 100
    )

    return mlc_pos
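
# A small runnable illustration of the byte format handled above: each leaf
# position is stored as a little-endian signed 16-bit integer ("<h") in
# hundredths of a centimetre, so packing 1234 and -250 decodes back to
# 12.34 cm and -2.5 cm. This bypasses the Mosaiq-specific helpers and only
# demonstrates the struct decode itself.
example_record = struct.pack("<h", 1234) + struct.pack("<h", -250)
decoded = (
    np.array(
        [struct.unpack("<h", example_record[2 * i : 2 * i + 2]) for i in range(2)]
    )
    / 100
)
print(decoded)  # [[12.34], [-2.5]]
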
def batch_process(
    image_paths, edge_lengths, bb_diameter=8, penumbra=2, display_figure=True
):
    bb_centres = []
    field_centres = []
    field_rotations = []

    for image_path in image_paths:
        bb_centre, field_centre, field_rotation = iview_find_bb_and_field(
            image_path,
            edge_lengths,
            bb_diameter=bb_diameter,
            penumbra=penumbra,
            display_figure=display_figure,
        )

        bb_centres.append(bb_centre)
        field_centres.append(field_centre)
        field_rotations.append(field_rotation)

        if display_figure:
            plt.show()

    data = np.concatenate(
        [bb_centres, field_centres, np.array(field_rotations)[:, None]], axis=1
    )
    return pd.DataFrame(
        data=data, columns=["BB x", "BB y", "Field x", "Field y", "Rotation"]
    )
def get_dose_grid_structure_mask(structure_name, dcm_struct, dcm_dose):
    x_dose, y_dose, z_dose = xyz_axes_from_dataset(dcm_dose)

    xx_dose, yy_dose = np.meshgrid(x_dose, y_dose)
    points = np.swapaxes(np.vstack([xx_dose.ravel(), yy_dose.ravel()]), 0, 1)

    x_structure, y_structure, z_structure = pull_structure(structure_name, dcm_struct)
    structure_z_values = np.array([item[0] for item in z_structure])

    mask = np.zeros((len(y_dose), len(x_dose), len(z_dose)), dtype=bool)

    for z_val in structure_z_values:
        structure_indices = _get_indices(z_structure, z_val)

        for structure_index in structure_indices:
            dose_index = int(np.where(z_dose == z_val)[0])

            assert z_structure[structure_index][0] == z_dose[dose_index]

            structure_polygon = matplotlib.path.Path(
                [
                    (
                        x_structure[structure_index][i],
                        y_structure[structure_index][i],
                    )
                    for i in range(len(x_structure[structure_index]))
                ]
            )
            mask[:, :, dose_index] = mask[:, :, dose_index] | (
                structure_polygon.contains_points(points).reshape(
                    len(y_dose), len(x_dose)
                )
            )

    return mask
def _get_indices(z_list, z_val):
    indices = np.array([item[0] for item in z_list])

    # This will error if more than one contour exists on a given slice
    desired_indices = np.where(indices == z_val)[0]
    # Multiple contour sets per slice not yet implemented

    return desired_indices
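
# A quick runnable check of _get_indices: with three contours whose first z
# values are 2.0, 4.0 and 2.0, querying z_val=2.0 returns indices 0 and 2.
fake_z_list = [np.array([2.0]), np.array([4.0]), np.array([2.0])]
print(_get_indices(fake_z_list, 2.0))  # [0 2]
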
def find_consecutive_logfiles(field_id_key_map, field_id, filehash, index):
    keys = np.array(field_id_key_map[field_id])

    times = np.array([index[key]["local_time"] for key in keys]).astype(np.datetime64)

    sort_reference = np.argsort(times)
    keys = keys[sort_reference]
    times = times[sort_reference]

    hours_4 = np.array(60 * 60 * 4).astype(np.timedelta64)

    delivery_time = np.array(index[filehash]["local_time"]).astype(np.datetime64)

    within_4_hours_reference = np.abs(delivery_time - times) < hours_4
    within_4_hours = keys[within_4_hours_reference].tolist()

    return within_4_hours
def calc_mu_density_return_grid(
    mu,
    mlc,
    jaw,
    grid_resolution=__DEFAULT_GRID_RESOLUTION,
    max_leaf_gap=__DEFAULT_MAX_LEAF_GAP,
    leaf_pair_widths=__DEFAULT_LEAF_PAIR_WIDTHS,
    min_step_per_pixel=__DEFAULT_MIN_STEP_PER_PIXEL,
):
    """DEPRECATED. This is a temporary helper function to provide the old API."""
    leaf_pair_widths = np.array(leaf_pair_widths)

    mu_density = calc_mu_density(
        mu,
        mlc,
        jaw,
        grid_resolution=grid_resolution,
        max_leaf_gap=max_leaf_gap,
        leaf_pair_widths=leaf_pair_widths,
        min_step_per_pixel=min_step_per_pixel,
    )
    full_grid = get_grid(max_leaf_gap, grid_resolution, leaf_pair_widths)

    grid_xx, grid_yy = np.meshgrid(full_grid["mlc"], full_grid["jaw"])

    return grid_xx, grid_yy, mu_density