def workbench_redict(instance_id, study_id):
    """Redirect a study-level workbench URL to the workbench of its first series.

    :param instance_id: instance id, passed through into the redirect URL.
    :param study_id: id of the study whose first series is looked up.
    :return: a 302 redirect to the series workbench page, or the 404 page
        when the study has no series.
    """
    series_service = SeriesService(get_conn())
    series = series_service.query({"study_id": study_id})
    if series:
        series_id = series[0]['series_id']
        return redirect(
            f"/workbench/instance/{instance_id}/study/{study_id}/series/{series_id}",
            code=302)
    # BUG FIX: previously the function fell through and implicitly returned
    # None when no series existed, which makes Flask raise a TypeError.
    return render_template("404.html")
def import_dcm(self, instance_id, data_path):
    """Scan a data folder for DICOM series and register them in the database.

    Marks the instance as "importing", extracts every series found under
    ``DATA_ROOT/data_path``, inserts one study row per study-UID and one
    series row per series folder, then marks study and instance as ready
    to annotate.

    :param instance_id: id of the instance the imported data belongs to.
    :param data_path: path (relative to DATA_ROOT) containing the DICOM files.
    :return: True on completion.
    """
    instance_service = InstanceService(self.conn)
    # Flag the instance as busy while the import runs.
    instance_service.update({'instance_id': instance_id},
                            {'status': INSTANCE_STATUS.importing_dicom.value})
    series_extraction_service = SeriesExtractionService()
    # Mapping of study suid -> {series_path: [series objects]} (shape
    # inferred from the loops below — confirm against
    # SeriesExtractionService.extract_series_from_path).
    all_series_list = series_extraction_service.extract_series_from_path(
        os.path.join(DATA_ROOT, data_path))
    study_service = StudiesService(self.conn)
    series_service = SeriesService(self.conn)
    for suid in all_series_list:
        series = all_series_list[suid]
        # Placeholder values; the real patient/study uids are filled in by
        # the update at the end of this iteration.
        patient_uid = 0
        study_uid = 0
        study_service.insert(instance_id, patient_uid, study_uid, suid, "[]", 0, 0)
        # Re-query to obtain the auto-generated study_id.
        study = study_service.query({"instance_id": instance_id, "suid": suid})
        study = study[0]
        total_files_number = 0
        folder_name_arr = []
        for series_path in series:
            folder_name_arr.append(series_path)
            one_series = series[series_path][0]
            patient_uid = one_series.info.PatientID
            study_uid = one_series.info.StudyID
            if patient_uid == "" or study_uid == "":
                # NOTE(review): when either id is missing, patient_uid is
                # blanked and the suid is used as study_uid — confirm this
                # fallback is intentional for anonymised scans.
                patient_uid = ""
                study_uid = suid
            total_files_number += one_series.length
            series_info = one_series.info
            # Derive voxel dimensions; 2-D series get a single slice.
            if len(one_series.shape) == 2:
                z_dim = 1
                x_dim = one_series.shape[0]
                y_dim = one_series.shape[1]
            else:
                z_dim = one_series.shape[0]
                x_dim = one_series.shape[1]
                y_dim = one_series.shape[2]
            # NOTE(review): sampling[1] is passed for BOTH x and y spacing
            # and sampling[0] for z — presumably isotropic in-plane spacing
            # with sampling ordered (z, xy); verify against the extraction
            # service before relying on y spacing.
            series_service.insert(study['study_id'], one_series.description,
                                  series_path, one_series.filenames,
                                  one_series.length,
                                  series_info.get("WindowWidth"),
                                  series_info.get("WindowCenter"),
                                  one_series.sampling[1],
                                  one_series.sampling[1],
                                  one_series.sampling[0],
                                  x_dim, y_dim, z_dim,
                                  series_info.get("PatientID"),
                                  series_info.get("SeriesInstanceUID"),
                                  series_info.get("StudyDate") or "",
                                  "", "", SERIES_STATUS.init.value)
        # Back-fill the study row with the aggregated values collected above.
        study_service.update({"instance_id": instance_id, "suid": suid},
                             {"total_files_number": total_files_number,
                              "patient_uid": patient_uid,
                              "study_uid": study_uid,
                              "folder_name": str(folder_name_arr),
                              "status": STUDY_STATUS.ready_to_annotate.value})
    instance_service.update({'instance_id': instance_id},
                            {'status': INSTANCE_STATUS.ready_to_annotate.value})
    return True
def list_all_series(study_id):
    """List all series that belong to one study.

    :param study_id: id of the study whose series are listed.
    :return: JSON array of series rows (an empty JSON object when the study
        has none), with HTTP status 200.
    """
    service = SeriesService(get_conn())
    rows = service.query({"study_id": study_id})
    if not rows:
        return jsonify({}), 200
    return jsonify(rows), 200
def list_all_studies(instance_id):
    """List all studies, each with its series info, under one instance.

    :param instance_id: id of the instance whose studies are listed.
    :return: JSON payload of studies (an empty JSON object when the instance
        has none), with HTTP status 200.
    """
    studies = SeriesService(get_conn()).query_study_series(instance_id)
    if not studies:
        return jsonify({}), 200
    return jsonify(studies), 200
def import_annotations(self, load_type, annotation_path, erase_old=True):
    """Load saved annotations from an export file back into the database.

    Please be cautious! This operation will firstly erase old masks if not
    set erase_old as False and then replace with the new one.

    :param load_type: 'h5'|'mhd' (only 'h5' is implemented).
    :param annotation_path: path of the annotation file to load.
    :param erase_old: documented erase-before-replace flag; NOTE(review):
        not referenced anywhere in this body — confirm whether erasing is
        handled by a callee or simply unimplemented.
    :raises NotImplementedError: for the 'mhd' load type.
    :return: None (unknown load types fall through silently).
    """
    if load_type == "h5":
        labeldb = h5py.File(annotation_path, 'r')
        seriesService = SeriesService(self.conn)
        labelService = LabelService(self.conn)
        labeldb = labeldb['train']
        for study_and_series_id in labeldb.keys():
            # Keys look like "study:<uid>-series:<uid>"; strip the literal
            # "study:" (6 chars) and "series:" (7 chars) prefixes.
            split_str = study_and_series_id.split("-")
            study_uid = split_str[0][6:]
            series_uid = split_str[1][7:]
            series = seriesService.query({"series_instance_uid": series_uid})
            if len(series) > 0:
                # NOTE(review): eval() on DB content — assumes the stored
                # file list is trusted (consistent with the rest of the file).
                slice_file_name = eval(series[0]['series_files_list'])
                content_3D = labeldb[f"study:{study_uid}-series:{series_uid}/label"][()]
                print(f"Import label for: study:{study_uid}-series:{series_uid}/")
                for i in range(len(slice_file_name)):
                    # One z-slice per stored file, in file-list order.
                    content_2D = content_3D[:, :, i]
                    x_dim = content_2D.shape[0]
                    y_dim = content_2D.shape[1]
                    content_1D = np.reshape(content_2D, x_dim * y_dim)
                    unique_id = np.unique(content_1D).tolist()
                    compressed_content_1D = LabelService.compress_content(content_1D)
                    content = {
                        "labelmap2D": {"pixelData": compressed_content_1D,
                                       "segmentsOnLabelmap": unique_id,
                                       "dataLength": content_1D.shape[0]}}
                    labelService.insert(series[0]['series_id'], 1,
                                        slice_file_name[i],
                                        str.encode(json.dumps(content)))
                # BUG FIX: the update payload was the set literal
                # {SERIES_STATUS.mask_is_ready.value} instead of a
                # column->value mapping, so the status was never written
                # correctly.
                seriesService.update({"series_instance_uid": series_uid},
                                     {"status": SERIES_STATUS.mask_is_ready.value})
    elif load_type == 'mhd':
        raise NotImplementedError()
def workbench(instance_id, study_id, series_id):
    """Render the annotation workbench page for one series.

    :param instance_id: instance the study must belong to (validated).
    :param study_id: id of the study containing the series.
    :param series_id: id of the series to open in the workbench.
    :return: rendered workbench template, or the 404 page when the study or
        series cannot be found.
    """
    study_service = StudiesService(get_conn())
    study_info = study_service.query({"study_id": study_id})
    # Validate that the study exists AND belongs to the requested instance.
    if len(study_info) > 0 and study_info[0]["instance_id"] == int(instance_id):
        study_info = study_info[0]
    else:
        return render_template("404.html")
    label_candidate_service = LabelCandidatesService(get_conn())
    label_candidates = label_candidate_service.query(
        {"instance_id": instance_id})
    series_service = SeriesService(get_conn())
    series = series_service.query({
        "study_id": study_id,
        "series_id": series_id
    })
    if len(series) > 0 and len(series[0]) > 3:
        series = series[0]
        # NOTE(review): eval() of a DB-stored list literal — consistent with
        # the rest of the file, assumes trusted data.
        series["series_files_list"] = eval(series["series_files_list"])
    else:
        # BUG FIX: previously this branch fell through with `series` still
        # being a (possibly empty) list, so building `result` below raised
        # a TypeError instead of returning a proper page.
        return render_template("404.html")
    result = {
        "instance_id": instance_id,
        "study_id": study_id,
        "series_id": series_id,
        "study_path": os.path.join(study_info["folder_name"], ""),
        "label_candidates": label_candidates,
        "series_detail": series,
        "dim": [
            series['x_dimension'], series['y_dimension'],
            series['z_dimension']
        ]
    }
    return render_template("workbench.html", data=result)
def test_list_all_studies(self, client):
    # Test the list_all_studies endpoint: first against an empty database,
    # then against a seeded instance with two studies of two series each.
    # Empty DB: endpoint answers with an empty JSON object and 200.
    res = client.get(url_for('list_all_studies', instance_id=1))
    assert res.json == {}
    assert res.status == '200 OK'
    # Setup studies sample data
    ini_service = InitialService()
    conn = ini_service.get_connection()
    instance_service = InstanceService(conn)
    instance_service.insert("Stroke Annotation Task", "CT",
                            "The stroke CT scans for annotation tasks.",
                            "tests/services/sample_data", 0, 200, 1, 0)
    result = instance_service.query({})
    assert result[0]["instance_id"] == 1
    assert result[0]["name"] == "Stroke Annotation Task"
    # Two studies under instance 1; patient_uid doubles as a display name.
    studies_service = StudiesService(conn)
    studies_service.insert(1, "Dicom_691_2", 25, 0, "folder", 100, 1)
    studies_service.insert(1, "Raw_1003/3CH_tagging", 30, 1, "folder", 100, 1)
    # Two series per study (study_id is the first insert argument).
    series_service = SeriesService(conn)
    series_service.insert(1, "series1", "series1/path", ["1"], 1, "200",
                          "200", "0.5", "0.5", "0.5", 512, 512, 512, 1,
                          100101, "", "", "")
    series_service.insert(1, "series2", "series2/path", ["1"], 1, "200",
                          "200", "0.5", "0.5", "0.5", 512, 512, 512, 1,
                          100101, "", "", "")
    series_service.insert(2, "series3", "series3/path", ["1"], 1, "200",
                          "200", "0.5", "0.5", "0.5", 512, 512, 512, 1,
                          100101, "", "", "")
    series_service.insert(2, "series4", "series4/path", ["1"], 1, "200",
                          "200", "0.5", "0.5", "0.5", 512, 512, 512, 1,
                          100101, "", "", "")
    # Seeded DB: response groups series per study (list of lists).
    res = client.get(url_for('list_all_studies', instance_id=1))
    assert len(res.json[0]) == 2
    assert res.json[0][0]["instance_id"] == 1
    assert res.json[0][0]["patient_uid"] == "Dicom_691_2"
    assert res.json[1][0]["instance_id"] == 1
    assert res.json[1][0]["patient_uid"] == "Raw_1003/3CH_tagging"
    assert res.status == '200 OK'
def finish_series(series_id):
    """Mark one series as finished.

    After updating the series, if no sibling series of the same study is
    still in the annotating state, the whole study is marked as finished
    as well.

    :param series_id: id of the series to finish.
    :return: empty JSON object with HTTP status 200.
    """
    series_service = SeriesService(get_conn())
    matches = series_service.query({"series_id": series_id})
    if len(matches) > 0:
        study_id = matches[0]['study_id']
        series_service.update({"series_id": series_id},
                              {"status": SERIES_STATUS.finished.value})
        # Re-query after the update so the just-finished series counts as done.
        siblings = series_service.query({"study_id": study_id})
        still_annotating = any(
            sibling['status'] == SERIES_STATUS.annotating.value
            for sibling in siblings)
        if not still_annotating:
            study_service = StudiesService(get_conn())
            study_service.update({"study_id": study_id},
                                 {'status': STUDY_STATUS.finished.value})
    return jsonify({}), 200
def add_labels(series_id, file_name):
    """Persist one slice's label payload and optionally escalate statuses.

    Stores the raw request body as a label row. When the query parameter
    ``update_status`` is truthy ("true"/"1"), the owning series, study and
    instance are promoted to the "annotating" status if not there already.

    :param series_id: id of the series the label belongs to.
    :param file_name: slice file the label applies to.
    :return: (empty JSON, 201) on success, or an error payload with 404
        when the insert was rolled back.
    """
    # Temporary mock the user Id.
    user_id = 1
    data = request.data
    label_service = LabelService(get_conn())
    # BUG FIX: this flag previously reused the name `status`, which was
    # later overwritten by the series status below, so the final success
    # check tested the wrong value.
    insert_ok = label_service.insert(series_id, user_id, file_name, data)
    # BUG FIX: `type=bool` in args.get converts ANY non-empty string
    # (including "false") to True; parse the flag explicitly instead.
    raw_flag = request.args.get('update_status', default='false')
    update_status = str(raw_flag).lower() in ('true', '1')
    if update_status:
        series_service = SeriesService(get_conn())
        study_service = StudiesService(get_conn())
        instance_service = InstanceService(get_conn())
        series = series_service.query({'series_id': series_id})
        if len(series) > 0:
            study_id = series[0]['study_id']
            series_status = series[0]['status']
            if series_status < SERIES_STATUS.annotating.value:
                series_service.update(
                    {'series_id': series_id},
                    {'status': SERIES_STATUS.annotating.value})
            study = study_service.query({'study_id': study_id})
            instance_id = study[0]['instance_id']
            if study[0]['status'] != STUDY_STATUS.annotating.value:
                study_service.update(
                    {'study_id': study_id},
                    {'status': STUDY_STATUS.annotating.value})
            instance = instance_service.query({'instance_id': instance_id})
            if instance[0]['status'] != INSTANCE_STATUS.annotating.value:
                instance_service.update(
                    {'instance_id': instance_id},
                    {'status': INSTANCE_STATUS.annotating.value})
    if insert_ok:
        return jsonify({}), 201
    # BUG FIX: the error payload used the *variable* `status` as the dict
    # key (producing e.g. {"false": ...}); use the literal "status" key.
    return jsonify({"status": "Transaction Rollback."}), 404
def test_list_series_in_one_study(self, client):
    # Test the list_all_series endpoint: study 1 stays empty, study 2 gets
    # exactly one series with a four-file slice list.
    ini_service = InitialService()
    conn = ini_service.get_connection()
    series_service = SeriesService(conn)
    # Clear any leftover rows so the counts below are deterministic.
    series_service.delete({"study_id": 1})
    series_service.delete({"study_id": 2})
    series_service.insert(
        2, "series_description1", "series_path",
        "['export0001.dcm','export0002.dcm','export0003.dcm','export0004.dcm']",
        4, None, None, None, None, None, None, None, None, None, None, None,
        None, None)
    # Study 1 has no series: empty JSON object.
    res = client.get(url_for('list_all_series', study_id=1))
    assert res.json == {}
    # Study 2 returns the single inserted series.
    res = client.get(url_for('list_all_series', study_id=2))
    assert len(res.json) == 1
    assert res.json[0]["study_id"] == 2
    assert res.json[0]["series_description"] == "series_description1"
    # series_files_list is stored as a Python-literal string.
    assert len(eval(res.json[0]["series_files_list"])) == 4
def save_onestudy_dcm(self, study_id, study, study_h5, msg_box,
                      store_type="train", compression=None, labeled_list=None):
    """Write the raw DICOM voxel volume of each series in one study into an
    open HDF5 file.

    :param study_id: id of the study whose series are exported.
    :param study: study row (only its 'suid' is used, for the h5 path).
    :param study_h5: open h5py File/Group to create datasets in.
    :param msg_box: list of human-readable progress messages; appended to
        and returned.
    :param store_type: top-level h5 group name (e.g. "train").
    :param compression: compression argument forwarded to create_dataset.
    :param labeled_list: optional pre-filtered series rows; when None, all
        series of the study are exported.
    :return: the msg_box list with one message per saved series.
    """
    series_service = SeriesService(self.conn)
    if labeled_list is None:
        series = series_service.query({"study_id": study_id})
    else:
        series = labeled_list
    for i in series:
        # Load DICOM data.
        series_uuid = i['series_instance_uid']
        # NOTE(review): eval() of the DB-stored file list — assumes trusted
        # data, consistent with the rest of the file.
        file_list = [
            os.path.join(i['series_path'], file_name)
            for file_name in eval(i['series_files_list'])
        ]
        series_dcm = []
        try:
            for file in file_list:
                file_dcm = pydicom.dcmread(file, force=True)
                series_dcm.append(file_dcm.pixel_array)
        except Exception as e:
            # Best-effort export: skip the whole series on any read error.
            # NOTE(review): logger.warn is deprecated in favour of warning.
            app.logger.warn(
                f"Read dicom file error: {file}, series uuid:{series_uuid}, with error:{e}"
            )
            continue
        # Stack slices, then move the slice axis last -> (x, y, z).
        series_dcm = np.stack(series_dcm)
        series_dcm = np.moveaxis(series_dcm, 0, -1)
        # if the dimension of original dicom and label mismatch should throw an error, and log.
        # if series_dcm.shape != series_label.shape:
        #     app.logger.warn(
        #         f"The original DICOM shape({series_dcm.shape}) mismatch with the label's shape({series_label.shape})")
        dcm = study_h5.create_dataset(
            f"{store_type}/study:{study['suid']}-series:{series_uuid}/data",
            data=series_dcm,
            compression=compression)
        # Carry the DB metadata along as h5 attributes.
        dcm.attrs['x_spacing'] = i['x_spacing']
        dcm.attrs['y_spacing'] = i['y_spacing']
        dcm.attrs['z_spacing'] = i['z_spacing']
        dcm.attrs['patient_id'] = i['patient_id']
        dcm.attrs['files'] = i['series_files_list']
        dcm.attrs['path'] = i['series_path']
        dcm.attrs['series_id'] = i['series_id']
        dcm.attrs['study_id'] = i['study_id']
        dcm.attrs['description'] = i["series_description"]
        app.logger.debug(
            f"Save one series dcm- path:{i['series_path']}, series_id:{i['series_id']}, "
            f"h5path: {store_type}/study:{study['suid']}-series:{series_uuid}/data"
        )
        msg_box.append(
            f"Save one series dcm- path:{i['series_path']}, series_id:{i['series_id']}, "
            f"h5path: {store_type}/study:{study['suid']}-series:{series_uuid}/data"
        )
    return msg_box
def save_onestudy_label(self, study_id, study, study_h5, msg_box,
                        store_type="train", compression=None):
    """Reassemble per-slice label maps of one study into 3-D volumes and
    optionally write them into an open HDF5 file.

    :param study_id: id of the study whose labeled series are exported.
    :param study: study row (only its 'suid' is used, for the h5 path).
    :param study_h5: open h5py File/Group, or None to skip writing and only
        collect the label objects.
    :param msg_box: list of human-readable progress messages; appended to.
    :param store_type: top-level h5 group name (e.g. "train").
    :param compression: compression argument forwarded to create_dataset.
    :return: (msg_box, labeled_series_list, labels_list) where
        labeled_series_list holds the series rows that had labels and
        labels_list holds one dict per series with the assembled volume
        plus its metadata.
    """
    series_service = SeriesService(self.conn)
    label_service = LabelService(self.conn)
    series = series_service.query({"study_id": study_id})
    labeled_series_list = []
    labels_list = []
    # t.toc("before iter", restart=True)
    for i in series:
        series_id = i['series_id']
        labels = label_service.query({"series_id": series_id})
        if len(labels) == 0:
            app.logger.debug(
                f"The current series don't have labels:{i['series_id']}")
            continue
        else:
            labeled_series_list.append(i)
        app.logger.debug(f"Processing current series:{i['series_id']}")
        series_uuid = i['series_instance_uid']
        x_dim = int(i['x_dimension'])
        y_dim = int(i['y_dimension'])
        z_dim = int(i['z_dimension'])
        # NOTE(review): eval() of DB-stored content — assumes trusted data,
        # consistent with the rest of the file.
        series_files_list = eval(i['series_files_list'])
        series_label = np.zeros((x_dim, y_dim, z_dim))
        # t.toc("before label iter", restart=True)
        for label in labels:
            file_id = label['file_id']
            content = eval(label['content'])
            # pixelData maps label value (as string) -> flat pixel indices.
            pixel_data = content['labelmap2D']['pixelData']
            # t.tic()
            for label_data in pixel_data:
                # Scatter this label's pixels into a flat x*y plane, then
                # reshape to 2-D and accumulate into the slice for file_id.
                pixel_data_xy = np.zeros((x_dim * y_dim))
                label_int = int(float(label_data))
                pixel_data_xy[pixel_data[label_data]] = label_int
                pixel_data_xy = np.reshape(pixel_data_xy, (x_dim, y_dim))
                # z position is the slice file's index in the series list.
                z_index = series_files_list.index(file_id)
                # NOTE(review): += accumulates overlapping labels rather
                # than overwriting — confirm overlaps are intended to sum.
                series_label[:, :, z_index] += pixel_data_xy
            # t.toc("after process of pixel_data", restart=True)
        label_obj = {
            "data": series_label,
            "study": study['suid'],
            "series": series_uuid,
            "x_spacing": i['x_spacing'],
            "y_spacing": i['y_spacing'],
            "z_spacing": i['z_spacing'],
            "patient_id": i['patient_id'],
            "files": i['series_files_list'],
            "path": i['series_path'],
            "series_id": i['series_id'],
            "study_id": i['study_id'],
            "description": i["series_description"]
        }
        labels_list.append(label_obj)
        if study_h5 is not None:
            label_db = study_h5.create_dataset(
                f"{store_type}/study:{study['suid']}-series:{series_uuid}/label",
                data=series_label,
                compression=compression)
            # Carry the DB metadata along as h5 attributes.
            label_db.attrs['x_spacing'] = i['x_spacing']
            label_db.attrs['y_spacing'] = i['y_spacing']
            label_db.attrs['z_spacing'] = i['z_spacing']
            label_db.attrs['patient_id'] = i['patient_id']
            label_db.attrs['files'] = i['series_files_list']
            label_db.attrs['path'] = i['series_path']
            label_db.attrs['series_id'] = i['series_id']
            label_db.attrs['study_id'] = i['study_id']
            label_db.attrs['description'] = i["series_description"]
            app.logger.debug(
                f"Save one series label - path:{i['series_path']}, series_id:{i['series_id']}, "
                f"h5path: study:{study['suid']}/series:{series_uuid}/label"
            )
            msg_box.append(
                f"Save one series - path:{i['series_path']}, series_id:{i['series_id']}, "
                f"h5path: study:{store_type}/{study['suid']}-series:{series_uuid}/label"
            )
    return msg_box, labeled_series_list, labels_list