def finish_series(series_id):
    """
    Mark the status of the current series as finished (4).
    If no series in the current study is still annotating, set the
    current study as finished as well.
    :param series_id:
    :return:
    """
    series_service = SeriesService(get_conn())
    series = series_service.query({"series_id": series_id})
    if len(series) > 0:
        study_id = series[0]['study_id']
        series_service.update({"series_id": series_id},
                              {"status": SERIES_STATUS.finished.value})
        series_in_current_study = series_service.query({"study_id": study_id})
        all_complete = True
        for one_series in series_in_current_study:
            if one_series['status'] == SERIES_STATUS.annotating.value:
                all_complete = False
        if all_complete:
            study_service = StudiesService(get_conn())
            study_service.update({"study_id": study_id},
                                 {'status': STUDY_STATUS.finished.value})
    return jsonify({}), 200

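# Illustrative sketch only (not part of the original module): the completeness
# check in finish_series can be expressed with any(); it relies only on the
# SeriesService.query call and SERIES_STATUS values already used above.
def _study_is_complete(series_service, study_id):
    """Return True when no series in the study is still being annotated."""
    series_in_study = series_service.query({"study_id": study_id})
    return not any(s['status'] == SERIES_STATUS.annotating.value
                   for s in series_in_study)
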
def list_studies_info(instance_id):
    """
    List all studies' basic information.
    :param instance_id:
    :return:
    """
    study_service = StudiesService(get_conn())
    studies = study_service.query({'instance_id': instance_id})
    return jsonify(studies), 200

def workbench(instance_id, study_id, series_id):
    """Render the annotation workbench for one series in a study."""
    study_service = StudiesService(get_conn())
    study_info = study_service.query({"study_id": study_id})
    if len(study_info) > 0 and study_info[0]["instance_id"] == int(instance_id):
        study_info = study_info[0]
    else:
        return render_template("404.html")
    label_candidate_service = LabelCandidatesService(get_conn())
    label_candidates = label_candidate_service.query({"instance_id": instance_id})
    series_service = SeriesService(get_conn())
    series = series_service.query({
        "study_id": study_id,
        "series_id": series_id
    })
    if len(series) > 0 and len(series[0]) > 3:
        series = series[0]
        # series_files_list is stored as a stringified Python list.
        series["series_files_list"] = eval(series["series_files_list"])
    result = {
        "instance_id": instance_id,
        "study_id": study_id,
        "series_id": series_id,
        "study_path": os.path.join(study_info["folder_name"], ""),
        "label_candidates": label_candidates,
        "series_detail": series,
        "dim": [
            series['x_dimension'], series['y_dimension'],
            series['z_dimension']
        ]
    }
    return render_template("workbench.html", data=result)

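# Illustrative sketch only (an alternative, not the existing code path): since
# series_files_list appears to be stored as a stringified Python list (workbench
# eval()s it above), ast.literal_eval can parse it without evaluating arbitrary
# expressions the way eval() does.
import ast

def _parse_series_files_list(raw):
    """Parse a stringified Python list such as "['a.dcm', 'b.dcm']"."""
    return ast.literal_eval(raw)
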
def add_labels(series_id, file_name):
    # Temporarily mock the user id.
    user_id = 1
    data = request.data
    label_service = LabelService(get_conn())
    insert_ok = label_service.insert(series_id, user_id, file_name, data)
    update_status = request.args.get('update_status', type=bool, default=False)
    if update_status:
        series_service = SeriesService(get_conn())
        study_service = StudiesService(get_conn())
        instance_service = InstanceService(get_conn())
        series = series_service.query({'series_id': series_id})
        if len(series) > 0:
            study_id = series[0]['study_id']
            series_status = series[0]['status']
            if series_status < SERIES_STATUS.annotating.value:
                series_service.update(
                    {'series_id': series_id},
                    {'status': SERIES_STATUS.annotating.value})
            study = study_service.query({'study_id': study_id})
            instance_id = study[0]['instance_id']
            if study[0]['status'] != STUDY_STATUS.annotating.value:
                study_service.update(
                    {'study_id': study_id},
                    {'status': STUDY_STATUS.annotating.value})
            instance = instance_service.query({'instance_id': instance_id})
            if instance[0]['status'] != INSTANCE_STATUS.annotating.value:
                instance_service.update(
                    {'instance_id': instance_id},
                    {'status': INSTANCE_STATUS.annotating.value})
    if insert_ok:
        return jsonify({}), 201
    else:
        return jsonify({"status": "Transaction Rollback."}), 404

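# Illustrative sketch only (not part of the original route): the series ->
# study -> instance status promotion above, factored into a helper. It uses
# only the query/update service calls already shown in add_labels.
def _promote_to_annotating(conn, series_id):
    series_service = SeriesService(conn)
    study_service = StudiesService(conn)
    instance_service = InstanceService(conn)
    series = series_service.query({'series_id': series_id})
    if len(series) == 0:
        return
    if series[0]['status'] < SERIES_STATUS.annotating.value:
        series_service.update({'series_id': series_id},
                              {'status': SERIES_STATUS.annotating.value})
    study = study_service.query({'study_id': series[0]['study_id']})
    if study[0]['status'] != STUDY_STATUS.annotating.value:
        study_service.update({'study_id': series[0]['study_id']},
                             {'status': STUDY_STATUS.annotating.value})
    instance = instance_service.query({'instance_id': study[0]['instance_id']})
    if instance[0]['status'] != INSTANCE_STATUS.annotating.value:
        instance_service.update({'instance_id': study[0]['instance_id']},
                                {'status': INSTANCE_STATUS.annotating.value})
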
def wireframe_one_study(instance_id, study_id):
    """Build wireframe contours for one study, with timing checkpoints."""
    t = TicToc()
    t.tic()
    study_service = StudiesService(get_conn())
    study_info = study_service.query({
        "instance_id": instance_id,
        "study_id": study_id
    })
    t.toc("study_info", restart=True)  # study_info 0.012129 seconds.
    export_service = ExportService(get_conn())
    _, _, labels = export_service.save_onestudy_label(
        study_id, study_info[0], None, [])
    t.toc("labels gen", restart=True)  # labels gen 31.750854 seconds.
    crossref_service = CrossRefService()
    label_list = crossref_service.accumulate_contours(labels)
    t.toc("label_list", restart=True)  # label_list 32.659566 seconds.
    result = crossref_service.merge_dicom_orientation(label_list)
    t.toc("merge dicom", restart=True)  # merge dicom 8.348602 seconds.
    return jsonify(result), 200

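# Illustrative sketch only (an alternative, not the project's TicToc helper):
# the timing checkpoints above can also be captured with a small context
# manager built on time.perf_counter.
import time
from contextlib import contextmanager

@contextmanager
def timed(label):
    """Print how long the wrapped block took, in seconds."""
    start = time.perf_counter()
    yield
    print(f"{label} {time.perf_counter() - start:.6f} seconds.")
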
def import_dcm(self, instance_id, data_path):
    instance_service = InstanceService(self.conn)
    instance_service.update({'instance_id': instance_id},
                            {'status': INSTANCE_STATUS.importing_dicom.value})
    series_extraction_service = SeriesExtractionService()
    all_series_list = series_extraction_service.extract_series_from_path(
        os.path.join(DATA_ROOT, data_path))
    study_service = StudiesService(self.conn)
    series_service = SeriesService(self.conn)
    for suid in all_series_list:
        series = all_series_list[suid]
        patient_uid = 0
        study_uid = 0
        # Insert a placeholder study row first; it is updated after all series
        # under this suid have been processed.
        study_service.insert(instance_id, patient_uid, study_uid, suid, "[]", 0, 0)
        study = study_service.query({"instance_id": instance_id, "suid": suid})
        study = study[0]
        total_files_number = 0
        folder_name_arr = []
        for series_path in series:
            folder_name_arr.append(series_path)
            one_series = series[series_path][0]
            patient_uid = one_series.info.PatientID
            study_uid = one_series.info.StudyID
            if patient_uid == "" or study_uid == "":
                patient_uid = ""
                study_uid = suid
            # disp_name = "pid:" + one_series.info.PatientID + "_sid:" + one_series.info.StudyID
            total_files_number += one_series.length
            series_info = one_series.info
            if len(one_series.shape) == 2:
                z_dim = 1
                x_dim = one_series.shape[0]
                y_dim = one_series.shape[1]
            else:
                z_dim = one_series.shape[0]
                x_dim = one_series.shape[1]
                y_dim = one_series.shape[2]
            series_service.insert(study['study_id'], one_series.description,
                                  series_path, one_series.filenames,
                                  one_series.length,
                                  series_info.get("WindowWidth"),
                                  series_info.get("WindowCenter"),
                                  one_series.sampling[1],
                                  one_series.sampling[1],
                                  one_series.sampling[0],
                                  x_dim, y_dim, z_dim,
                                  series_info.get("PatientID"),
                                  series_info.get("SeriesInstanceUID"),
                                  series_info.get("StudyDate") or "",
                                  "", "", SERIES_STATUS.init.value)
        study_service.update({"instance_id": instance_id, "suid": suid},
                             {"total_files_number": total_files_number,
                              "patient_uid": patient_uid,
                              "study_uid": study_uid,
                              "folder_name": str(folder_name_arr),
                              "status": STUDY_STATUS.ready_to_annotate.value})
    instance_service.update({'instance_id': instance_id},
                            {'status': INSTANCE_STATUS.ready_to_annotate.value})
    return True

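# Illustrative sketch only (not part of the original service): the shape-to-
# dimension mapping used in import_dcm, pulled out for clarity. A 2D series
# gets z_dim = 1; a 3D series is ordered (z, x, y) in one_series.shape.
def _dims_from_shape(shape):
    """Return (x_dim, y_dim, z_dim) for a 2D or 3D series shape."""
    if len(shape) == 2:
        return shape[0], shape[1], 1
    return shape[1], shape[2], shape[0]
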
def test_list_all_studies(self, client):
    res = client.get(url_for('list_all_studies', instance_id=1))
    assert res.json == {}
    assert res.status == '200 OK'

    # Set up studies sample data.
    ini_service = InitialService()
    conn = ini_service.get_connection()
    instance_service = InstanceService(conn)
    instance_service.insert("Stroke Annotation Task", "CT",
                            "The stroke CT scans for annotation tasks.",
                            "tests/services/sample_data", 0, 200, 1, 0)
    result = instance_service.query({})
    assert result[0]["instance_id"] == 1
    assert result[0]["name"] == "Stroke Annotation Task"

    studies_service = StudiesService(conn)
    studies_service.insert(1, "Dicom_691_2", 25, 0, "folder", 100, 1)
    studies_service.insert(1, "Raw_1003/3CH_tagging", 30, 1, "folder", 100, 1)

    series_service = SeriesService(conn)
    series_service.insert(1, "series1", "series1/path", ["1"], 1, "200", "200",
                          "0.5", "0.5", "0.5", 512, 512, 512, 1, 100101, "", "", "")
    series_service.insert(1, "series2", "series2/path", ["1"], 1, "200", "200",
                          "0.5", "0.5", "0.5", 512, 512, 512, 1, 100101, "", "", "")
    series_service.insert(2, "series3", "series3/path", ["1"], 1, "200", "200",
                          "0.5", "0.5", "0.5", 512, 512, 512, 1, 100101, "", "", "")
    series_service.insert(2, "series4", "series4/path", ["1"], 1, "200", "200",
                          "0.5", "0.5", "0.5", 512, 512, 512, 1, 100101, "", "", "")

    res = client.get(url_for('list_all_studies', instance_id=1))
    assert len(res.json[0]) == 2
    assert res.json[0][0]["instance_id"] == 1
    assert res.json[0][0]["patient_uid"] == "Dicom_691_2"
    assert res.json[1][0]["instance_id"] == 1
    assert res.json[1][0]["patient_uid"] == "Raw_1003/3CH_tagging"
    assert res.status == '200 OK'

def cross_reference(instance_id):
    """Render the cross-reference page for all studies in an instance."""
    study_service = StudiesService(get_conn())
    study_info = study_service.query({"instance_id": instance_id})
    return render_template("cmr_reference.html",
                           studies=study_info,
                           instance=instance_id)

def save_studies(self, instance_id, split_entry_num=100, start_idx=0,
                 end_idx=-1, store_type="train", save_pair=True,
                 save_label=True, save_data=True, compression=None):
    """
    Save all annotations and DICOM data for each study in the instance.
    :param instance_id:
    :param split_entry_num: create a new h5 file once the stored entries exceed this split size
    :param start_idx: start file number.
    :param end_idx: end file number.
    :param store_type: train or test
    :param save_pair: True or False. If saving pairs, only series which have labels are saved.
    :param save_label: True or False
    :param save_data: True or False
    :param compression: whether the hdf5 file is compressed. "gzip|lzf"
    :return:
    """
    msg_box = []
    instance_service = InstanceService(self.conn)
    instance = instance_service.query({"instance_id": instance_id})
    instance_name = instance[0]['name']
    study_service = StudiesService(self.conn)
    studies = study_service.query({"instance_id": instance_id})
    study_num = split_entry_num * start_idx
    if end_idx == -1:
        end_num = len(studies)
    else:
        end_num = split_entry_num * end_idx
    study_h5 = None
    time_stamp = str(int(time.time()))
    for idx, study in enumerate(studies):
        if idx == study_num and idx < end_num:
            study_id = study['study_id']
            app.logger.info(
                f"Save study-instance_id:{instance_id}({instance_name}),study_id:{study_id}"
            )
            h5_path = os.path.join(OUTPUT_ROOT, str(instance_id))
            if study_num % split_entry_num == 0:
                if not os.path.exists(h5_path):
                    os.makedirs(h5_path)
                if study_h5 is not None:
                    study_h5.close()
                h5_file_name = f"Export-{instance_id}-{instance_name}-{time_stamp}-{study_num // split_entry_num}.h5"
                study_h5 = h5py.File(os.path.join(h5_path, h5_file_name), 'w')
            if save_pair:
                _, saved_label, _ = self.save_onestudy_label(
                    study_id, study, study_h5, msg_box, store_type, compression)
                self.save_onestudy_dcm(study_id, study, study_h5, msg_box,
                                       store_type, compression, saved_label)
            else:
                if save_label:
                    self.save_onestudy_label(study_id, study, study_h5,
                                             msg_box, store_type, compression)
                if save_data:
                    self.save_onestudy_dcm(study_id, study, study_h5,
                                           msg_box, store_type, compression)
            study_num += 1
    # Guard against the case where no study fell inside the export range.
    if study_h5 is not None:
        study_h5.close()
    return msg_box

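# Illustrative sketch only (assumption): how start_idx, end_idx and
# split_entry_num bound the export loop in save_studies. Export file k holds
# the studies with indices in [k * split_entry_num, (k + 1) * split_entry_num).
def _export_bounds(total_studies, split_entry_num, start_idx, end_idx):
    """Return the (first, last) study indices that save_studies will export."""
    first = split_entry_num * start_idx
    last = total_studies if end_idx == -1 else split_entry_num * end_idx
    return first, last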