def get_logger(name):
    """Return a logger that writes DEBUG+ to the console and ERROR+ to a
    daily, size-rotated file under ``base_dir/<name>/``.

    Log files older than three days are pruned on first configuration.
    """
    logger = logging.getLogger(name)
    # Guard against re-configuration: logging.getLogger returns the same
    # object for the same name, so calling this twice would otherwise stack
    # duplicate handlers and emit every record multiple times.
    if logger.handlers:
        return logger
    logger.setLevel(logging.DEBUG)

    formatter = logging.Formatter(
        "[%(asctime)s] %(levelname)s %(filename)s %(lineno)s %(message)s"
    )

    # Console handler: everything from DEBUG up.
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    # Per-logger directory below the module-level base_dir. makedirs with
    # exist_ok=True is race-free and also creates missing parents
    # (the original os.mkdir would fail on both counts).
    log_path = os.path.join(base_dir, name)
    os.makedirs(log_path, exist_ok=True)

    file_name = str(datetime.date.today()) + ".log"
    log_file_path = os.path.join(log_path, file_name)
    handler = logging.handlers.RotatingFileHandler(log_file_path,
                                                   maxBytes=1024 * 1024)

    # Prune log files older than three days (judged by creation time).
    for old_log in (os.path.join(log_path, f) for f in os.listdir(log_path)):
        create_time = int(os.path.getctime(old_log))
        if int(time.time()) - create_time >= 3600 * 24 * 3:
            os.remove(old_log)

    handler.setFormatter(formatter)
    handler.setLevel(logging.ERROR)
    logger.addHandler(handler)
    return logger
def main():
    """CLI entry point: visualize sliver density data with segmentations."""
    # Console-only logging at WARNING level for this script run.
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)

    density_default = None
    segmentation_default = None

    cli = argparse.ArgumentParser(
        description='Visualization of sliver data and our segmentation')
    cli.add_argument(
        '-dd', '--densityData',
        help='path to input data with density. It can be Dicom or pklz',
        default=density_default)
    cli.add_argument(
        '-sa', '--segmentationA',
        help='path to input (sliver) segmentation Dicom or pklz',
        default=segmentation_default)
    cli.add_argument(
        '-sb', '--segmentationB',
        help='path to out pklz or pkl segmentation. Dicom is not supported.',
        default=None)
    opts = cli.parse_args()

    show(opts.densityData, opts.segmentationA, opts.segmentationB)
def main():
    """CLI entry point: 3D visualization of a segmentation stored in a file."""
    logger.setLevel(logging.WARNING)
    ch = logging.StreamHandler()
    logger.addHandler(ch)

    # input parser
    parser = argparse.ArgumentParser(description='\
3D visualization of segmentation\n\
\npython show_segmentation.py\n\
\npython show_segmentation.py -i resection.pkl -l 2 3 4 -d 4')
    parser.add_argument('-i', '--inputfile', default='organ.pkl',
                        help='input file')
    parser.add_argument('-o', '--outputfile',
                        default='~/lisa_data/mesh_geom.vtk',
                        help='output file')
    parser.add_argument('-d', '--degrad', type=int, default=4,
                        help='data degradation, default 4')
    parser.add_argument(
        '-r', '--resize', type=float, default=None,
        help='resize voxel to defined size in milimeters, default is None')
    parser.add_argument('-l', '--label', type=int, metavar='N', nargs='+',
                        default=[1],
                        help='segmentation labels, default 1')
    args = parser.parse_args()

    import io3d
    data = io3d.read(args.inputfile, dataplus_format=True)

    # Build the boolean mask for all requested labels in one vectorized pass.
    # Fix: np.bool was deprecated and removed in NumPy 1.24; np.isin also
    # replaces the original label-by-label OR loop.
    ds = np.isin(data['segmentation'], args.label)

    outputfile = os.path.expanduser(args.outputfile)
    showSegmentation(ds, degrad=args.degrad,
                     voxelsize_mm=data['voxelsize_mm'],
                     vtk_file=outputfile,
                     resize_mm=args.resize)
def main():
    """CLI entry point: set up console logging and parse arguments."""
    logger.setLevel(logging.DEBUG)
    console_handler = logging.StreamHandler()
    logger.addHandler(console_handler)

    # input parser
    arg_parser = argparse.ArgumentParser(description=__doc__)
    arg_parser.add_argument(
        '-i', '--inputfile',
        default=None,
        required=True,
        help='input file'
    )
    arg_parser.add_argument(
        '-d', '--debug', action='store_true',
        help='Debug mode')
    parsed = arg_parser.parse_args()

    if parsed.debug:
        console_handler.setLevel(logging.DEBUG)
def main():
    """Configure warning-level console logging, then run the update."""
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)
    update()
def create_logger(log_dir: str, logger_name: str):
    """Create (or return) a DEBUG-level logger writing both to a timestamped
    file under *log_dir* and to the console.

    Safe to call repeatedly for the same name: handlers are attached once.
    """
    logger = logging.getLogger(logger_name)
    # getLogger returns the same object per name, so guard against stacking
    # duplicate handlers (and duplicated output) on every call.
    if logger.handlers:
        return logger
    logger.setLevel(logging.DEBUG)

    # makedirs(exist_ok=True) is race-free and creates missing parents;
    # the original exists()+mkdir could fail on both counts.
    os.makedirs(log_dir, exist_ok=True)

    # File handler with a per-run timestamped name.
    timestamp = time.strftime("%Y.%m.%d_%H.%M.%S", time.localtime())
    log_filename = f"{log_dir}/log_{logger_name}_{timestamp}.txt"
    fh = logging.FileHandler(log_filename)
    fh.setLevel(logging.DEBUG)

    # Console handler.
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # Shared record format for both handlers.
    formatter = logging.Formatter(
        '[%(asctime)s][%(levelname)s] ## %(message)s')
    fh.setFormatter(formatter)
    ch.setFormatter(formatter)

    logger.addHandler(fh)
    logger.addHandler(ch)
    return logger
def main():
    """Print the contents of a pkl/pklz file and try to visualize it."""
    logger.setLevel(logging.WARNING)
    ch = logging.StreamHandler()
    logger.addHandler(ch)

    parser = argparse.ArgumentParser(
        description='Information about pkl/pklz file')
    parser.add_argument('pklzFile',
                        help='path to data'
                        )
    args = parser.parse_args()

    data = misc.obj_from_file(args.pklzFile, 'pickle')
    print(data)

    # Best-effort visualization: prefer data plus segmentation contour, fall
    # back to data only. Fix: ``except Exception`` instead of the original
    # bare ``except:`` keeps KeyboardInterrupt/SystemExit working while
    # preserving the best-effort behavior.
    try:
        pyed = sed3.sed3(data['data3d'], contour=data['segmentation'])
        pyed.show()
    except Exception:
        try:
            pyed = sed3.sed3(data['data3d'])
            pyed.show()
        except Exception:
            print("Problem with visualization")
def main():
    """Run simple segmentation on a pickled dataset and optionally save it."""
    logger.setLevel(logging.WARNING)
    ch = logging.StreamHandler()
    logger.addHandler(ch)

    # input parser
    parser = argparse.ArgumentParser(
        description='Module for segmentation of simple anatomical structures')
    parser.add_argument('-i', '--inputfile', default='organ.pkl',
                        help='path to data dir')
    args = parser.parse_args()

    data = misc.obj_from_file(args.inputfile, filetype='pickle')
    data3d = data['data3d']
    voxelsize_mm = data['voxelsize_mm']

    ss = SimpleSegmentation()
    simple_seg = ss.simple_segmentation(data3d, voxelsize_mm)

    # visualization
    pyed = sed3.sed3(data['data3d'], seeds=simple_seg)
    pyed.show()

    # save
    # Fix: ``raw_input`` exists only on Python 2 (NameError on Python 3);
    # ``input`` is the plain-string prompt equivalent there.
    savestring = input('Save output data? (y/n): ')
    if savestring in ['Y', 'y']:
        misc.obj_to_file(data, "resection.pkl", filetype='pickle')
def start_flask_server(daemonize=True, debug=True):
    """Start the Flask web server, optionally daemonized.

    When a PID file already exists, either report the running daemon or ask
    the user to remove a stale PID file; in both cases nothing is started.
    """
    if Path(PID_PATH).exists():
        pid = int(Path(PID_PATH).read_text())
        if pid in psutil.pids():
            # PID file matches a live process: daemon is already running.
            clrc.info(
                "It seems daemon is already running. Use restart command to restart it"
            )
            return
        else:
            # Stale PID file: the recorded process is gone; require manual
            # cleanup rather than deleting the file ourselves.
            clrc.warn(
                "It seems daemon was not stopped correctly the last time. PID file exists, and PID inside it do not match any running process. Please remove PID file manually: {0}"
                .format(PID_PATH))
            clrc.info("After removing PID file daemon should start as usual")
            return
    if daemonize:
        if debug:
            clrc.info("Information:")
            clrc.info("Database is at: {0}".format(DATABASE_PATH))
            clrc.info("Daemon process PID file is at: {0}".format(PID_PATH))
            if LOG_PATH:
                clrc.info(
                    "You specified LOG_PATH. It's at: {0}".format(LOG_PATH))
        # NOTE(review): when these pre-flight checks fail this branch silently
        # does nothing -- presumably the _can_create_* helpers report their
        # own errors; confirm against their definitions.
        if (_can_create_pid_file() and _can_create_logs()):
            clrc.info("Starting daemon...")
            clrc.info(
                "Daemon started successfully. You can access your server at http://{0}:{1}"
                .format(HOST, PORT))
            clrc.info(
                "If you are not able to access the web server and sure this is not a problem with firewall/closed port etc, please check logs here: {0}"
                .format(LOG_PATH))
            # Route root-logger output (DEBUG and up) into the daemon log file.
            logger = logging.getLogger()
            logger.setLevel(logging.DEBUG)
            fh = logging.FileHandler(LOG_PATH)
            logger.addHandler(fh)
            # Fix to work with pyinstaller onefile: collect any file
            # descriptors that point at our own executable so they survive
            # the daemonization fork below.
            fds_to_myself = []
            if getattr(sys, 'frozen', False):
                fds_to_myself = [
                    of.fd for of in psutil.Process(os.getpid()).open_files()
                    if of.path == sys.executable
                ]
            # Daemonize: stdout/stderr are redirected into the log file and
            # the log stream (plus self-referencing fds) is kept open.
            with daemon.DaemonContext(
                    pidfile=pidlockfile.PIDLockFile(PID_PATH),
                    stdout=fh.stream,
                    stderr=fh.stream,
                    files_preserve=[fh.stream] + fds_to_myself):
                _run_flask_app()
            clrc.success(
                "Daemon started! You can access server at {0}:{1}".format(
                    HOST, PORT))
    else:
        # Foreground mode: run the app directly with Flask debugging on.
        _run_flask_app(debug=True)
def main():
    """Load a hard-coded dataset, preprocess it and localize lesions."""
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # The CLI option is declared but the command line is intentionally not
    # parsed; the input path below is hard-coded.
    cli = argparse.ArgumentParser(
        description='Module for segmentation of simple anatomical structures')
    cli.add_argument('-i', '--inputfile', default='vessels.pkl',
                     help='path to data dir')

    dcmdir = '/home/tomas/Dropbox/Work/Data/medical/org-38289898-export1.pklz'
    data = misc.obj_from_file(dcmdir, filetype='pickle')

    # Intensity windowing followed by total-variation smoothing.
    data['data3d'] = tools.windowing(data['data3d'], level=50, width=350)
    data['data3d'] = tools.smoothing_tv(data['data3d'], weight=0.05,
                                        sliceId=0)

    lesions = Lesions()
    lesions.import_data(data)
    lesions.automatic_localization()

    # Boolean mask of the voxels labeled as lesions.
    lesion_mask = lesions.segmentation == lesions.data['slab']['lesions']
    sed3.sed3(lesions.data3d, contour=lesion_mask).show()
def get_logger(name=None, level=None):
    """Return a cached logger by *name*, creating it on first request.

    For a cached logger, a non-None *level* is re-applied to the logger and
    to each of its handlers.
    """
    if name not in __loggers__:
        # First request: delegate creation (and initial level) to setup_logger.
        logger = setup_logger(name=name, level=level)
    else:
        logger = __loggers__[name]
        # NOTE(review): nesting reconstructed from flattened source -- the
        # level re-apply is assumed to belong to the cached branch only,
        # since setup_logger already receives ``level``; confirm upstream.
        if level is not None:
            logger.setLevel(level)
            for handler in logger.handlers:
                handler.setLevel(level)
    return logger
def setup_custom_logger(name):
    """Return a DEBUG-level logger named *name* that logs to the console with
    a timestamped, file/line-annotated format.

    Safe to call repeatedly: the stream handler is attached only once.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    # getLogger returns the same object per name, so guard against stacking
    # a new handler (and thus duplicated output) on every call.
    if not logger.handlers:
        formatter = logging.Formatter(
            '%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger
def main():
    """Train the liver intensity model from sliver data."""
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        '-o', '--outputfile',
        default="~/lisa_data/liver_intensity.Model.p",
        help='output file'
    )
    cli.add_argument(
        '-fv', '--extern_fv',
        default=None,
        help='string describing extern feature vector function'
    )
    cli.add_argument(
        '-d', '--debug', action='store_true',
        help='Debug mode')
    opts = cli.parse_args()

    if opts.debug:
        console.setLevel(logging.DEBUG)

    modelparams = {}
    if opts.extern_fv is not None:
        # Select the external feature-vector function by name.
        modelparams.update({
            'fv_type': "fv_extern",
            'fv_extern': opts.extern_fv,
        })

    train_liver_model_from_sliver_data(opts.outputfile,
                                       modelparams=modelparams)
def setup(logging_level="INFO", logger_file=None): if logger_file is None: logging.basicConfig(handlers=[InterceptHandler()], level=logging_level) else: logger = logging.getLogger() logging_level = getattr(logging, logging_level) logger.setLevel(logging_level) handler = logging.FileHandler(logger_file, "w", encoding="UTF-8") handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(threadName)s %(name)s %(message)s")) logger.addHandler(handler)
def enable_std_logging(name: str = "bllb", lvl: Union[int, str] = "DEBUG") -> object:
    """Enable standard logging library, return logger.

    Configures a logger *name* at level *lvl*, re-enables global logging and
    default warning display.
    """
    log_format = "%(asctime)s : %(levelname)s \t: %(message)s"
    logger = logging.getLogger(name)
    logger.setLevel(lvl)
    # Fix: the original set ``logging.disabled``, a meaningless attribute on
    # the module object; ``Logger.disabled`` is the flag that mutes a logger.
    logger.disabled = False
    logging.disable(logging.NOTSET)
    logging.basicConfig(format=log_format, level=lvl)
    logger.info(f"Imported standard library logging module.\n\t Level: {lvl}")
    filterwarnings("default")
    return logger
def main():
    """Show a live-updating list view over a log file."""
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        '-i', '--inputfile',
        default=None,
        required=True,
        help='input file'
    )
    cli.add_argument(
        '-d', '--debug', action='store_true',
        help='Debug mode')
    opts = cli.parse_args()
    if opts.debug:
        console.setLevel(logging.DEBUG)

    app = QApplication(sys.argv)

    # List view backed by a model that re-reads the file; renamed from
    # ``list`` so the builtin is no longer shadowed.
    log_view = QListView()
    log_view.setWindowTitle('nazev okna')
    log_view.setMinimumSize(600, 400)
    model = LogEntryModel(opts.inputfile, parent=log_view)
    log_view.setModel(model)

    # Re-slurp the file whenever the filesystem reports a change.
    watcher = QFileSystemWatcher([opts.inputfile], parent=None)
    watcher.fileChanged['QString'].connect(model.slurp)

    log_view.show()
    app.exec_()
def main():
    """Compare two segmentations and write the evaluation to a file."""
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)

    default_data_file = os.path.join(path_to_script,
                                     "20130812_liver_volumetry.yaml")

    cli = argparse.ArgumentParser(
        description='Compare two segmentation. Evaluation is similar\
 to MICCAI 2007 workshop. Metrics are described in\
 www.sliver07.com/p7.pdf')
    cli.add_argument('-d', '--debug', action='store_true',
                     help='run in debug mode',
                     default=False)
    cli.add_argument('-si', '--sampleInput', action='store_true',
                     help='generate sample intput data',
                     default=False)
    cli.add_argument('-v', '--visualization', action='store_true',
                     help='Turn on visualization',
                     default=False)
    cli.add_argument('-y', '--inputYamlFile',
                     help='input yaml file',
                     default=default_data_file)
    cli.add_argument('-ds', '--directorySliver',
                     help='input SLiver directory. If this and\
 directoryPklz is not None, yaml file is generated',
                     default=None)
    cli.add_argument('-dp', '--directoryPklz',
                     help='input pklz directory',
                     default=None)
    cli.add_argument('-o', '--outputfile',
                     help='output file without extension',
                     default='20130812_liver_volumetry')
    opts = cli.parse_args()

    if opts.debug:
        logger.setLevel(logging.DEBUG)
        logger.debug('debug mode logging')

    if opts.sampleInput:
        sample_input_data()

    evaluate_and_write_to_file(
        opts.inputYamlFile,
        opts.directoryPklz,
        opts.directorySliver,
        opts.outputfile,
        opts.visualization
    )
def main():
    """Open the DictEdit widget on a dictionary given on the command line."""
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        '--dict',
        default="{'jatra':2, 'ledviny':7}",
        help='input dict')
    cli.add_argument('-d', '--debug', action='store_true',
                     help='Debug mode')
    opts = cli.parse_args()
    if opts.debug:
        console.setLevel(logging.DEBUG)

    app = QtWidgets.QApplication(sys.argv)
    # SECURITY NOTE(review): eval() executes arbitrary code supplied on the
    # command line; ast.literal_eval would be the safe replacement for
    # literal dictionaries.
    editor = DictEdit(dictionary=eval(opts.dict))
    editor.resize(250, 150)
    editor.move(300, 300)
    editor.setWindowTitle('Simple')
    editor.show()
    sys.exit(app.exec_())
def main():
    """Compute features on liver tissue and pickle the experiment results."""
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)

    cli = argparse.ArgumentParser(
        description='Compute features on liver and other tissue.')
    cli.add_argument('-si', '--sampleInput', action='store_true',
                     help='generate sample intput data',
                     default=False)
    cli.add_argument('-v', '--visualization', action='store_true',
                     help='Turn on visualization',
                     default=False)
    cli.add_argument('-i', '--input', help='input yaml file',
                     default="20130919_liver_statistics.yaml")
    cli.add_argument('-o', '--output', help='output file',
                     default="20130919_liver_statistics_results.pkl")
    cli.add_argument('-t', '--train', help='Training',
                     default=False, action='store_true')
    opts = cli.parse_args()

    if opts.sampleInput:
        sample_input_data()

    path_to_yaml = opts.input

    feature_fcns = [feat_hist]

    from sklearn import svm
    from sklearn.naive_bayes import GaussianNB
    classifier_types = [svm.SVC, GaussianNB]

    tile_shape = [1, 100, 100]
    # Every feature function paired with every classifier.
    featrs_plus_classifs = make_product_list(feature_fcns, classifier_types)

    result = experiment(path_to_yaml,
                        featrs_plus_classifs,
                        tile_shape=tile_shape,
                        visualization=opts.visualization,
                        train=opts.train)

    # Persist the experiment result as a pickle.
    output_file = os.path.join(path_to_script, opts.output)
    misc.obj_to_file(result, output_file, filetype='pickle')
def main():
    """Train the liver model and/or localizator depending on CLI flags."""
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        '-i', '--inputfile',
        default=None,
        help='input file')
    cli.add_argument('-d', '--debug', action='store_true',
                     help='Debug mode')
    cli.add_argument('-lm', '--liver-model', action='store_true',
                     help='Train liver model')
    cli.add_argument('-ll', '--liver-localizator', action='store_true',
                     help='Train liver localizator')
    cli.add_argument('--all', action='store_true',
                     help='Train all')
    opts = cli.parse_args()
    if opts.debug:
        console.setLevel(logging.DEBUG)

    if opts.liver_localizator or opts.all:
        organ_localizator.train_liver_localizator_from_sliver_data()

    if opts.liver_model or opts.all:
        organ_model.train_liver_model_from_sliver_data()
def __new__(cls, *args, **kwargs):
    """Build and return a configured logger instead of a class instance.

    Expects positional args ``(filename, fmt, level)``.
    NOTE(review): returning a Logger from __new__ means callers never get an
    instance of this class; the class acts purely as a logger factory.
    """
    filename, fmt, level = args
    logger = logging.getLogger(__name__)
    logger.setLevel(level)  # set the log level
    # getLogger(__name__) always returns the same logger, so attach the
    # handlers only once; the original re-added them (and printed args) on
    # every instantiation, duplicating all output.
    if not logger.handlers:
        format_str = logging.Formatter(fmt)  # shared record format
        sh = logging.StreamHandler()  # console output
        sh.setFormatter(format_str)
        th = handlers.TimedRotatingFileHandler(
            filename=filename, when="D", backupCount=3, encoding='utf-8'
        )  # file output, rotated daily, keeping 3 backups
        th.setFormatter(format_str)
        logger.addHandler(sh)
        logger.addHandler(th)
    return logger
def main():
    """Show the login dialog and open the main window on success."""
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument(
        '-i', '--inputfile',
        default=None,
        help='input file')
    cli.add_argument('-d', '--debug', action='store_true',
                     help='Debug mode')
    opts = cli.parse_args()
    if opts.debug:
        console.setLevel(logging.DEBUG)

    import sys
    app = QtWidgets.QApplication(sys.argv)
    login = Login()
    if login.exec_() == QtWidgets.QDialog.Accepted:
        window = Window()
        window.show()
        sys.exit(app.exec_())
def main():
    """Show dicom data with the summed overlay drawn as a contour."""
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(
        description=' Show dicom data with overlay')
    cli.add_argument('-i', '--inputdatapath',
                     default='',
                     help='path to data dir')
    parsed = cli.parse_args()
    args = vars(parsed)

    reader = datareader.DataReader()
    data3d, metadata = reader.Get3DData(args['inputdatapath'], qt_app=None,
                                        dataplus_format=False)

    # Sum every available overlay into one int8 mask.
    overlays = reader.get_overlay()
    overlay = np.zeros(data3d.shape, dtype=np.int8)
    print("overlays ", list(overlays.keys()))
    for key in overlays:
        overlay += overlays[key]

    viewer = sed3.sed3(data3d, contour=overlay)
    viewer.show()
    return
def main():
    """Train the liver localizator from sliver data."""
    logger.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(description=__doc__)
    cli.add_argument('-o', '--outputfile',
                     default="~/lisa_data/liver.ol.p",
                     help='output file')
    cli.add_argument('-d', '--debug', action='store_true',
                     help='Debug mode')
    opts = cli.parse_args()
    if opts.debug:
        console.setLevel(logging.DEBUG)

    train_liver_localizator_from_sliver_data(opts.outputfile)
def disable_aiopika_logger():
    """Silence aio-pika's internal loggers.

    After calling this function, aio-pika log output below the ``WARNING``
    level is suppressed.
    """
    for module_logger in (
        aio_pika.channel.log,
        aio_pika.robust_channel.log,
        aio_pika.connection.log,
        aio_pika.robust_connection.log,
        aio_pika.exchange.log,
        aio_pika.robust_exchange.log,
        aio_pika.queue.log,
        aio_pika.robust_queue.log,
        aio_pika.pool.log,
        aio_pika.message.log,
        aio_pika.patterns.rpc.log,
        aio_pika.patterns.master.log,
    ):
        module_logger.setLevel(WARNING)
from pprint import pformat from xml.etree import ElementTree import requests import typer from azure.core.exceptions import ClientAuthenticationError from azure.identity import UsernamePasswordCredential from azure.mgmt.resource import ResourceManagementClient, SubscriptionClient from loguru import logger from riskmap import ROOT_DIR from riskmap.mappings import AttckMapper from riskmap.reports import RiskmapReportGenerator logger = logging.getLogger("azure") logger.setLevel(logging.ERROR) app = typer.Typer() atmap = AttckMapper( Path(ROOT_DIR) / "cti", Path(".") / "logs" / "riskcli.json") class CommandUnsuccessfulError(Exception): pass @app.command() @atmap.mapping(enterprise=["T1110.001", "M1032"]) def azbrute( username: str = typer.Option(..., "-u", help="Username to bruteforce"),
def main():
    """Compute features on liver and other tissue, run the train/test
    experiment and pickle the results."""
    # Console logging at WARNING level.
    logger.setLevel(logging.WARNING)
    ch = logging.StreamHandler()
    logger.addHandler(ch)

    parser = argparse.ArgumentParser(
        description='Compute features on liver and other tissue.')
    parser.add_argument('-tr', '--training_yaml_path',
                        help='Input yaml file.' +
                        " You can check sample with -si parameter.",
                        default="20130919_liver_statistics.yaml")
    parser.add_argument('-te', '--testing_yaml_path',
                        help='Input yaml file.' +
                        " You can check sample with -si parameter.",
                        default=None)
    parser.add_argument('-si', '--sampleInput', action='store_true',
                        help='generate sample intput data',
                        default=False)
    parser.add_argument('-v', '--visualization', action='store_true',
                        help='Turn on visualization',
                        default=False)
    parser.add_argument('-fc', '--features_classifs', action='store_true',
                        help='Read features and classifs list from file',
                        default=False)
    parser.add_argument('-o', '--output', help='output file',
                        default="20130919_liver_statistics_results.pkl")
    parser.add_argument('-t', '--train', help='Training', default=False,
                        action='store_true')
    parser.add_argument(
        '-cl', '--classifers',
        help='classifer by string: "SVC", or "GaussianNB", ...',
        nargs='+', type=str, default=['SVC'])
    parser.add_argument('-fe', '--features',
                        help='features by string: "hist", or "glcm", ...',
                        nargs='+', type=str, default=['hist'])
    args = parser.parse_args()

    if args.sampleInput:
        sample_input_data()

    # Fall back to the training set when no explicit testing set is given.
    if args.testing_yaml_path is None:
        print('testing is same as training')
        args.testing_yaml_path = args.training_yaml_path

    # NOTE(review): these three extractors are instantiated but unused
    # (marked noqa); presumably kept for import/availability side effects --
    # confirm before removing.
    gf = tfeat.GaborFeatures()  # noqa
    glcmf = tfeat.GlcmFeatures()  # noqa
    haralick = tfeat.HaralickFeatures()  # noqa

    # Resolve requested feature functions and classifier types by name.
    list_of_feature_fcn = prepared_texture_features_by_string(args.features)
    list_of_classifiers = prepared_classifiers_by_string(args.classifers)
    tile_shape = [10, 50, 50]

    # Either load the feature/classifier pairs from a module, or build the
    # cartesian product of the requested features and classifiers.
    if args.features_classifs:
        import features_classifs
        featrs_plus_classifs = features_classifs.fc
    else:
        featrs_plus_classifs = make_product_list(list_of_feature_fcn,
                                                 list_of_classifiers)

    result = experiment(args.training_yaml_path,
                        args.testing_yaml_path,
                        featrs_plus_classifs,
                        tile_shape=tile_shape,
                        use_voxelsize_norm=True,
                        working_voxelsize_mm=[1, 1, 1],
                        visualization=args.visualization,
                        train=args.train)

    # Save the result to a file.
    output_file = os.path.join(path_to_script, args.output)
    misc.obj_to_file(result, output_file, filetype='pickle')
def main():
    """Installer helper: fetch sample data, create icons, install deps."""
    logger.setLevel(logging.WARNING)
    console = logging.StreamHandler()
    logger.addHandler(console)

    # input parser
    cli = argparse.ArgumentParser(
        description=
        'Segment vessels from liver \n \npython organ_segmentation.py\n \n\
python organ_segmentation.py -mroi -vs 0.6')
    cli.add_argument('-d', '--get_sample_data', action='store_true',
                     default=False,
                     help='Get sample data')
    cli.add_argument('-i', '--install', action='store_true',
                     default=False,
                     help='Install')
    cli.add_argument('-icn', '--make_icon', action='store_true',
                     default=False,
                     help='Creates desktop icon, works only in ubuntu')
    cli.add_argument('-g', '--get_git', action='store_true',
                     default=False,
                     help='Get git in windows')
    cli.add_argument('--build_gco', action='store_true',
                     default=False,
                     help='Build gco_python in windows. Problematic step.')
    opts = cli.parse_args()

    if opts.get_sample_data:
        import lisa
        import lisa.dataset
        lisa.dataset.get_sample_data()

    if opts.make_icon:
        make_icon()

    if opts.install:
        print('Installing system environment')
        if sys.platform.startswith('linux'):
            subprocess.call('./envinstall/envubuntu.sh')
        elif sys.platform.startswith('win'):
            windows_install()
            # gco_python is either built locally or fetched prebuilt.
            if opts.build_gco:
                windows_build_gco()
            else:
                windows_get_gco()

    if opts.get_git:
        windows_get_git()
def main():
    """CLI entry point for the organ segmentation pipeline."""
    logger.setLevel(logging.WARNING)
    ch = logging.StreamHandler()
    logger.addHandler(ch)

    # input parser
    parser = argparse.ArgumentParser(
        description='Segment vessels from liver \n\
\npython organ_segmentation.py\n\
\npython organ_segmentation.py -mroi -vs 0.6')
    parser.add_argument('-dd', '--dcmdir',
                        default=None,
                        help='path to data dir')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='run in debug mode')
    parser.add_argument('-vs', '--voxelsize_mm', default='3', type=str,
                        help='Insert working voxelsize. It can be number or \
array of three numbers. \n \
-vs 3 \n \
-vs [3,3,5]')
    parser.add_argument('-mroi', '--manualroi', action='store_true',
                        help='manual crop before data processing')
    parser.add_argument('-iparams', '--iparams',
                        default=None,
                        help='filename of ipars file with stored interactivity')
    parser.add_argument('-t', '--tests', action='store_true',
                        help='run unittest')
    parser.add_argument('-tx', '--textureanalysis', action='store_true',
                        help='run with texture analysis')
    parser.add_argument('-exd', '--exampledata', action='store_true',
                        help='run unittest')
    parser.add_argument('-ed', '--editdata', action='store_true',
                        help='Run data editor')
    parser.add_argument('-so', '--show_output', action='store_true',
                        help='Show output data in viewer')
    parser.add_argument(
        '-ss', '--segmentation_smoothing',
        action='store_true',
        help='Smoothing of output segmentation',
        default=False
    )
    args = parser.parse_args()

    # voxelsize_mm can be number or array.
    # SECURITY NOTE(review): eval() on a CLI string executes arbitrary code;
    # ast.literal_eval would be the safe choice for numbers/lists.
    args.voxelsize_mm = np.array(eval(args.voxelsize_mm))

    if args.debug:
        logger.setLevel(logging.DEBUG)

    if args.tests:
        # hack for use argparse and unittest in one module
        sys.argv[1:] = []
        unittest.main()
        sys.exit()

    if args.exampledata:
        args.dcmdir = '../sample_data/\
matlab/examples/sample_data/DICOM/digest_article/'

    if args.iparams is not None:
        args.iparams = misc.obj_from_file(args.iparams)

    # Fix: argparse stores '--segmentation_smoothing' under
    # args.segmentation_smoothing; the original read the nonexistent
    # attribute args.action_segmentation_smoothing (AttributeError).
    oseg = OrganSegmentation(args.dcmdir,
                             working_voxelsize_mm=args.voxelsize_mm,
                             manualroi=args.manualroi,
                             texture_analysis=args.textureanalysis,
                             edit_data=args.editdata,
                             smoothing=args.segmentation_smoothing,
                             iparams=args.iparams
                             )
    oseg.interactivity()

    # volume in liters
    print(
        "Volume " +
        str(oseg.get_segmented_volume_size_mm3() / 1000000.0) + ' [l]')

    if args.show_output:
        oseg.show_output()

    # Fix: raw_input is Python 2 only; input() is the Python 3 equivalent
    # plain-string prompt.
    savestring = input('Save output data? (y/n): ')
    if savestring in ['Y', 'y']:
        data = oseg.export()
        misc.obj_to_file(data, "organ.pkl", filetype='pickle')
        misc.obj_to_file(oseg.get_ipars(), 'ipars.pkl', filetype='pickle')


# NOTE(review): the two methods below take ``self`` and appear to belong to a
# GUI class whose header is outside this chunk; kept unchanged apart from
# comments.
def checkSegData(self):
    """Validate segmentation data and update status bar / labels."""
    if self.segmentation_data is None:
        self.statusBar().showMessage('No SEG data!')
        return

    nzs = self.segmentation_data.nonzero()
    nn = nzs[0].shape[0]
    if nn > 0:
        aux = ' voxels = %d, volume = %.2e mm3' % (nn, nn * self.voxel_volume)
        self.setLabelText(self.text_seg_data, aux)
        self.setLabelText(self.text_mesh_in, 'segmentation data')
        self.statusBar().showMessage('Ready')
    else:
        self.statusBar().showMessage('Zero SEG data!')


def autoSeg(self):
    """Run interactive graph-cut segmentation on the loaded DICOM data."""
    if self.dcm_3Ddata is None:
        self.statusBar().showMessage('No DICOM data!')
        return

    igc = pycut.ImageGraphCut(self.dcm_3Ddata,
                              voxelsize=self.voxel_size_mm)
    pyed = QTSeedEditor(self.dcm_3Ddata,
                        seeds=self.segmentation_seeds,
                        modeFun=igc.interactivity_loop,
                        voxelVolume=self.voxel_volume)
    pyed.exec_()
    self.segmentation_data = pyed.getContours()
    self.segmentation_seeds = pyed.getSeeds()
    self.checkSegData()
print_function) #, unicode_literals) import platform import uproot import pandas as pd from tqdm.auto import tqdm # Progressbar import argparse if float(platform.python_version()[0:3]) >= 3.5: from loguru import logger else: import logging logger = logging.getLogger(__file__) FORMAT = "[%(name)s:%(funcName)s: line %(lineno)s] - %(levelname)s - %(message)s" logging.basicConfig(format=FORMAT) logger.setLevel(logging.DEBUG) def parse_args(): # Required parser = argparse.ArgumentParser() parser.add_argument('-r', '--run_list', nargs='+', type=int, help='can be a list of one...', required=True) parser.add_argument( '-f', '--common_file_format', help='''