# Example #1
# 0
                                         '../../data/length-of-stay/'))
# Last CLI option, then parse.
parser.add_argument('--output_dir', type=str, default='.',
                    help='Directory relative which all output files are stored')
args = parser.parse_args()
print(args)

# On tiny debug runs, push the save interval out of reach so no
# intermediate checkpoints are written.
if args.small_part:
    args.save_every = 2**30

# Build readers, discretizers, normalizers
train_dir = os.path.join(args.data, 'train')
train_listfile = os.path.join(args.data, 'train_listfile.csv')
val_listfile = os.path.join(args.data, 'val_listfile.csv')
if args.deep_supervision:
    # Deep-supervision mode uses the data-loader API instead of plain readers.
    train_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir=train_dir,
        listfile=train_listfile,
        small_part=args.small_part)
    val_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir=train_dir,
        listfile=val_listfile,
        small_part=args.small_part)
else:
    # Both splits read from the 'train' directory; the listfile selects rows.
    train_reader = LengthOfStayReader(dataset_dir=train_dir,
                                      listfile=train_listfile)
    val_reader = LengthOfStayReader(dataset_dir=train_dir,
                                    listfile=val_listfile)

discretizer = Discretizer(timestep=args.timestep,
                          store_masks=True,
# CLI: common project options plus a --deep_supervision flag (default False).
parser = argparse.ArgumentParser()
common_utils.add_common_arguments(parser)
parser.add_argument('--deep_supervision',
                    dest='deep_supervision',
                    action='store_true')
parser.set_defaults(deep_supervision=False)
args = parser.parse_args()
# Fixed: was Python-2 `print args` (a SyntaxError on Python 3); the rest of
# this file already uses the function form.
print(args)

# On tiny debug runs, push the save interval out of reach so no
# intermediate checkpoints are written.
if args.small_part:
    args.save_every = 2**30

# Build readers, discretizers, normalizers
if args.deep_supervision:
    # Deep-supervision mode uses the data-loader API instead of plain readers.
    train_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir='../../data/decompensation/train/',
        listfile='../../data/decompensation/train_listfile.csv',
        small_part=args.small_part)
    val_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir='../../data/decompensation/train/',
        listfile='../../data/decompensation/val_listfile.csv',
        small_part=args.small_part)
else:
    # Both splits read from the train/ directory; the listfile selects rows.
    train_reader = DecompensationReader(
        dataset_dir='../../data/decompensation/train/',
        listfile='../../data/decompensation/train_listfile.csv')
    val_reader = DecompensationReader(
        dataset_dir='../../data/decompensation/train/',
        listfile='../../data/decompensation/val_listfile.csv')

discretizer = Discretizer(timestep=args.timestep,
                          store_masks=True,
# Example #3
# 0
                    action='store_true')
parser.set_defaults(deep_supervision=False)
# --partition controls how length-of-stay targets are bucketed.
parser.add_argument('--partition',
                    type=str,
                    default='custom',
                    help="log, custom, none")
args = parser.parse_args()
# Fixed: was Python-2 `print args` (a SyntaxError on Python 3); the rest of
# this file already uses the function form.
print(args)

# On tiny debug runs, push the save interval out of reach so no
# intermediate checkpoints are written.
if args.small_part:
    args.save_every = 2**30

# Build readers, discretizers, normalizers
if args.deep_supervision:
    # Deep-supervision mode uses the data-loader API instead of plain readers.
    train_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir='../../data/length-of-stay/train/',
        listfile='../../data/length-of-stay/train_listfile.csv',
        small_part=args.small_part)
    val_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir='../../data/length-of-stay/train/',
        listfile='../../data/length-of-stay/val_listfile.csv',
        small_part=args.small_part)
else:
    # Both splits read from the train/ directory; the listfile selects rows.
    train_reader = LengthOfStayReader(
        dataset_dir='../../data/length-of-stay/train/',
        listfile='../../data/length-of-stay/train_listfile.csv')
    val_reader = LengthOfStayReader(
        dataset_dir='../../data/length-of-stay/train/',
        listfile='../../data/length-of-stay/val_listfile.csv')

discretizer = Discretizer(timestep=args.timestep,
                          store_masks=True,
# Example #4
# 0
    sources.append('structured_data')
    experiment_name=experiment_name+'structured_'
# Append enabled feature flags to the experiment name so runs are
# distinguishable on disk.
if args.weighted:
    experiment_name += 'weighted_'
if args.condensed:
    experiment_name += 'condensed_'

# On tiny debug runs, push the save interval out of reach so no
# intermediate checkpoints are written.
if args.small_part:
    args.save_every = 2**30



# Build readers, discretizers, normalizers
train_dir = os.path.join(args.data, 'train')
train_listfile = os.path.join(args.data, 'train_listfile.csv')
val_listfile = os.path.join(args.data, 'val_listfile.csv')
if args.deep_supervision:
    # Deep-supervision mode uses the data-loader API instead of plain readers.
    train_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir=train_dir, listfile=train_listfile,
        small_part=args.small_part, sources=sources,
        timesteps=args.timesteps, condensed=args.condensed)
    val_data_loader = common_utils.DeepSupervisionDataLoader(
        dataset_dir=train_dir, listfile=val_listfile,
        small_part=args.small_part, sources=sources,
        timesteps=args.timesteps, condensed=args.condensed)
else:
    # Both splits read from the 'train' directory; the listfile selects rows.
    train_reader = LengthOfStayReader(
        dataset_dir=train_dir, listfile=train_listfile,
        sources=sources, timesteps=args.timesteps, condensed=args.condensed)
    val_reader = LengthOfStayReader(
        dataset_dir=train_dir, listfile=val_listfile,
        sources=sources, timesteps=args.timesteps, condensed=args.condensed)

# NOTE(review): unconditionally re-creates the training reader, duplicating the
# `else` branch above. Presumably needed so `train_reader` also exists when
# --deep_supervision is set (that branch only builds data loaders) — confirm;
# in the non-deep-supervision path this is a redundant second construction.
train_reader = LengthOfStayReader(dataset_dir=os.path.join(args.data, 'train'),
                                      listfile=os.path.join(args.data, 'train_listfile.csv'), sources=sources, timesteps=args.timesteps, condensed=args.condensed)
    
# Column header of the first example.
# NOTE(review): read_example(0) is called twice (here and below) — hoist into
# one call if it is side-effect free; can't confirm from this file.
reader_header = train_reader.read_example(0)['header']
# len() of the whole example object, not of the header — TODO confirm this is
# the intended meaning of `n_bins`.
n_bins = len(train_reader.read_example(0))