Example #1
 def __init__(self, **kwargs):
     Struct.__init__(self)
     for k, v in dict(
             name=None,
             shape=None,
             scale=80.,
             roots=[],
             seed=0,
             latticetype="quad",
             num_hierarchical_iterations=0,
             tip_radius_arterial=3.5,
             tip_radius_capi=3.5,
             tip_radius_vein=4.5,
             max_sprout_radius_artery=8.,
             max_sprout_radius_vein=8.,  # these are the max radii to which capillaries can connect
             murray_alpha_vein=3.,
             murray_alpha_artery=3.,
             capillariesUntilLevel=0,
             o2range=300.,
             outfilename='unnamed',
             full_debug_output=False,
             ensemble_index=0,
             generate_more_capillaries=False,
             calcflow=None,
             num_threads=1,
             changeRateThreshold=1.e-3,  # this may be too low for 2d configurations!!!
     ).iteritems():
         self[k] = kwargs.pop(k, v)
     assert not kwargs
     self.__setattr__ = VD.checked_setattr
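
The __init__ above pops each known key out of **kwargs with a default value and then rejects whatever is left, so misspelled parameter names fail immediately instead of being silently ignored. A minimal, self-contained sketch of the same pattern (the Struct and Parameters classes here are illustrative stand-ins, not the project's actual implementations):

class Struct(dict):
    """Illustrative stand-in: a dict whose keys are also readable as attributes."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

class Parameters(Struct):
    def __init__(self, **kwargs):
        Struct.__init__(self)
        defaults = dict(name=None, scale=80., seed=0, latticetype='quad')
        for k, v in defaults.items():
            self[k] = kwargs.pop(k, v)    # caller's value if given, else the default
        assert not kwargs, 'unknown parameters: %r' % kwargs

p = Parameters(name='test', seed=42)
print(p.scale)            # 80.0, the default was kept
# Parameters(sclae=1.)    # would trip the assert: misspelled keys are rejected
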
Example #2
class VesselData(object):
    def __init__(self):
        self.data = Struct(rBV_by_iter=[],
                           radii_prob=[],
                           lengths_by_rad=[],
                           lengths_prob=[],
                           num_branches_by_rad=[])

    def add(self, group):
        for name, datas in self.data.iteritems():
            ds = group[name]
            datas.append(np.asarray(ds))

    def getAvgCurve(self, name):
        return np.average(self.data[name], axis=0)

    def __getitem__(self, name):
        return self.data[name]
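
A minimal usage sketch for VesselData, assuming a hypothetical HDF5 layout with one group per run, each group carrying datasets named exactly like the keys of self.data; the class above is Python 2 code (it uses iteritems):

import numpy as np
import h5py

with h5py.File('runs.h5', 'w', driver='core', backing_store=False) as f:
    # build three fake runs with identically named, identically shaped curves
    for i in range(3):
        g = f.create_group('run%02d' % i)
        for name in ('rBV_by_iter', 'radii_prob', 'lengths_by_rad',
                     'lengths_prob', 'num_branches_by_rad'):
            g.create_dataset(name, data=np.random.rand(10))

    vd = VesselData()                    # class from the snippet above
    for key in f:
        vd.add(f[key])                   # appends one np.ndarray per dataset
    avg = vd.getAvgCurve('radii_prob')   # element-wise mean over the three runs
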
Example #3
def analyzeTumorShape(dataset,
                      do_curvature=True,
                      do_integrals=True,
                      do_linecomps=False,
                      contourspec=('S', 0.)):
    out = Struct()
    # generate tumor surface
    contour_name, contour_value = contourspec
    assert contour_value in (0., 0.5)
    dataset.GetPointData().SetActiveScalars(contour_name)
    iso = vtkcommon.vtkContour(dataset, contour_value)
    # measurement
    if do_linecomps:
        lc, pts = vtkcommon.vtkGetLineComponents(iso)
        out['lines'] = lc
        out['points'] = pts
    if do_curvature:
        # curvature
        tmp = vtkcommon.vtkCurvature(iso, 9)
        out['curvature'] = np.asarray(
            vtkcommon.fromVtkArray(tmp.GetPointData().GetArray("curvature")),
            dtype=np.float32)
        del tmp
    if do_integrals:
        # integrate volume (area, length)
        _, _, out['iso_area'] = vtkcommon.vtkIntegrateData(iso)
        # integrated data
        cd, pd, volume = vtkcommon.vtkIntegrateData(dataset)
        cd = dict((dataset.GetCellData().GetArray(i).GetName(), cd[i])
                  for i in xrange(dataset.GetCellData().GetNumberOfArrays()))
        pd = dict((dataset.GetPointData().GetArray(i).GetName(), pd[i])
                  for i in xrange(dataset.GetPointData().GetNumberOfArrays()))
        data = cd.copy()
        data.update(pd)
        if contour_value == 0:
            out['tumor_volume'] = (data[contour_name] + volume) * 0.5
        else:
            out['tumor_volume'] = data[contour_name]
        out['radius_estimate'] = math.sqrt(out.tumor_volume / math.pi)
        out['area_estimate'] = 2. * math.pi * out.radius_estimate
    return out
Example #4
  size = (401, 41, 41),
  scale = 10.,  # gives a 4 x 0.4 x 0.4 mm^3 domain
  pgrad = PressureGradientFromFlowRate(3.3e6, 50., _paramsbf),
  r = 50.,
  ld_type = 'quad',
  direction_mode = 0,
)

basecase = namedtuple('Params', 'paramsbf, paramspo2, paramsTube')(_paramsbf, _paramspo2, _paramstube)

basecase.paramspo2['massTransferCoefficientModelNumber'] = 1
basecase.paramspo2['conductivity_coeff1'] = 7.2
basecase.paramspo2['conductivity_coeff2'] = 4.0
basecase.paramspo2['conductivity_coeff3'] = 0.0

literature_cases = Struct()

# This is supposed to reproduce the cases from Nair 1989,
# where O2 diffuses from a perfused pipe through a block
# of rubber.
nair_release = deepcopy(basecase)
nair_release.paramsTube['size'] = (401, 19, 19)
nair_release.paramsTube['r'] = 27*0.5   # the paper gives the diameter, so convert to radius
nair_release.paramsTube['pgrad'] = PressureGradientFromFlowRate(3.3e6, 27.*0.5, _paramsbf)  # flow rate of 12 ul /hr
nair_release.paramspo2['tissue_po2_boundary_condition'] = 'dirichlet_yz'
nair_release.paramspo2['po2init_r0'] = 160
nair_release.paramspo2['po2init_cutoff'] = 160
nair_release.paramspo2['mmcons_m0_tum'] = 0
nair_release.paramspo2['mmcons_m0_norm'] = 0
nair_release.paramspo2['solubility_tissue'] = 1
nair_release.paramspo2['D_tissue'] = 0.94
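
The Nair case above is built by deep-copying the base case and overriding only the entries that differ, so every literature case stays a complete, self-consistent parameter set. A minimal sketch of that pattern with made-up parameter names and values:

from copy import deepcopy
from collections import namedtuple

Params = namedtuple('Params', 'paramsbf paramspo2')
base = Params(paramsbf=dict(viscosity=4.), paramspo2=dict(D_tissue=2.0))

case = deepcopy(base)                       # copies the dicts, not just the tuple
case.paramspo2['D_tissue'] = 0.94           # override a single entry for this case
assert base.paramspo2['D_tissue'] == 2.0    # the base case is untouched
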
Example #5
	def trunc_disk_ops(inode, initial_size, final_size, append_micro_op = None):
		toret = []

		if initial_size == final_size:
			print 'Warning: trunc_disk_ops called for the same initial and final size, ' + str(initial_size) 
			return toret

		# If we are making the file smaller, follow the same algorithm
		# as making the file bigger. But, exchange the initial_size and
		# final_size in the beginning, and then reverse the final
		# output list.
		invert = False
		if initial_size > final_size:
			t = initial_size
			initial_size = final_size
			final_size = t
			invert = True

		if append_micro_op:
			assert not invert
			assert append_micro_op.inode == inode
			assert append_micro_op.offset == initial_size
			assert append_micro_op.count == (final_size - initial_size)

		start = initial_size
		remaining = final_size - initial_size
		if split_mode == 'count':
			per_slice_size = int(math.ceil(float(remaining) / splits))

		end = 0
		while remaining > 0:
			if split_mode == 'aligned':
				count = min(splits - (start % splits), remaining)
			else:
				count = min(per_slice_size, remaining)
			end = count + start

			if invert:
				# Actually truncate
				disk_op = Struct(op = 'truncate', inode = inode, initial_size = start, final_size = end)
				toret.append(disk_op)

			if append_micro_op:
				# Write zeros
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = 0, count = count, dump_file = None, override_data = None, special_write = 'ZEROS')
				toret.append(disk_op)

			if not invert:
				# Write garbage
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = 0, count = count, dump_file = None, override_data = None, special_write = 'GARBAGE')
				toret.append(disk_op)

			if (not invert) and not append_micro_op:
				# Write zeros
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = 0, count = count, dump_file = None, override_data = None, special_write = 'ZEROS')
				toret.append(disk_op)

			if append_micro_op:
				# Write data
				dump_offset = append_micro_op.dump_offset + (start - append_micro_op.offset)
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = dump_offset, count = count, dump_file = append_micro_op.dump_file, special_write = None)
				toret.append(disk_op)
	
			remaining -= count
			start = end

		assert end == final_size

		if invert == True:
			toret.reverse()
			for disk_op in toret:
				t = disk_op.initial_size
				disk_op.initial_size = disk_op.final_size
				disk_op.final_size = t

		return toret
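
The loop above slices the changed byte range either at multiples of splits ('aligned' mode, each piece ends on the next block boundary) or into roughly splits equal pieces ('count' mode). splits and split_mode are free variables here; they come from the enclosing get_disk_ops (Example #6), so this extracted helper does not run on its own. A standalone sketch of just the slicing step, under those assumptions:

import math

def slice_range(start, size, splits, split_mode):
    """Yield (offset, count) pieces the way trunc_disk_ops walks a range."""
    remaining = size
    if split_mode == 'count':
        per_slice_size = int(math.ceil(float(remaining) / splits))
    while remaining > 0:
        if split_mode == 'aligned':
            count = min(splits - (start % splits), remaining)  # stop at the next multiple of splits
        else:
            count = min(per_slice_size, remaining)
        yield (start, count)
        remaining -= count
        start += count

print(list(slice_range(10, 20, 8, 'aligned')))  # [(10, 6), (16, 8), (24, 6)]
print(list(slice_range(10, 20, 3, 'count')))    # [(10, 7), (17, 7), (24, 6)]
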
Example #6
def get_disk_ops(line, splits, split_mode, expanded_atomicity):
	assert split_mode in ['aligned', 'count']
	def trunc_disk_ops(inode, initial_size, final_size, append_micro_op = None):
		toret = []

		if initial_size == final_size:
			print 'Warning: trunc_disk_ops called for the same initial and final size, ' + str(initial_size) 
			return toret

		# If we are making the file smaller, follow the same algorithm
		# as making the file bigger. But, exchange the initial_size and
		# final_size in the beginning, and then reverse the final
		# output list.
		invert = False
		if initial_size > final_size:
			t = initial_size
			initial_size = final_size
			final_size = t
			invert = True

		if append_micro_op:
			assert not invert
			assert append_micro_op.inode == inode
			assert append_micro_op.offset == initial_size
			assert append_micro_op.count == (final_size - initial_size)

		start = initial_size
		remaining = final_size - initial_size
		if split_mode == 'count':
			per_slice_size = int(math.ceil(float(remaining) / splits))

		end = 0
		while remaining > 0:
			if split_mode == 'aligned':
				count = min(splits - (start % splits), remaining)
			else:
				count = min(per_slice_size, remaining)
			end = count + start

			if invert:
				# Actually truncate
				disk_op = Struct(op = 'truncate', inode = inode, initial_size = start, final_size = end)
				toret.append(disk_op)

			if append_micro_op:
				# Write zeros
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = 0, count = count, dump_file = None, override_data = None, special_write = 'ZEROS')
				toret.append(disk_op)

			if not invert:
				# Write garbage
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = 0, count = count, dump_file = None, override_data = None, special_write = 'GARBAGE')
				toret.append(disk_op)

			if (not invert) and not append_micro_op:
				# Write zeros
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = 0, count = count, dump_file = None, override_data = None, special_write = 'ZEROS')
				toret.append(disk_op)

			if append_micro_op:
				# Write data
				dump_offset = append_micro_op.dump_offset + (start - append_micro_op.offset)
				disk_op = Struct(op = 'write', inode = inode, offset = start, dump_offset = dump_offset, count = count, dump_file = append_micro_op.dump_file, special_write = None)
				toret.append(disk_op)
	
			remaining -= count
			start = end

		assert end == final_size

		if invert == True:
			toret.reverse()
			for disk_op in toret:
				t = disk_op.initial_size
				disk_op.initial_size = disk_op.final_size
				disk_op.final_size = t

		return toret

	def unlink_disk_ops(parent, inode, name, size, hardlinks, entry_type = TYPE_FILE):
		toret = []
		if hardlinks == 1:
			toret += trunc_disk_ops(inode, size, 0)
		disk_op = Struct(op = 'delete_dir_entry', parent = parent, entry = name, inode = inode, entry_type = entry_type) # Inode stored, Vijay hack
		toret.append(disk_op)
		return toret
	def link_disk_ops(parent, inode, name, mode = None, entry_type = TYPE_FILE):
		return [Struct(op = 'create_dir_entry', parent = parent, entry = name, inode = inode, mode = mode, entry_type = entry_type)]

	if line.op == 'creat':
		line.hidden_disk_ops = link_disk_ops(line.parent, line.inode, line.name, line.mode)
	elif line.op == 'unlink':
		line.hidden_disk_ops = unlink_disk_ops(line.parent, line.inode, line.name, line.size, line.hardlinks)
	elif line.op == 'link':
		line.hidden_disk_ops = link_disk_ops(line.dest_parent, line.source_inode, line.dest)
	elif line.op == 'rename':
		line.hidden_disk_ops = []
		# source: source_inode, dest: dest_inode
		if expanded_atomicity:
			line.hidden_disk_ops += unlink_disk_ops(line.source_parent, line.source_inode, line.source, line.source_size, 2)
		# source: None, dest: dest_inode
		if line.dest_hardlinks >= 1:
			line.hidden_disk_ops += unlink_disk_ops(line.dest_parent, line.dest_inode, line.dest, line.dest_size, line.dest_hardlinks)
		# source: None, dest: None
		if expanded_atomicity:
			line.hidden_disk_ops += link_disk_ops(line.source_parent, line.source_inode, line.source)
		# source: source_inode, dest: None
		line.hidden_disk_ops += unlink_disk_ops(line.source_parent, line.source_inode, line.source, line.source_size, 2) # Setting hardlinks as 2 so that trunc does not happen
		# source: None, dest: None
		line.hidden_disk_ops += link_disk_ops(line.dest_parent, line.source_inode, line.dest)
		# source: None, dest: source_inode
		if expanded_atomicity:
			line.hidden_disk_ops += link_disk_ops(line.source_parent, line.source_inode, line.source)
		# source: source_inode, dest: source_inode
		if expanded_atomicity:
			line.hidden_disk_ops += unlink_disk_ops(line.source_parent, line.source_inode, line.source, line.source_size, 2) # Setting hardlinks as 2 so that trunc does not happen
		# source: None, dest: source_inode
	elif line.op == 'trunc':
		line.hidden_disk_ops = trunc_disk_ops(line.inode, line.initial_size, line.final_size)
	elif line.op == 'append':
		line.hidden_disk_ops = trunc_disk_ops(line.inode, line.offset, line.offset + line.count, line)
		if expanded_atomicity:
			line.hidden_disk_ops += trunc_disk_ops(line.inode, line.offset, line.offset + line.count, line)
	elif line.op == 'write':
		assert line.count > 0
		line.hidden_disk_ops = []

		offset = line.offset
		remaining = line.count
		if split_mode == 'count':
			per_slice_size = int(math.ceil(float(line.count) / splits))

		while remaining > 0:
			if split_mode == 'aligned':
				count = min(splits - (offset % splits), remaining)
			else:
				count = min(per_slice_size, remaining)

			dump_offset = line.dump_offset + (offset - line.offset)
			disk_op = Struct(op = 'write', inode = line.inode, offset = offset, dump_offset = dump_offset, count = count, dump_file = line.dump_file, override_data = None, special_write = None)
			line.hidden_disk_ops.append(disk_op)
			remaining -= count
			offset += count
	elif line.op == 'mkdir':
		line.hidden_disk_ops = link_disk_ops(line.parent, line.inode, line.name, eval(line.mode), TYPE_DIR)
	elif line.op == 'rmdir':
		line.hidden_disk_ops = unlink_disk_ops(line.parent, line.inode, line.name, 0, 0, TYPE_DIR)
	elif line.op in ['fsync', 'fdatasync', 'file_sync_range']:
		line.hidden_disk_ops = []
		if line.op in ['fsync', 'fdatasync']:
			offset = 0
			count = line.size
		else:
			offset = line.offset
			count = line.count
		disk_op = Struct(op = 'sync', inode = line.inode, offset = offset, count = count)
		line.hidden_disk_ops.append(disk_op)
	elif line.op in ['stdout', 'stderr']:
		line.hidden_disk_ops = [Struct(op = line.op, data = line.data)]
	else:
		assert False

	cnt = 0
	for disk_op in line.hidden_disk_ops:
		disk_op.hidden_omitted = False
		disk_op.hidden_id = cnt
		disk_op.hidden_micro_op = line
		cnt += 1
	return line.hidden_disk_ops
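
A minimal usage sketch (Python 2, like the snippet above, which is why it has to live in the same module): a tiny attribute-dict stands in for the project's Struct, and a single hypothetical write micro-op is split into 4096-byte-aligned disk_ops. TYPE_FILE/TYPE_DIR would also have to be defined for the directory-entry branches, but the 'write' branch used here never touches them.

class Struct(dict):
    """Illustrative stand-in for the project's Struct."""
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

line = Struct(op='write', inode=7, offset=100, count=4096,
              dump_offset=0, dump_file='trace.dump')

ops = get_disk_ops(line, splits=4096, split_mode='aligned',
                   expanded_atomicity=False)
for op in ops:
    print('%s offset=%d count=%d' % (op['op'], op['offset'], op['count']))
# -> write offset=100 count=3996   (up to the 4096-byte boundary)
#    write offset=4096 count=100
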
Example #7
 def __init__(self):
     self.data = Struct(rBV_by_iter=[],
                        radii_prob=[],
                        lengths_by_rad=[],
                        lengths_prob=[],
                        num_branches_by_rad=[])
Example #8
 def checked_setattr(self, k, v):
     assert k in self
     Struct.__setattr__(self, k, v)
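
checked_setattr only allows assignment to keys that already exist, so a typo such as p.sclae = 1. fails immediately instead of silently creating a new field. A self-contained illustration of the same guard (the names are illustrative; here the check is defined on the class, since Python looks __setattr__ up on the type):

class CheckedStruct(dict):
    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)

    def __setattr__(self, name, value):
        assert name in self, 'unknown parameter: %r' % name
        self[name] = value

p = CheckedStruct(scale=80., seed=0)
p.scale = 100.      # fine, 'scale' already exists
try:
    p.sclae = 1.    # typo -> AssertionError
except AssertionError as e:
    print(e)
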
Example #9
import os, sys
import h5py
import posixpath
import numpy as np
import collections
import matplotlib
import matplotlib.pyplot as plt
import mpl_utils
import extensions

from mystruct import Struct
import myutils

dataconfigs = [
    Struct(name='kdiff_cells', rng='auto', title=r'kdiff_cells'),
    Struct(name='kdiff_obst', rng='auto', title=r'kdiff_obst'),
    Struct(name='necro', rng=(-0.1, 1.1), title=r'necro'),
    Struct(name='totalconc', rng=None, title=r'vol. ratio $\phi + m$'),
    Struct(name='conc', rng=(-0.1, 1.1), title=r'vol. ratio $\phi$'),
    Struct(name='conc_necro', rng=(-0.1, 1.1), title=r'vol. ratio $d$'),
    Struct(name='obstacle', rng=(-0.1, 1.1), title=r'obst. ratio $m$'),
    Struct(name='oxy', rng=(-0.1, 1.1), title=r'O$_2\, c$'),
    Struct(name='oxy_sources', rng=(-0.1, 1.1), title=r'$\Gamma_c$'),
    Struct(name='fieldOxy', rng=(-0.1, 1.1), title=r'O$_2$'),
    Struct(name='vel_0', rng='auto', coords='xface', title=r'$v_x$'),
    Struct(name='ls', rng='zero-centered', title=r'$\theta$'),
    Struct(name='vel', rng='zero-centered', title=r'$v_x$'),
    Struct(name='press', rng='auto', title=r'pressure $\Sigma$'),
    Struct(name='ptc', rng=(0., 1.), title=r'$\Theta_\epsilon(\theta)$'),
    Struct(name='sources',
Example #10
def H5PrintOpt():
    return Struct(recursive=False,
                  print_full_dataset=0,
                  print_attributes=False,
                  print_eval=None)
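
H5PrintOpt only bundles the default options; the code that consumes them is not part of this listing. A hypothetical consumer, sketched here only to show how such an options struct can drive an HDF5 traversal (the walker and the file layout are assumptions, not the project's code):

import numpy as np
import h5py

def print_h5(node, opt, indent=0):
    """Hypothetical walker driven by the options struct above."""
    pad = '  ' * indent
    for name, item in node.items():
        if isinstance(item, h5py.Group):
            print(pad + name + '/')
            if opt.recursive:
                print_h5(item, opt, indent + 1)
        else:
            print(pad + name + ' ' + str(item.shape))

opt = H5PrintOpt()      # defaults from the snippet above
opt.recursive = True
with h5py.File('demo.h5', 'w', driver='core', backing_store=False) as f:
    f.create_group('fields').create_dataset('oxy', data=np.zeros((8, 8)))
    print_h5(f, opt)
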