import glob
import os

import numpy as np
import pandas as pd
import SimpleITK as sitk

# Assumed cube size for inference (the docstring below describes 64x64x64 cubes);
# adjust if the project defines IN_SIZE elsewhere.
IN_SIZE = (64, 64, 64)


def slicer(patientDir):
    """Split an NRRD CT scan file into disjoint 64x64x64 cubes for inference."""
    multiple = 4
    desired = np.multiply(IN_SIZE, multiple)
    newshape = np.array(IN_SIZE)
    cuber = Cubify(oldshape=desired, newshape=newshape)

    sliceDir = patientDir + "sliced/"
    if not os.path.exists(sliceDir):
        os.mkdir(sliceDir)

    patient = patientDir + "/orig.nrrd"
    scan = sitk.ReadImage(patient)
    scan = sitk.GetArrayFromImage(scan)
    shape = np.array(scan.shape)

    # Crop the image to fit the desired shape.
    # NOTE: we are losing information at the edges here -- be careful.
    excess = np.abs(desired - shape)
    excess1 = excess // 2
    excess2 = excess - excess1
    scan = scan[excess1[0]:shape[0] - excess2[0],
                excess1[1]:shape[1] - excess2[1],
                excess1[2]:shape[2] - excess2[2]]
    scan = pad(scan, desired)
    sitk.WriteImage(sitk.GetImageFromArray(scan), sliceDir + "orig.nrrd")

    # Cut the padded volume into cubes and write each one as a raw binary file.
    scan = cuber.cubify(scan)
    nCubes = scan.shape[0]
    for arrNo in range(nCubes):
        wp = sliceDir + "sliced_{0}.bin".format(arrNo)
        scan[arrNo].tofile(wp)

    # Build the inference CSV; paths are generated from the cube indices so that
    # multi-digit indices are included (the old glob "sliced_[0-9].bin" missed them).
    paths = [os.path.abspath(sliceDir + "sliced_{0}.bin".format(i))
             for i in range(nCubes)]
    y = ["dummy.bin" for x in paths]
    csv = pd.DataFrame({"x": paths, "y": y})
    csv.to_csv(sliceDir + "csv.csv", index=False)
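
# slicer() calls a pad(scan, desired) helper that is not defined in this file.
# The function below is a minimal sketch of what such a helper could look like
# (symmetric zero-padding up to the target shape); it is an assumption, not the
# project's actual implementation.
def pad(arr, desired):
    """Zero-pad `arr` symmetrically so each dimension reaches `desired` (assumed helper)."""
    deficit = np.maximum(np.array(desired) - np.array(arr.shape), 0)
    before = deficit // 2
    after = deficit - before
    return np.pad(arr, tuple(zip(before, after)), "constant")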
def grouper(patientDir):
    """Reassemble predicted cubes into a full volume and save the results."""
    multiple = 4
    desired = np.multiply(IN_SIZE, multiple)
    newshape = np.array(IN_SIZE)
    cuber = Cubify(oldshape=desired, newshape=newshape)

    patientDir += "sliced/"
    nCubes = len(glob.glob(patientDir + "sliced_*_yPred.bin"))
    scan = np.empty((nCubes, IN_SIZE[0], IN_SIZE[1], IN_SIZE[2]))
    for i in range(nCubes):
        path = patientDir + "sliced_{0}_yPred.bin".format(i)
        img = np.fromfile(path, dtype=np.float32).reshape(IN_SIZE)
        scan[i] = img

    # Stitch the predicted cubes back into one volume.
    scan = cuber.uncubify(scan)
    mass = scan.sum()
    mass.tofile(patientDir + "predictedMass.bin")
    sitk.WriteImage(sitk.GetImageFromArray(scan), patientDir + "predicted.nrrd")

    # Pad the prediction back up to the original image's shape.
    orig = sitk.ReadImage(patientDir + "../orig.nrrd")
    orig = sitk.GetArrayFromImage(orig)
    desired = np.array(orig.shape)
    difference = (desired - np.array(scan.shape)) // 2
    try:
        padding = ((difference[0], difference[0]),
                   (difference[1], difference[1]),
                   (difference[2], difference[2]))
        scanPad = np.pad(scan, padding, "constant")
        sitk.WriteImage(sitk.GetImageFromArray(scanPad),
                        patientDir + "predictedPad.nrrd")
    except ValueError:
        print("Couldn't pad this one")
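
# Both slicer() and grouper() depend on a Cubify(oldshape, newshape) helper with
# cubify()/uncubify() methods that is not defined in this file. The class below is
# a minimal sketch of such a helper using the standard reshape/transpose trick for
# splitting an array into equal blocks and stitching them back together; it is an
# assumption, not necessarily the project's own implementation.
class Cubify(object):

    def __init__(self, oldshape, newshape):
        self.oldshape = np.array(oldshape)
        self.newshape = np.array(newshape)
        repeats = self.oldshape // self.newshape
        # Interleave (repeats, newshape) per axis, e.g. (4, 64, 4, 64, 4, 64).
        self.tmpshape = np.column_stack([repeats, self.newshape]).ravel()
        order = np.arange(len(self.tmpshape))
        self.order = np.concatenate([order[::2], order[1::2]])

    def cubify(self, arr):
        # (D, H, W) -> (nCubes, d, h, w)
        return arr.reshape(self.tmpshape).transpose(self.order).reshape(-1, *self.newshape)

    def uncubify(self, arr):
        # (nCubes, d, h, w) -> (D, H, W)
        repeats = self.oldshape // self.newshape
        tmpshape = np.concatenate([repeats, self.newshape])
        order = np.arange(len(tmpshape)).reshape(2, -1).ravel(order="F")
        return arr.reshape(tmpshape).transpose(order).reshape(self.oldshape)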
#
# Cubify Tutorial Part 2
#
# This tutorial shows you how to use CubeSets
#

from cubify import Cubify
import json

# Instantiate Cubify
cubify = Cubify()

#
# Do cleanup from previous runs of this tutorial
#
cubify.deleteCubeSet('purchasesCubeSet')
cubify.deleteCubeSet('purchasesCubeSet2')

# Create a cube set called 'purchasesCubeSet' (with automatic binning)
cubeSet = cubify.createCubeSet('tutorial', 'purchasesCubeSet', 'purchases.csv')

print("")
print("CubeSet purchasesCubeSet created successfully")
print("")

cubeRows = cubify.getSourceCubeRows(cubeSet)
binnedCubeRows = cubify.getBinnedCubeRows(cubeSet)

print("")
print("Cube rows in purchasesCubeSet's source cube:")
for cubeRow in cubeRows:
#
# This tutorial shows you how to use Cubify to:
#
#   1. Create a cube
#   2. Export a cube
#   3. Query cube rows
#   4. Add columns to a cube
#   5. Bin a cube
#   6. Aggregate a cube
#

from cubify import Cubify
import json

# Instantiate Cubify
cubify = Cubify()

#
# Do cleanup from previous runs of this tutorial
#
cubify.deleteCube('purchases')
cubify.deleteCube('purchases_autobinned_1')
cubify.deleteCube('purchases_autobinned_2')
cubify.deleteCube('purchases_binned_1')
cubify.deleteCube('purchases_binned_2')
cubify.deleteCube('purchases_binned_2_CustomerId')
cubify.deleteCube('purchases_binned_2_CustomerState-ProductCategory')
cubify.deleteCube('purchases_binned_2_ProductId')
cubify.deleteCube('purchases_binned_2_CustomerId-TransactionDate')
cubify.deleteCube('purchases_binned_2_agg1')
cubify.deleteCube('purchases_binned_2_agg2')