def make_pressure_color_arrays(vesselgraph):
    edges = vesselgraph.edgelist
    flags = vesselgraph.edges['flags']
    data = vesselgraph.nodes['pressure']
    num_nodes = len(vesselgraph.nodes['position'])
    nflags = krebsutils.edge_to_node_property(num_nodes, edges, flags, 'or')
    # np.bool is removed in recent numpy; the builtin bool works as the dtype here
    is_set = lambda flags_, flag: np.asarray(np.bitwise_and(flags_, flag), bool)

    gray = np.asarray((0.1, 0.1, 0.1))
    lgray = np.asarray((0.5, 0.5, 0.5))

    circulated = is_set(flags, krebsutils.CIRCULATED)
    ncirculated = is_set(nflags, krebsutils.CIRCULATED)
    capillary = is_set(flags, krebsutils.CAPILLARY)
    ncapillary = is_set(nflags, krebsutils.CAPILLARY)

    p0 = np.amin(data[ncirculated])
    p1 = np.amax(data[ncirculated])

    cm = matplotlib.cm.ScalarMappable(cmap=cm_redblue)
    cm.set_clim(p0, p1)

    edgedata = np.average((data[edges[:, 0]], data[edges[:, 1]]), axis=0)
    edgecolors = cm.to_rgba(edgedata)[:, :3]
    edgecolors[~circulated] = gray
    #edgecolors[capillary] = lgray

    nodecolors = cm.to_rgba(data)[:, :3]
    nodecolors[~ncirculated] = gray
    #nodecolors[ncapillary] = lgray

    vesselgraph.edges['colors'] = edgecolors
    vesselgraph.nodes['colors'] = nodecolors
def plots(self, x):
    figax = train_data.plot(color='b', alpha=0.1, s=10)
    train_data.plot(lambda x: self.catastrophe, color='r', alpha=1, s=20, figax=figax)
    plt.savefig('../results/outliers.png')

    for i in range(6):  # xrange -> range for Python 3
        crap = numpy.sort(self.dm.data_dm().x[:, i])
        # keep the central 80% of the sorted values; slice indices must be integers
        crap = crap[int(len(crap) * .1):int(len(crap) * .9)]
        sig = crap.std()
        cm = matplotlib.cm.ScalarMappable(
            norm=matplotlib.colors.Normalize(vmin=crap[len(crap) // 2] - 5 * sig,
                                             vmax=crap[len(crap) // 2] + 5 * sig),
            cmap='Spectral')
        cval = cm.to_rgba(self.dm.data_dm().x[:, i])
        figax = train_data.plot(c=cval, alpha=0.3, s=20, cmap=cm)
        figax[0].suptitle(str(i))
        plt.savefig('../results/splits.' + str(i) + '.png')

    figax = self.dm.data_dm().plot(color='r', alpha=0.1, s=10, ndim=6)
    self.dm.data_dm().plot(lambda x: self.catastrophe, color='b', alpha=0.1, s=20, ndim=6, figax=figax)
    plt.savefig('../results/temp.png')

    figax = self.dm.data_dm().plot(color='r', alpha=0.1, s=10, nsig=20, ndim=6)
    self.dm.data_dm().plot(lambda x: self.catastrophe, color='b', alpha=0.2, s=20, ndim=6, figax=figax)
    plt.savefig('../results/temp2.png')

    cm = matplotlib.cm.ScalarMappable(cmap='rainbow')
    cval = cm.to_rgba(self.weight(x.x))
    figax = x.plot(c=cval, alpha=0.2, s=20, cmap=cm, vmin=0, vmax=cval.max())
    plt.savefig('../results/color_dm.png')
def write_kml(filename, data, col, cmap):
    ns = 'http://www.opengis.net/kml/2.2'
    ns_prefix = '{' + ns + '}'
    kml = ET.Element(ns_prefix + 'kml')
    document = ET.SubElement(kml, ns_prefix + 'Document')
    document_name = ET.SubElement(document, ns_prefix + 'name')
    document_name.text = OUT_HEADER[col]
    document_name.tail = '\n'

    norm = matplotlib.colors.Normalize(vmin=data[:, col].min(), vmax=data[:, col].max())
    cm = matplotlib.cm.ScalarMappable(norm=norm, cmap=cmap)

    for i in range(data.shape[0] - 1):
        color = cm.to_rgba(data[i, col], bytes=True)
        # KML <color> values are aabbggrr, not rrggbb
        color_string = f'{color[3]:02x}{color[2]:02x}{color[1]:02x}{color[0]:02x}'

        placemark = ET.SubElement(document, ns_prefix + 'Placemark')
        placemark.tail = '\n'
        placemark_name = ET.SubElement(placemark, ns_prefix + 'name')
        placemark_name.text = f'{OUT_HEADER[col]} = {data[i, col]}'
        description = ET.SubElement(placemark, ns_prefix + 'description')
        description.text = f'time = {timestamp_to_string(data[i, OUT_COL_TIME])}'
        linestring = ET.SubElement(placemark, ns_prefix + 'LineString')
        coordinates = ET.SubElement(linestring, ns_prefix + 'coordinates')
        coordinates.text = '%f,%f %f,%f' % (data[i, OUT_COL_LON], data[i, OUT_COL_LAT],
                                            data[i + 1, OUT_COL_LON], data[i + 1, OUT_COL_LAT])
        style = ET.SubElement(placemark, ns_prefix + 'Style')
        linestyle = ET.SubElement(style, ns_prefix + 'LineStyle')
        linecolor = ET.SubElement(linestyle, ns_prefix + 'color')
        linecolor.text = color_string
        width = ET.SubElement(linestyle, ns_prefix + 'width')
        width.text = '4'

    tree = ET.ElementTree(kml)
    with open(filename, 'wb') as outf:
        tree.write(outf, encoding='utf-8', xml_declaration=True, default_namespace=ns)
def matplotlibColormapToPovray(name, cm):
    import matplotlib.cm
    N = 256
    lims = cm.get_clim()
    colors = cm.to_rgba(np.linspace(lims[0], lims[1], N))
    colors = colors[:, :3]  # skip alpha channel
    colors = np.power(colors, 2.4)
    return Colormap(name, lims, colors)
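# Minimal usage sketch for the function above, under the assumption that `Colormap`
# is the POV-Ray colormap class defined elsewhere in this module. It only shows the
# expected input: a ScalarMappable whose cmap and clim are already configured, since
# get_clim() and to_rgba() are read here.
import numpy as np
import matplotlib.cm
import matplotlib.colors

sm = matplotlib.cm.ScalarMappable(norm=matplotlib.colors.Normalize(vmin=0.0, vmax=10.0),
                                  cmap='viridis')
# povray_map = matplotlibColormapToPovray('pressure', sm)  # requires the module's Colormap class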
def get_attimg(image, attmap, cm=matplotlib.cm.ScalarMappable(cmap="jet")):
    h, w = image.size(1), image.size(2)
    s = attmap.size(-1)
    attmap = attmap.squeeze().view(s, s).data.cpu().numpy()
    attmap = resize(attmap, (h, w), mode='reflect')
    attmap = cm.to_rgba(attmap)[:, :, 0:3]
    attmap = torch.from_numpy(attmap).float().permute(2, 0, 1)
    return image + attmap
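# Hedged usage sketch for get_attimg, assuming `resize` above is skimage.transform.resize
# and `image` is a CHW float tensor. The dummy shapes below are illustrative only.
import torch
import matplotlib.cm
from skimage.transform import resize

image = torch.zeros(3, 224, 224)   # dummy CHW image
attmap = torch.rand(1, 1, 7, 7)    # dummy 7x7 attention map
overlay = get_attimg(image, attmap, cm=matplotlib.cm.ScalarMappable(cmap="jet"))
print(overlay.shape)               # torch.Size([3, 224, 224])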
def run(self, workspace):
    header = ("Image", "Objects", "Bin # (innermost=1)",
              "Bin count", "Fraction", "Intensity", "COV")
    stats = []
    d = {}
    for image in self.images:
        for o in self.objects:
            for bin_count_settings in self.bin_counts:
                stats += \
                    self.do_measurements(workspace,
                                         image.image_name.value,
                                         o.object_name.value,
                                         o.center_object_name.value
                                         if o.center_choice != C_SELF else None,
                                         o.center_choice.value,
                                         bin_count_settings,
                                         d)
    if self.wants_zernikes:
        self.calculate_zernikes(workspace)
    if self.show_window:
        workspace.display_data.header = header
        workspace.display_data.stats = stats
        workspace.display_data.heatmaps = []
    for heatmap in self.heatmaps:
        heatmap_img = d.get(id(heatmap))
        if heatmap_img is not None:
            if self.show_window or heatmap.wants_to_save_display:
                labels = workspace.object_set.get_objects(
                    heatmap.object_name.get_objects_name()).segmented
                if self.show_window:
                    workspace.display_data.heatmaps.append(
                        (heatmap_img, labels != 0))
                if heatmap.wants_to_save_display:
                    colormap = heatmap.colormap.value
                    if colormap == matplotlib.cm.gray.name:
                        output_pixels = heatmap_img
                    else:
                        if colormap == cps.DEFAULT:
                            colormap = cpprefs.get_default_colormap()
                        cm = matplotlib.cm.ScalarMappable(cmap=colormap)
                        output_pixels = cm.to_rgba(heatmap_img)[:, :, :3]
                        output_pixels[labels == 0, :] = 0
                    parent_image = workspace.image_set.get_image(
                        heatmap.image_name.get_image_name())
                    output_img = cpi.Image(output_pixels,
                                           parent_image=parent_image)
                    img_name = heatmap.display_name.value
                    workspace.image_set.add(img_name, output_img)
def run(self, workspace):
    header = ("Image", "Objects", "Bin # (innermost=1)",
              "Bin count", "Fraction", "Intensity", "COV")
    stats = []
    d = {}
    for image in self.images:
        for o in self.objects:
            for bin_count_settings in self.bin_counts:
                stats += \
                    self.do_measurements(workspace,
                                         image.image_name.value,
                                         o.object_name.value,
                                         o.center_object_name.value
                                         if o.center_choice != C_SELF else None,
                                         o.center_choice.value,
                                         bin_count_settings,
                                         d)
    if self.wants_zernikes != Z_NONE:
        self.calculate_zernikes(workspace)
    if self.show_window:
        workspace.display_data.header = header
        workspace.display_data.stats = stats
        workspace.display_data.heatmaps = []
    for heatmap in self.heatmaps:
        heatmap_img = d.get(id(heatmap))
        if heatmap_img is not None:
            if self.show_window or heatmap.wants_to_save_display:
                labels = workspace.object_set.get_objects(
                    heatmap.object_name.get_objects_name()).segmented
                if self.show_window:
                    workspace.display_data.heatmaps.append(
                        (heatmap_img, labels != 0))
                if heatmap.wants_to_save_display:
                    colormap = heatmap.colormap.value
                    if colormap == matplotlib.cm.gray.name:
                        output_pixels = heatmap_img
                    else:
                        if colormap == cps.DEFAULT:
                            colormap = cpprefs.get_default_colormap()
                        cm = matplotlib.cm.ScalarMappable(cmap=colormap)
                        output_pixels = cm.to_rgba(heatmap_img)[:, :, :3]
                        output_pixels[labels == 0, :] = 0
                    parent_image = workspace.image_set.get_image(
                        heatmap.image_name.get_image_name())
                    output_img = cpi.Image(output_pixels,
                                           parent_image=parent_image)
                    img_name = heatmap.display_name.value
                    workspace.image_set.add(img_name, output_img)
def _compute_colors(cls, x):
    import matplotlib.cm

    # Get RGB color map proportional to the concentration.
    cm = matplotlib.cm.ScalarMappable()
    crgba = cm.to_rgba(x, bytes=True)

    # Convert RGB to HEX
    colors = []
    for row in crgba:
        # get R,G,B of RGBA
        colors.append(_rgb_to_hex(tuple(list(row[0:3]))))

    # Convert Hex to Decimal
    for i, c in enumerate(colors):
        colors[i] = int(c, 0)

    return colors
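# Hedged sketch of the helper assumed above: `_rgb_to_hex` is not shown in this snippet,
# but int(c, 0) only parses prefixed literals, so it presumably returns a '0x...'-style
# hex string. This hypothetical implementation would satisfy that contract.
def _rgb_to_hex(rgb):
    # rgb is an (R, G, B) tuple of 0-255 integers
    return '0x{:02x}{:02x}{:02x}'.format(*rgb)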
def get(self):
    logging.info('GET self.request.body = {}'.format(self.request.body))
    reqType = self.request.get('reqType')

    if reqType == 'getJobInfo':
        job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))

        if self.user.user_id() != job.user_id:
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write({"status": False, "msg": "Not the right user"})

        result = {}
        stdout = ''
        stderr = ''
        complete = ''
        if job.outData is None:
            complete = 'yes'
        else:
            try:
                fstdoutHandle = open(str(job.outData + '/stdout.log'), 'r')
                stdout = fstdoutHandle.read()
                fstdoutHandle.close()
                fstderrHandle = open(str(job.outData + '/stderr.log'), 'r')
                stderr = fstderrHandle.read()
                fstderrHandle.close()
                if os.path.exists("{0}/results/complete".format(job.outData)):
                    complete = 'yes'
            except IOError as e:
                traceback.print_exc()
                result['status'] = False
                result['msg'] = 'Error running the simulation: stdout/stderr outputs missing.'

        result.update({
            "id": int(self.request.get('id')),
            "jobStatus": job.status,
            "complete": complete,
            "resource": job.resource,
            "modelName": job.modelName,
            "outData": job.outData,
            "name": job.name,
            "uuid": job.cloudDatabaseID,
            "output_stored": job.output_stored,
            "stdout": stdout,
            "stderr": stderr,
            "indata": json.loads(job.indata)
        })

        logging.debug("result =\n\n{}".format(pprint.pformat(result)))
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(result))
        return
    elif reqType == 'getMeshData':
        try:
            job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))
            data = json.loads(self.request.get('data'))
            logging.debug("data = {}".format(data))

            trajectory = data["trajectory"]
            timeIdx = data["timeIdx"]

            resultJS = {}

            #if not job.preprocessed or not os.path.exists(job.preprocessedDir):
            job.preprocess(trajectory)

            indir = job.preprocessedDir

            with open(os.path.join(indir, 'mesh.json'), 'r') as meshfile:
                mesh = json.load(meshfile)

            with open(os.path.join(indir, 'voxelTuples.json'), 'r') as voxelTuplesFile:
                voxelTuples = json.load(voxelTuplesFile)

            f = os.path.join(indir, 'result{0}'.format(trajectory))

            with h5py.File(f, 'r') as dataFile:
                species = dataFile.keys()

            self.response.content_type = 'application/json'
            self.response.write(json.dumps({
                "mesh": mesh,
                "voxelTuples": voxelTuples,
                "species": species
            }))
        except Exception as e:
            traceback.print_exc()
            result = {}
            result['status'] = False
            result['msg'] = 'Error: error fetching results {0}'.format(e)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(result))
        return
    elif reqType == 'getTimeSeriesData':
        try:
            job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))
            data = json.loads(self.request.get('data'))
            logging.debug('Spatial.get(onlyColorRange): data={0}'.format(data))

            trajectory = data["trajectory"]
            sTime = data["timeStart"]
            eTime = data["timeEnd"]

            # TODO: what is the right value here?
            if eTime is None:
                eTime = 0

            dataType = "population" if "showPopulation" in data and data["showPopulation"] else "concentration"

            resultJS = {}

            if job.preprocessed is None or trajectory not in job.preprocessed or not os.path.exists(job.preprocessedDir):
                job.preprocess(trajectory)

            f = os.path.join(job.preprocessedDir, 'result{0}'.format(trajectory))

            limits = {}

            logging.debug('Spatial.get(onlyColorRange): sTime={0} eTime={1}'.format(sTime, eTime))

            with h5py.File(f, 'r') as dataFile:
                dataTmp = {}
                colorTmp = {}
                for specie in dataFile.keys():
                    data2 = dataFile[specie][dataType][sTime:eTime + 1]
                    dataTmp[specie] = data2
                    limits[specie] = {
                        'min': dataFile[specie][dataType].attrs['min'],
                        'max': dataFile[specie][dataType].attrs['max']
                    }

                    # 'cm' is expected to be a matplotlib ScalarMappable defined at module scope
                    cm.set_clim(dataFile[specie][dataType].attrs['min'],
                                dataFile[specie][dataType].attrs['max'])
                    rgbas = cm.to_rgba(data2, bytes=True).astype('uint32')
                    # pack R, G, B into a single 0xRRGGBB integer per voxel
                    rgbas = numpy.left_shift(rgbas[:, :, 0], 16) + \
                            numpy.left_shift(rgbas[:, :, 1], 8) + rgbas[:, :, 2]

                    #rgbaInts = numpy.zeros((rgbas.shape[0], rgbas.shape[1]))
                    #for i in range(rgbas.shape[0]):
                    #    for j in range(rgbas.shape[1]):
                    #        rgbaInts[i, j] = int('0x%02x%02x%02x' % tuple(rgbas[i, j][0:3]), 0)

                    colorTmp[specie] = []
                    for i in range(rgbas.shape[0]):
                        colorTmp[specie].append(list(rgbas[i].astype('int')))

                colors = {}
                data = {}
                for i in range(abs(eTime - sTime + 1)):
                    colors[sTime + i] = {}
                    data[sTime + i] = {}
                    for specie in dataFile.keys():
                        colors[sTime + i][specie] = colorTmp[specie][i]
                        data[sTime + i][specie] = list(dataTmp[specie][i])

            self.response.content_type = 'application/json'
            self.response.write(json.dumps({
                "colors": colors,
                "raw": data,
                "limits": limits
            }))
        except Exception as e:
            traceback.print_exc()
            result = {}
            result['status'] = False
            result['msg'] = 'Error: error fetching results {0}'.format(e)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(result))
        return

    self.render_response('spatial.html')
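# Standalone sketch of the packing step used in the handler above: to_rgba(..., bytes=True)
# yields uint8 RGBA values, and shifting the R and G channels packs each sample into a
# single 0xRRGGBB integer (the form the handler returns as "colors"). Assumes only
# numpy and matplotlib.
import numpy
import matplotlib.cm

sm = matplotlib.cm.ScalarMappable(cmap='viridis')
sm.set_clim(0.0, 1.0)
vals = numpy.linspace(0.0, 1.0, 4).reshape(2, 2)
rgba = sm.to_rgba(vals, bytes=True).astype('uint32')
packed = numpy.left_shift(rgba[:, :, 0], 16) + numpy.left_shift(rgba[:, :, 1], 8) + rgba[:, :, 2]
print([hex(v) for v in packed.ravel()])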
def make_any_color_arrays(vesselgraph, data_name):
    edges = vesselgraph.edgelist
    num_nodes = len(vesselgraph.nodes['position'])
    flags = vesselgraph.edges['flags']
    flags = np.asarray(flags, dtype='uint32')
    nflags = krebsutils.edge_to_node_property(num_nodes, edges, flags, 'or')
    mask = myutils.bbitwise_and(flags, krebsutils.CIRCULATED)
    nmask = myutils.bbitwise_and(nflags, krebsutils.CIRCULATED)

    if data_name in vesselgraph.edges:
        edgedata = vesselgraph.edges[data_name]
        nodedata = krebsutils.edge_to_node_property(num_nodes, edges, edgedata, 'avg')
    else:
        nodedata = vesselgraph.nodes[data_name]
        edgedata = np.average((nodedata[edges[:, 0]], nodedata[edges[:, 1]]), axis=0)

    gray = np.asarray((0.1, 0.1, 0.1))
    edgecolors = np.repeat(gray.reshape(1, -1), len(edgedata), axis=0)
    nodecolors = np.repeat(gray.reshape(1, -1), len(nodedata), axis=0)

    #colors = lambda arr: cm.to_rgba(arr)[:,:3]
    colors = lambda arr: np.power(cm.to_rgba(arr)[:, :3], 2.4)

    if data_name == 'hematocrit':
        cm = matplotlib.cm.ScalarMappable(cmap=cm_hematocrit)
        cm.set_clim(0, 1)
        unmapped_range = (0., 1.)
        edgecolors[mask] = colors(edgedata[mask])
        nodecolors[nmask] = colors(nodedata[nmask])
    elif data_name == 'pressure':
        # this looks really ugly if there is a zero pressure node
        #p0 = np.amin(nodedata)
        p0 = np.min(nodedata[np.nonzero(nodedata)])
        p1 = np.amax(nodedata)
        unmapped_range = (p0, p1)
        cm = matplotlib.cm.ScalarMappable(cmap=cm_redblue)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata[mask])
        nodecolors[nmask] = colors(nodedata[nmask])
    elif data_name == 'shearforce':
        mask = mask & (edgedata > 0)
        nmask = nmask & (nodedata > 0)
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        edgedata = np.log10(edgedata)
        nodedata = np.log10(nodedata)
        p0 = -4  #np.amin(edgedata)
        p1 = -1  #np.amax(edgedata)
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.spectral)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'S_tot':
        #mask = mask & (edgedata>0)
        #nmask = nmask & (nodedata>0)
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        p0 = np.amin(edgedata)
        p1 = np.amax(edgedata)
        #print("p0: %f, p1: %f" % (p0, p1))
        #unmapped_range = (p0, p1)
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.jet)
        cm.set_clim(p0, p1)
        #edgedata = np.log10(edgedata)
        #nodedata = np.log10(nodedata)
        #p0 = -4  #np.amin(edgedata)
        #p1 = -1  #np.amax(edgedata)
        #cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.spectral)
        #cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'flow':
        mask = mask & (edgedata > 0)
        nmask = nmask & (nodedata > 0)
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        edgedata = np.log10(edgedata)
        nodedata = np.log10(nodedata)
        p0 = np.floor(np.amin(edgedata))
        p1 = np.ceil(np.amax(edgedata))
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.jet)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'conductivitySignal':
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        p0 = np.amin(edgedata)
        p1 = np.amax(edgedata)
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.jet)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'metabolicSignal':
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        p0 = np.amin(edgedata)
        p1 = np.amax(edgedata)
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.jet)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'flags':
        # np.bool is removed in recent numpy; the builtin bool works as the dtype here
        edgecolors[mask & (flags & krebsutils.ARTERY).astype(bool)] = np.asarray((1., 0., 0.))
        nodecolors[nmask & (nflags & krebsutils.ARTERY).astype(bool)] = np.asarray((1., 0., 0.))
        edgecolors[mask & (flags & krebsutils.VEIN).astype(bool)] = np.asarray((0., 0., 1.))
        nodecolors[nmask & (nflags & krebsutils.VEIN).astype(bool)] = np.asarray((0., 0., 1.))
        edgecolors[mask & (flags & krebsutils.CAPILLARY).astype(bool)] = np.asarray((0., 1., 0.))
        nodecolors[nmask & (nflags & krebsutils.CAPILLARY).astype(bool)] = np.asarray((0., 1., 0.))
        for idx in vesselgraph.roots:
            nodecolors[idx] = np.asarray((1., 1., 0.))
        cm, unmapped_range = None, (None, None)

    vesselgraph.edges['colors'] = edgecolors
    vesselgraph.nodes['colors'] = nodecolors
    return cm, unmapped_range
def get(self):
    logging.info('GET self.request.body = {}'.format(self.request.body))
    reqType = self.request.get('reqType')

    if reqType == 'getJobInfo':
        job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))

        if self.user.user_id() != job.user_id:
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write({ "status" : False, "msg" : "Not the right user" })

        result = {}
        stdout = ''
        stderr = ''
        complete = ''
        if job.outData is None:
            complete = 'yes'
        else:
            try:
                fstdoutHandle = open(str(job.outData + '/stdout.log'), 'r')
                stdout = fstdoutHandle.read()
                fstdoutHandle.close()
                fstderrHandle = open(str(job.outData + '/stderr.log'), 'r')
                stderr = fstderrHandle.read()
                fstderrHandle.close()
                if os.path.exists("{0}/results/complete".format(job.outData)):
                    complete = 'yes'
            except IOError as e:
                traceback.print_exc()
                result['status'] = False
                result['msg'] = 'Error running the simulation: stdout/stderr outputs missing.'

        result.update({"id" : int(self.request.get('id')),
                       "jobStatus" : job.status,
                       "complete" : complete,
                       "resource" : job.resource,
                       "modelName" : job.modelName,
                       "outData" : job.outData,
                       "name" : job.name,
                       "uuid" : job.cloudDatabaseID,
                       "output_stored" : job.output_stored,
                       "stdout" : stdout,
                       "stderr" : stderr,
                       "indata" : json.loads(job.indata)})

        logging.debug("result =\n\n{}".format(pprint.pformat(result)))
        self.response.headers['Content-Type'] = 'application/json'
        self.response.write(json.dumps(result))
        return
    elif reqType == 'getMeshData':
        try:
            job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))
            data = json.loads(self.request.get('data'))
            logging.debug("data = {}".format(data))

            trajectory = data["trajectory"]
            timeIdx = data["timeIdx"]

            resultJS = {}

            #if not job.preprocessed or not os.path.exists(job.preprocessedDir):
            job.preprocess(trajectory)

            indir = job.preprocessedDir

            with open(os.path.join(indir, 'mesh.json'), 'r') as meshfile:
                mesh = json.load(meshfile)

            with open(os.path.join(indir, 'voxelTuples.json'), 'r') as voxelTuplesFile:
                voxelTuples = json.load(voxelTuplesFile)

            f = os.path.join(indir, 'result{0}'.format(trajectory))

            with h5py.File(f, 'r') as dataFile:
                species = dataFile.keys()

            self.response.content_type = 'application/json'
            self.response.write(json.dumps({ "mesh" : mesh,
                                             "voxelTuples" : voxelTuples,
                                             "species" : species }))
        except Exception as e:
            traceback.print_exc()
            result = {}
            result['status'] = False
            result['msg'] = 'Error: error fetching results {0}'.format(e)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(result))
        return
    elif reqType == 'getTimeSeriesData':
        try:
            job = SpatialJobWrapper.get_by_id(int(self.request.get('id')))
            data = json.loads(self.request.get('data'))
            logging.debug('Spatial.get(onlyColorRange): data={0}'.format(data))

            trajectory = data["trajectory"]
            sTime = data["timeStart"]
            eTime = data["timeEnd"]

            # TODO: what is the right value here?
            if eTime is None:
                eTime = 0

            dataType = "population" if "showPopulation" in data and data["showPopulation"] else "concentration"

            resultJS = {}

            if job.preprocessed is None or trajectory not in job.preprocessed or not os.path.exists(job.preprocessedDir):
                job.preprocess(trajectory)

            f = os.path.join(job.preprocessedDir, 'result{0}'.format(trajectory))

            limits = {}

            logging.debug('Spatial.get(onlyColorRange): sTime={0} eTime={1}'.format(sTime, eTime))

            with h5py.File(f, 'r') as dataFile:
                dataTmp = {}
                colorTmp = {}
                for specie in dataFile.keys():
                    data2 = dataFile[specie][dataType][sTime:eTime + 1]
                    dataTmp[specie] = data2
                    limits[specie] = { 'min' : dataFile[specie][dataType].attrs['min'],
                                       'max' : dataFile[specie][dataType].attrs['max'] }

                    # 'cm' is expected to be a matplotlib ScalarMappable defined at module scope
                    cm.set_clim(dataFile[specie][dataType].attrs['min'],
                                dataFile[specie][dataType].attrs['max'])
                    rgbas = cm.to_rgba(data2, bytes=True).astype('uint32')
                    # pack R, G, B into a single 0xRRGGBB integer per voxel
                    rgbas = numpy.left_shift(rgbas[:, :, 0], 16) + \
                            numpy.left_shift(rgbas[:, :, 1], 8) + rgbas[:, :, 2]

                    #rgbaInts = numpy.zeros((rgbas.shape[0], rgbas.shape[1]))
                    #for i in range(rgbas.shape[0]):
                    #    for j in range(rgbas.shape[1]):
                    #        rgbaInts[i, j] = int('0x%02x%02x%02x' % tuple(rgbas[i, j][0:3]), 0)

                    colorTmp[specie] = []
                    for i in range(rgbas.shape[0]):
                        colorTmp[specie].append(list(rgbas[i].astype('int')))

                colors = {}
                data = {}
                for i in range(abs(eTime - sTime + 1)):
                    colors[sTime + i] = {}
                    data[sTime + i] = {}
                    for specie in dataFile.keys():
                        colors[sTime + i][specie] = colorTmp[specie][i]
                        data[sTime + i][specie] = list(dataTmp[specie][i])

            self.response.content_type = 'application/json'
            self.response.write(json.dumps({ "colors" : colors,
                                             "raw" : data,
                                             "limits" : limits }))
        except Exception as e:
            traceback.print_exc()
            result = {}
            result['status'] = False
            result['msg'] = 'Error: error fetching results {0}'.format(e)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps(result))
        return

    self.render_response('spatial.html')
def InsertGraphColors(vesselgraph, po2field, data_name):
    edges = vesselgraph.edgelist
    num_nodes = len(vesselgraph.nodes['position'])

    if data_name in vesselgraph.edges:
        edgedata = data = vesselgraph.edges[data_name]
        nodedata = krebsutils.edge_to_node_property(num_nodes, edges, data, 'avg')
    else:
        nodedata = data = vesselgraph.nodes[data_name]
        edgedata = np.average((data[edges[:, 0]], data[edges[:, 1]]), axis=0)

    if data_name == 'po2vessels':
        try:
            p1 = np.amax(data)
        except ValueError:
            print("p1 not found")
        if po2field is not None:
            p1 = max(p1, np.amax(po2field))
        try:
            p0 = np.amin(data)
        except ValueError:
            print("p0 not found")
        if po2field is not None:
            p0 = min(p0, np.amin(po2field))
        #p1 = math.ceil(p1/10.0)*10.0  # round to powers of something
        #p1 = 100.0
        value_range = (p0, p1)
        cm = matplotlib.cm.ScalarMappable(cmap=cm_po2)
    elif data_name == 'saturation':
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.spectral)
        value_range = (np.min(vesselgraph.edges['saturation']),
                       np.max(vesselgraph.edges['saturation']))
        #value_range = (0, 1.)
    elif data_name == 'hboconc':
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.gnuplot)
        p1 = math.ceil(np.amax(data))
        value_range = (0., p1)
    cm.set_clim(*value_range)

    colors = lambda arr: np.power(cm.to_rgba(arr)[:, :3], 2.4)
    if data_name in vesselgraph.edges:
        edgecolors = colors(data)
        nodecolors = colors(nodedata)
    else:
        edgecolors = colors(edgedata)
        nodecolors = colors(data)

    flags = vesselgraph.edges['flags']
    nflags = krebsutils.edge_to_node_property(num_nodes, edges, flags, 'or')
    # np.bool is removed in recent numpy; the builtin bool works as the dtype here
    is_not_set = lambda flags_, flag: np.bitwise_not(
        np.asarray(np.bitwise_and(flags_, flag), bool))
    gray = np.asarray((0.3, 0.3, 0.3))
    uncirculated = is_not_set(flags, krebsutils.CIRCULATED)
    nuncirculated = is_not_set(nflags, krebsutils.CIRCULATED)
    edgecolors[uncirculated] = gray
    nodecolors[nuncirculated] = gray

    print('colormap range ', cm.get_clim())

    vesselgraph.edges['colors'] = edgecolors
    vesselgraph.nodes['colors'] = nodecolors
    return cm
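# Small illustration of the color transform used in the `colors` lambda above: to_rgba
# returns display (sRGB-like) floats in [0, 1], and raising them to the power 2.4 is
# likely intended as a rough conversion to linear intensity before rendering. This is
# an interpretation, not stated in the source; the sketch only reproduces the arithmetic.
import numpy as np
import matplotlib.cm

sm = matplotlib.cm.ScalarMappable(cmap='jet')
sm.set_clim(0.0, 1.0)
rgb = sm.to_rgba(np.array([0.2, 0.5, 0.8]))[:, :3]
linear_rgb = np.power(rgb, 2.4)
print(rgb)
print(linear_rgb)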
# Compile images
cm = matplotlib.cm.ScalarMappable(None, cmap='plasma')
cm.set_clim(0.00, 0.08)

disparities = np.load(os.path.join(cfg['disp_path'],
                                   cfg['disp_fmt'].format(cfg['filenames_index'])),
                      mmap_mode='r')

full_image = np.zeros((2 * len(cfg['sets']) * cfg['image_size'][0],
                       len(cfg['distances']) * cfg['image_size'][1], 3),
                      dtype=np.uint8)

for si, s in enumerate(cfg['sets']):
    print('Set: {}'.format(s))
    imgs = np.zeros((cfg['image_size'][0],
                     len(cfg['distances']) * cfg['image_size'][1], 3),
                    dtype=np.uint8)
    disps = np.zeros_like(imgs)
    for di, d in enumerate(cfg['distances']):
        fn = cfg['filename_fmt'].format(set=s, scene=cfg['scene'],
                                        object=cfg['object'], distance=d)
        mask_fn = cfg['mask_fmt'].format(set=s, scene=cfg['scene'],
                                         object=cfg['object'], distance=d)

        img = imread(os.path.join(cfg['data_path'], fn))
        img = img_as_ubyte(resize(img, cfg['image_size']))
        imgs[:, (di * cfg['image_size'][1]):((di + 1) * cfg['image_size'][1])] = img

        mask = imread(os.path.join(cfg['data_path'], mask_fn))[:, :, 0]
        mask = resize(mask, cfg['image_size'])
        c = find_contours(mask, 0.5)[0]

        disp = disparities[indices[(s, d)], :, :]
        disp = resize(disp, cfg['image_size'])
        disp = img_as_ubyte(cm.to_rgba(disp)[:, :, :3])

        rr, cc = polygon_perimeter(c[:, 0], c[:, 1])
        outline = np.zeros(cfg['image_size'])
        outline[rr, cc] = 1
        outline = binary_dilation(outline, square(5))
        disp[outline, :] = [255, 255, 255]

        disps[:, (di * cfg['image_size'][1]):((di + 1) * cfg['image_size'][1])] = disp

    full_image[((si * 2) * cfg['image_size'][0]):((si * 2 + 1) * cfg['image_size'][0]), :] = imgs
    full_image[((si * 2 + 1) * cfg['image_size'][0]):((si * 2 + 2) * cfg['image_size'][0]), :] = disps

imsave(cfg['output_path'], full_image)
def make_any_color_arrays(vesselgraph, data_name):
    edges = vesselgraph.edgelist
    num_nodes = len(vesselgraph.nodes['position'])
    flags = vesselgraph.edges['flags']
    nflags = krebsutils.edge_to_node_property(num_nodes, edges, flags, 'or')
    mask = myutils.bbitwise_and(flags, krebsutils.CIRCULATED)
    nmask = myutils.bbitwise_and(nflags, krebsutils.CIRCULATED)

    if data_name in vesselgraph.edges:
        edgedata = vesselgraph.edges[data_name]
        nodedata = krebsutils.edge_to_node_property(num_nodes, edges, edgedata, 'avg')
    else:
        nodedata = vesselgraph.nodes[data_name]
        edgedata = np.average((nodedata[edges[:, 0]], nodedata[edges[:, 1]]), axis=0)

    gray = np.asarray((0.1, 0.1, 0.1))
    edgecolors = np.repeat(gray.reshape(1, -1), len(edgedata), axis=0)
    nodecolors = np.repeat(gray.reshape(1, -1), len(nodedata), axis=0)

    #colors = lambda arr: cm.to_rgba(arr)[:,:3]
    colors = lambda arr: np.power(cm.to_rgba(arr)[:, :3], 2.4)

    if data_name == 'hematocrit':
        cm = matplotlib.cm.ScalarMappable(cmap=cm_hematocrit)
        cm.set_clim(0, 1)
        unmapped_range = (0., 1.)
        edgecolors[mask] = colors(edgedata[mask])
        nodecolors[nmask] = colors(nodedata[nmask])
    elif data_name == 'pressure':
        p0 = np.amin(nodedata)
        p1 = np.amax(nodedata)
        unmapped_range = (p0, p1)
        cm = matplotlib.cm.ScalarMappable(cmap=cm_redblue)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata[mask])
        nodecolors[nmask] = colors(nodedata[nmask])
    elif data_name == 'shearforce':
        mask = mask & (edgedata > 0)
        nmask = nmask & (nodedata > 0)
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        edgedata = np.log10(edgedata)
        nodedata = np.log10(nodedata)
        p0 = -4  #np.amin(edgedata)
        p1 = -1  #np.amax(edgedata)
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.spectral)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'flow':
        mask = mask & (edgedata > 0)
        nmask = nmask & (nodedata > 0)
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        unmapped_range = edgedata.min(), edgedata.max()
        edgedata = np.log10(edgedata)
        nodedata = np.log10(nodedata)
        p0 = np.floor(np.amin(edgedata))
        p1 = np.ceil(np.amax(edgedata))
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.jet)
        cm.set_clim(p0, p1)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)
    elif data_name == 'flags':
        unmapped_range = (0., 1.)
        mask = mask & (edgedata > 0)
        nmask = nmask & (nodedata > 0)
        edgedata = edgedata[mask]
        nodedata = nodedata[nmask]
        edgedata = np.bitwise_and(edgedata, krebsutils.ARTERY)
        cm = matplotlib.cm.ScalarMappable(cmap=matplotlib.cm.jet)
        edgecolors[mask] = colors(edgedata)
        nodecolors[nmask] = colors(nodedata)

    vesselgraph.edges['colors'] = edgecolors
    vesselgraph.nodes['colors'] = nodecolors
    return cm, unmapped_range