import sys
import time

import numpy
import torch
import torch.nn as nn
from torch.autograd import Variable

import loadseg   # repo-local dataset/prefetcher module
import upsample  # repo-local upsampling helpers


def activation_mask(self, layer, unit, index, shape=None, use_fieldmap=True):
    # Return a boolean mask over the input image marking where the given
    # unit's upsampled activation exceeds its quantile threshold.
    if shape is None:
        record, shape = self.instance_data(index)
    sw, sh = shape
    # reduction = int(round(self.iw / float(sw)))
    lp = self.layer[layer]
    blobdata = lp.blobdata
    fieldmap = lp.fieldmap
    quantdata = lp.quantdata
    # Threshold at the lp.level quantile of this unit's activations.
    threshold = quantdata[unit, int(round(quantdata.shape[1] * lp.level))]
    if use_fieldmap:
        # Upsample through the layer's receptive-field map.
        up = upsample.upsampleL(
            fieldmap, blobdata[index:index + 1, unit],
            shape=(self.ih, self.iw), scaleshape=(sh, sw))[0]
    else:
        # Fall back to plain bilinear upsampling.
        upsample_f = nn.Upsample(size=(self.ih, self.iw), mode='bilinear')
        up = numpy.squeeze(upsample_f(Variable(torch.Tensor(
            blobdata[index:index + 1, unit]).unsqueeze(0))).data.cpu().numpy())
    mask = up > threshold
    return mask
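# The bilinear fallback above uses the older Variable/nn.Upsample API. Below
# is a minimal, self-contained sketch of the same branch in current torch
# (F.interpolate supersedes nn.Upsample). Shapes are illustrative: a 13x13
# feature map upsampled to 224x224; the 0.9 threshold is a stand-in for the
# quantile threshold computed above.
import torch.nn.functional as F

_blobunit = numpy.random.rand(1, 13, 13).astype('float32')  # one unit's map
_up = F.interpolate(torch.from_numpy(_blobunit).unsqueeze(0),
                    size=(224, 224), mode='bilinear', align_corners=False)
_mask = _up.squeeze().numpy() > 0.9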
def activation_mask(lp, unit, index, shape):
    # Standalone variant: lp is a layer-probe record carrying the cached
    # blob activations, receptive-field map, and quantile data.
    blobdata = lp.blobdata
    fieldmap = lp.fieldmap
    quantdata = lp.quantdata
    threshold = quantdata[unit, int(round(quantdata.shape[1] * lp.level))]
    up = upsample.upsampleL(
        fieldmap, blobdata[index:index + 1, unit],
        shape=lp.input_dim, scaleshape=shape)[0]
    mask = up > threshold
    return mask
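# Worked sketch of the threshold lookup used in each variant above.
# Assumption (not confirmed by this fragment): quantdata stores each unit's
# sampled activations sorted descending, so that with e.g. level = 0.005 the
# indexed entry is the value exceeded by the top 0.5% of activations.
_rng = numpy.random.RandomState(0)
_quantdata = -numpy.sort(-_rng.rand(4, 1000), axis=1)  # descending rows
_level = 0.005
_threshold = _quantdata[2, int(round(_quantdata.shape[1] * _level))]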
def activation_mask(self, layer, unit, index, shape=None):
    # Method variant without the bilinear fallback: always upsamples
    # through the receptive-field map.
    if shape is None:
        record, shape = self.instance_data(index)
    sw, sh = shape
    # reduction = int(round(self.iw / float(sw)))
    lp = self.layer[layer]
    blobdata = lp.blobdata
    fieldmap = lp.fieldmap
    quantdata = lp.quantdata
    threshold = quantdata[unit, int(round(quantdata.shape[1] * lp.level))]
    up = upsample.upsampleL(
        fieldmap, blobdata[index:index + 1, unit],
        shape=(self.ih, self.iw), scaleshape=(sh, sw))[0]
    mask = up > threshold
    return mask
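# The masks returned above are typically scored against segmentation labels.
# A minimal sketch, assuming `probe` exposes the method above and
# `label_mask` is a same-sized boolean mask (both names hypothetical):
def mask_iou(mask_a, mask_b):
    # Intersection-over-union between two boolean masks.
    union = numpy.logical_or(mask_a, mask_b).sum()
    if union == 0:
        return 0.0
    return numpy.logical_and(mask_a, mask_b).sum() / float(union)

# mask = probe.activation_mask('conv5', unit=183, index=0)
# score = mask_iou(mask, label_mask)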
def process_data(fn_t, fn_read, shape, tally_depth, ds, iw, ih, categories,
                 fieldmap, thresh, labelcat, batch_size, ahead, verbose,
                 thread, start, end):
    # cached_memmap and accumulate_counts are defined elsewhere in this
    # module.
    unit_size = len(thresh)
    blobdata = cached_memmap(fn_read, mode='r', dtype='float32', shape=shape)
    count_t = cached_memmap(fn_t, mode='r+', dtype='int32',
                            shape=(ds.size(), tally_depth, 3))
    count_t[...] = 0
    # The main loop
    if verbose:
        print('Beginning work for evaluating', fn_read)  # was undefined `blob`
    pf = loadseg.SegmentationPrefetcher(ds, categories=categories,
                                        start=start, end=end, once=True,
                                        batch_size=batch_size, ahead=ahead,
                                        thread=False)
    index = start
    start_time = time.time()
    last_batch_time = start_time
    batch_size = 0  # reused below to hold the previous batch's size
    for batch in pf.batches():
        batch_time = time.time()
        rate = (index - start) / (batch_time - start_time + 1e-15)
        batch_rate = batch_size / (batch_time - last_batch_time + 1e-15)
        last_batch_time = batch_time
        if verbose:
            print('labelprobe index', index, 'items per sec',
                  batch_rate, rate)
            sys.stdout.flush()
        for rec in batch:
            # Upsample the unit activations to the record's label resolution
            # and tally per-unit label co-occurrence counts above threshold.
            sw, sh = [rec[k] for k in ['sw', 'sh']]
            reduction = int(round(iw / float(sw)))
            up = upsample.upsampleL(fieldmap, blobdata[index],
                                    shape=(sh, sw), reduction=reduction)
            mask = up > thresh
            accumulate_counts(mask, [rec[cat] for cat in categories],
                              count_t[index], unit_size, labelcat)
            index += 1
        batch_size = len(batch)
    count_t.flush()
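# process_data covers a (start, end) slice of the dataset, so it can be
# sharded across workers. A hedged sketch, where `run_worker` is a
# hypothetical functools.partial binding process_data's other arguments:
def shard_ranges(total, num_workers):
    # Split [0, total) into contiguous, near-equal (start, end) ranges.
    step = (total + num_workers - 1) // num_workers
    return [(s, min(s + step, total)) for s in range(0, total, step)]

# from multiprocessing import Pool
# with Pool(4) as pool:
#     pool.starmap(run_worker, shard_ranges(ds.size(), 4))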