def prepare_scan(self, label=None):
    """
    Prepare scan-specific parameters and create a label if necessary.
    """
    if label is None:
        label = 'Scan%05d' % self.label_idx
        self.label_idx += 1

    try:
        # Return the scan if it is already prepared.
        return self.scans[label]
    except KeyError:
        # Get standard parameters and create a dictionary specific to this scan.
        scan = u.Param()
        self.scans[label] = scan
        scan.label = label
        # Make a copy of the model dictionary.
        scan.pars = self.p.copy(depth=3)
        # Look for a scan-specific entry in the input parameters.
        scan_specific_parameters = self.scans_pars.get(label, None)
        scan.pars.update(scan_specific_parameters, Replace=False)
        # Prepare the tags.
        t = scan.pars.tags
        if str(t) == t:
            scan.pars.tags = [tag.strip().lower() for tag in t.split(',')]
        # Also create the theoretical positions.
        scan.pos_theory = xy.from_pars(scan.pars.xy)
        return scan
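# -----------------------------------------------------------------------------
# Hedged usage sketch (added for illustration, not part of the original module).
# `manager` below is a hypothetical name for whatever object defines
# prepare_scan(); it assumes `label_idx`, `scans`, `scans_pars` and `p` are
# already set up. It shows that a missing label is auto-generated and that
# repeated calls with the same label return the cached scan Param:
#
#     scan_a = manager.prepare_scan()             # auto label 'Scan00000'
#     scan_b = manager.prepare_scan('Scan00000')  # returns the cached Param
#     assert scan_a is scan_b
# -----------------------------------------------------------------------------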
def new_data(self):
    """
    Get all new diffraction patterns and create all views and pods accordingly.
    """
    parallel.barrier()

    # Nothing to do if there are no new data.
    if not self.ptycho.datasource.data_available:
        return 'ok'

    logger.info('Processing new data.')
    used_scans = []
    not_initialized = []

    for dp in self.ptycho.datasource.feed_data():
        """
        A dp (data package) contains the following:

        common : dict or Param
            Meta information common to all datapoints in the data package.
            Variable names need to be consistent with those in the rest of
            the ptypy package. (TODO further description)

            Important info:
            ------------------------
            shape : (tuple or array)
                expected frame shape
            label : (string)
                a string signifying to which scan this package belongs

        iterable : An iterable structure that yields for each iteration
                   a dict with the following fields:

            data : (np.2darray, float)
                diffraction data. In the MPI case, data can be None if it
                was distributed to another node.
            mask : (np.2darray, bool)
                masked-out areas in the diffraction data array
            index : (int)
                diffraction datapoint index in the scan
            position : (tuple or array)
                scan position
        """
        meta = dp['common']
        label = meta['label']
        # We expect a string for the label.
        assert label == str(label)
        used_scans.append(label)
        logger.info('Importing data from %s as scan %s.'
                    % (meta['label_original'], label))

        # Prepare the scan dictionary or dig up the already prepared one.
        scan = self.prepare_scan(label)
        scan.meta = meta

        # Empty buffer
        scan.iterable = []

        # Prepare the scan geometry if not already done.
        if scan.get('geometries') is None:
            # Now that we have meta we can check whether the geometry fits.
            scan.geometries = []
            geo = scan.pars.geometry
            for key in ['lam', 'energy', 'psize_det', 'z', 'prop_type']:
                if geo[key] is None or scan.pars.if_conflict_use_meta:
                    mk = scan.meta.get(key)
                    if mk is not None:
                        # A missing key and a None value in the meta dict
                        # are treated alike.
                        geo[key] = mk

            for ii, fac in enumerate(self.p.coherence.energies):
                geoID = geometry.Geo._PREFIX + '%02d' % ii + label
                g = geometry.Geo(self.ptycho, geoID, pars=geo)
                # Fix the sample pixel size; this will make the frame size adapt.
                g.p.psize_sam_is_fix = True
                g.energy *= fac
                scan.geometries.append(g)

            # Create buffers
            scan.iterable = []
            scan.diff_views = []
            scan.mask_views = []

            # Remember the order in which these scans were fed to the manager.
            self.scan_labels.append(label)

            # Remember also that these new scans are probably not initialized yet.
            not_initialized.append(label)

        # Buffer incoming data and note whether we got Nones in the data.
        for dct in dp['iterable']:
            dct['active'] = dct['data'] is not None
            scan.iterable.append(dct)

    # Data transmission is over for now.
    # Check which scans have received data and create the views for those.
    for label in used_scans:
        # Get the scan Param.
        scan = self.scans[label]

        # Pick one of the geometries for calculating the frame shape.
        geo = scan.geometries[0]
        sh = np.array(scan.meta.get('shape', geo.N))

        # Storage generation if not already existing
        if scan.get('diff') is None:
            # This scan is brand new, so we create storages for it.
            scan.diff = self.ptycho.diff.new_storage(shape=(1, sh[-2], sh[-1]),
                                                     psize=geo.psize_det,
                                                     padonly=True,
                                                     layermap=None)
            old_diff_views = []
            old_diff_layers = []
        else:
            # Storage exists already; views most likely do, too.
            # Do some analysis and deactivate the old views.
            old_diff_views = self.ptycho.diff.views_in_storage(scan.diff,
                                                               active=False)
            old_diff_layers = []
            for v in old_diff_views:
                old_diff_layers.append(v.layer)
                #v.active = False

        # Same for the mask
        if scan.get('mask') is None:
            scan.mask = self.ptycho.mask.new_storage(shape=(1, sh[-2], sh[-1]),
                                                     psize=geo.psize_det,
                                                     padonly=True,
                                                     layermap=None)
            old_mask_views = []
            old_mask_layers = []
        else:
            old_mask_views = self.ptycho.mask.views_in_storage(scan.mask,
                                                               active=False)
            old_mask_layers = []
            for v in old_mask_views:
                old_mask_layers.append(v.layer)
                #v.active = False

        # Prepare for view generation
        AR_diff_base = DEFAULT_ACCESSRULE.copy()
        AR_diff_base.shape = geo.N  #None
        AR_diff_base.coord = 0.0
        AR_diff_base.psize = geo.psize_det
        AR_mask_base = AR_diff_base.copy()
        AR_diff_base.storageID = scan.diff.ID
        AR_mask_base.storageID = scan.mask.ID

        diff_views = []
        mask_views = []
        positions = []
        positions_theory = xy.from_pars(scan.pars.xy)

        for dct in scan.iterable:
            index = dct['index']
            active = dct['active']
            #tpos = positions_theory[index]
            if not scan.pars.if_conflict_use_meta and positions_theory is not None:
                pos = positions_theory[index]
            else:
                pos = dct.get('position')  #, positions_theory[index])

            AR_diff = AR_diff_base  #.copy()
            AR_mask = AR_mask_base  #.copy()
            AR_diff.layer = index
            AR_mask.layer = index

            # Check here: is there already a view to this layer? Is it active?
            try:
                old_view = old_diff_views[old_diff_layers.index(index)]
                old_active = old_view.active
                old_view.active = active
                # Also set this for the attached pods' exit views.
                for pod in old_view.pods.itervalues():
                    pod.ex_view.active = active
                logger.debug(
                    'Diff view with layer/index %s of scan %s exists.\n'
                    'Setting view active state from %s to %s'
                    % (index, label, old_active, active))
            except ValueError:
                v = View(self.ptycho.diff, accessrule=AR_diff, active=active)
                diff_views.append(v)
                logger.debug(
                    'Diff view with layer/index %s of scan %s does not exist.\n'
                    'Creating view with ID %s and setting active state to %s'
                    % (index, label, v.ID, active))

            # Append the position as well.
            positions.append(pos)

            try:
                old_view = old_mask_views[old_mask_layers.index(index)]
                old_view.active = active
            except ValueError:
                v = View(self.ptycho.mask, accessrule=AR_mask, active=active)
                mask_views.append(v)

        # Now we should have the right views to these storages. Let them
        # reformat(); that will create the right sizes and the datalist access.
        scan.diff.reformat()
        scan.mask.reformat()
        #parallel.barrier()

        for dct in scan.iterable:
            parallel.barrier()
            if not dct['active']:
                continue
            data = dct['data']
            idx = dct['index']
            scan.diff.datalist[idx][:] = data  #.astype(scan.diff.dtype)
            scan.mask.datalist[idx][:] = dct.get('mask', np.ones_like(data))  #.astype(scan.mask.dtype)
            #del dct['data']

        scan.diff.nlayers = parallel.MPImax(scan.diff.layermap) + 1
        scan.mask.nlayers = parallel.MPImax(scan.mask.layermap) + 1

        # Empty the iterable buffer
        #scan.iterable = []
        scan.new_positions = positions
        scan.new_diff_views = diff_views
        scan.new_mask_views = mask_views
        scan.diff_views += diff_views
        scan.mask_views += mask_views

        self._update_stats(scan)

    # Create PODs
    new_pods, new_probe_ids, new_object_ids = self._create_pods(used_scans)
    logger.info('Process %d created %d new PODs, %d new probes and %d new objects.'
                % (parallel.rank, len(new_pods), len(new_probe_ids),
                   len(new_object_ids)), extra={'allprocesses': True})

    # Adjust storages
    self.ptycho.probe.reformat(True)
    self.ptycho.obj.reformat(True)
    self.ptycho.exit.reformat()

    self._initialize_probe(new_probe_ids)
    self._initialize_object(new_object_ids)
    self._initialize_exit(new_pods)
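# -----------------------------------------------------------------------------
# Hedged illustration (added for clarity; all concrete values are made up).
# A minimal data package of the shape documented in new_data() above, i.e.
# roughly what one item yielded by `self.ptycho.datasource.feed_data()` is
# expected to look like:
#
#     dp = {
#         'common': {
#             'label': 'Scan00000',           # scan this package belongs to
#             'label_original': 'scan_0001',  # hypothetical original name
#             'shape': (128, 128),            # expected frame shape
#         },
#         'iterable': [
#             {'data': np.zeros((128, 128)),           # None on non-owning MPI nodes
#              'mask': np.ones((128, 128), dtype=bool),
#              'index': 0,                             # datapoint index in the scan
#              'position': (0.0, 0.0)},                # scan position
#         ],
#     }
#
# Frames whose 'data' entry is None are flagged inactive and skipped when the
# diffraction and mask storages are filled.
# -----------------------------------------------------------------------------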