def sddsDemo3(input, output):
    """Demo: load *input* through two separate SDDS handles and write
    each one back out to *output*.

    Note: ``input`` shadows the builtin of the same name; the parameter
    names are kept for interface compatibility.
    """
    for interface_index in (0, 1):
        handle = sdds.SDDS(interface_index)
        handle.load(input)
        handle.save(output)
def read_sdds_file(filename, include_definitions=False):
    """Create and return dicts for the columns and the parameters.

    Single-element value lists are unwrapped to their scalar.  When
    *include_definitions* is true, the SDDS definition records for
    parameters and columns are returned as well.
    """
    data_file = sdds.SDDS(0)
    data_file.load(filename)

    def _unwrap(names, values):
        # collapse one-element lists to the bare value, as callers expect
        return {n: (v[0] if len(v) == 1 else v) for n, v in zip(names, values)}

    parameters = _unwrap(data_file.parameterName, data_file.parameterData)
    columns = _unwrap(data_file.columnName, data_file.columnData)
    if include_definitions:
        parameter_def = dict(zip(data_file.parameterName,
                                 data_file.parameterDefinition))
        column_def = dict(zip(data_file.columnName,
                              data_file.columnDefinition))
        return parameters, parameter_def, columns, column_def
    return parameters, columns
def getAllCols(self, sddsfile=None):
    """get all available column names from sddsfile

    :param sddsfile: sdds file name, if not given, rollback to the one that from ``__init__()``
    :return: all sdds data column names
    :rtype: list

    :Example:

    >>> dh = DataExtracter('test.out')
    >>> print(dh.getAllCols())
    ['x', 'xp', 'y', 'yp', 't', 'p', 'particleID']
    >>> print(dh.getAllCols('test.twi'))
    ['s', 'betax', 'alphax', 'psix', 'etax', 'etaxp', 'xAperture',
     'betay', 'alphay', 'psiy', 'etay', 'etayp', 'yAperture',
     'pCentral0', 'ElementName', 'ElementOccurence', 'ElementType']
    """
    if SDDS_:
        if sddsfile is None:
            sddsobj = self.sddsobj
        else:
            sddsobj = sdds.SDDS(2)
            sddsobj.load(sddsfile)
        return sddsobj.columnName
    # fall back to the command-line sddsquery tool
    target = self.sddsfile if sddsfile is None else sddsfile
    return subprocess.check_output(['sddsquery', '-col', target]).split()
def write_SDDS_file(self, filename, ascii=False, xyzoffset=None):
    """Save the beam as an SDDS particle file using the SDDS class.

    :param filename: output SDDS file name
    :param ascii: write SDDS_ASCII when True, SDDS_BINARY otherwise
    :param xyzoffset: optional [x, y, z] offsets subtracted from the
        transverse coordinates; defaults to [0, 0, 0].  (Previously a
        mutable default argument -- fixed to avoid cross-call sharing.)
    """
    if xyzoffset is None:
        xyzoffset = [0, 0, 0]
    # zoffset is unused because t is written instead of z
    xoffset, yoffset, zoffset = xyzoffset
    self.sddsindex += 1
    x = sdds.SDDS(self.sddsindex % 20)
    x.mode = x.SDDS_ASCII if ascii else x.SDDS_BINARY
    # column metadata: {x, xp, y, yp, t, p}
    Cnames = ["x", "xp", "y", "yp", "t", "p"]
    Ctypes = [x.SDDS_DOUBLE] * 6
    Csymbols = ["", "x'", "", "y'", "", ""]
    Cunits = ["m", "", "m", "", "s", "m$be$nc"]
    # NOTE: the original bound Ccolumns to a list of names and then
    # immediately overwrote it with the data below; the dead assignment
    # has been removed.
    Ccolumns = [np.array(self.x) - float(xoffset), self.xp,
                np.array(self.y) - float(yoffset), self.yp,
                self.t, self.cp / self.E0_eV]
    # parameter metadata: {pCentral, Charge, Particles}
    Pnames = ["pCentral", "Charge", "Particles"]
    Ptypes = [x.SDDS_DOUBLE, x.SDDS_DOUBLE, x.SDDS_LONG]
    Psymbols = ["p$bcen$n", "", ""]
    Punits = ["m$be$nc", "C", ""]
    parameterData = [[np.mean(self.BetaGamma)],
                     [abs(self._beam['total_charge'])],
                     [len(self.x)]]
    for i in range(len(Ptypes)):
        x.defineParameter(Pnames[i], Psymbols[i], Punits[i], "", "",
                          Ptypes[i], "")
        x.setParameterValueList(Pnames[i], parameterData[i])
    for i in range(len(Ctypes)):
        # name, symbol, units, description, formatString, type, fieldLength
        x.defineColumn(Cnames[i], Csymbols[i], Cunits[i], "", "",
                       Ctypes[i], 0)
        x.setColumnValueLists(Cnames[i], [list(Ccolumns[i])])
    x.save(filename)
def read_SDDS_beam_file(self, fileName, charge=None, ascii=False):
    """Load an SDDS beam file into ``self._beam`` and derive momenta.

    :param fileName: SDDS particle file (elegant-style columns x, xp, y,
        yp, t, p expected)
    :param charge: fallback total charge when the file carries no
        'Charge' parameter; None falls back to 0
    :param ascii: accepted but unused here
    """
    self.reset_dicts()
    self.sddsindex += 1
    sddsref = sdds.SDDS(self.sddsindex % 20)
    sddsref.load(fileName)
    # copy columns into the beam dict, unwrapping single-page columns
    for col in range(len(sddsref.columnName)):
        if len(sddsref.columnData[col]) == 1:
            self._beam[sddsref.columnName[col]] = sddsref.columnData[col][0]
        else:
            self._beam[sddsref.columnName[col]] = sddsref.columnData[col]
    SDDSparameters = dict()
    for param in range(len(sddsref.parameterName)):
        SDDSparameters[sddsref.parameterName[param]] = sddsref.parameterData[param]
    self.filename = fileName
    self['code'] = "SDDS"
    # momentum components from the transverse slopes; p scaled by E0_eV
    cp = self._beam['p'] * self.E0_eV
    cpz = cp / np.sqrt(self._beam['xp']**2 + self._beam['yp']**2 + 1)
    cpx = self._beam['xp'] * cpz
    cpy = self._beam['yp'] * cpz
    self._beam['px'] = cpx * self.q_over_c
    self._beam['py'] = cpy * self.q_over_c
    self._beam['pz'] = cpz * self.q_over_c
    # self._beam['t'] = self._beam['t']
    # NOTE(review): uses attribute access (self._beam.Bz / .t) while the
    # rest of the method indexes by key -- presumably _beam is a
    # Munch-like mapping and Bz is already set; confirm before relying on it.
    self._beam['z'] = (-1*self._beam.Bz * constants.speed_of_light) * (self._beam.t-np.mean(self._beam.t))  #np.full(len(self.t), 0)
    # total charge: prefer the file's 'Charge' parameter, then the
    # explicit argument, otherwise 0
    if 'Charge' in SDDSparameters and len(SDDSparameters['Charge']) > 0:
        self._beam['total_charge'] = SDDSparameters['Charge'][0]
    elif charge is None:
        self._beam['total_charge'] = 0
    else:
        self._beam['total_charge'] = charge
    self._beam['charge'] = []
def read_elegant_SDDS_content(filename, column_names=None):
    """Read an elegant SDDS file into a pandas DataFrame plus parameters.

    :param filename: path to the SDDS file
    :param column_names: optional iterable of column names to extract;
        names not present in the file are silently dropped.  When None,
        all columns are read.
    :return: (DataFrame of column data, dict mapping parameter name to
        its first value), or None when the file does not exist.
    """
    # Probe readability up front.  BUG FIX: the original closed the
    # handle in a ``finally`` block, which raised NameError when
    # ``open`` itself failed (``f`` was never bound); use
    # try/except/else instead so the early return works.
    try:
        f = open(filename)
    except FileNotFoundError:
        print("File not accessible")
        return
    else:
        f.close()
    import pandas as pd
    sddsfile = sdds.SDDS(0)
    sddsfile.load(filename)
    if column_names is None:
        column_names = sddsfile.columnName
        indcn = list(range(len(sddsfile.columnName)))
    else:
        column_names = list(set(column_names) & set(sddsfile.columnName))
        indcn = [sddsfile.columnName.index(name) for name in column_names]
    # first page only; transpose to (rows, columns)
    column_list = (np.array(sddsfile.columnData)[indcn, 0, :].astype(float)).transpose()
    sdds_parameters = {}
    for para_name, para_value in zip(sddsfile.parameterName, sddsfile.parameterData):
        sdds_parameters[para_name] = para_value[0]
    return pd.DataFrame(data=column_list, columns=column_names), sdds_parameters
def getAllPars(self, sddsfile=None):
    """get all available parameter names from sddsfile

    :param sddsfile: sdds file name, if not given, rollback to the one that from ``__init__()``
    :return: all sdds data parameter names
    :rtype: list

    .. warning:: `sdds` needs to be installed as an extra dependency.

    :Example:

    >>> dh = DataExtracter('test.w1')
    >>> print(dh.getAllPars())
    ['Step', 'pCentral', 'Charge', 'Particles', 'IDSlotsPerBunch',
     'SVNVersion', 'Pass', 'PassLength', 'PassCentralTime',
     'ElapsedCoreTime', 'MemoryUsage', 's', 'Description',
     'PreviousElementName']

    :seealso: :func:`getAllCols`
    """
    if SDDS_:
        if sddsfile is None:
            sddsobj = self.sddsobj
        else:
            sddsobj = sdds.SDDS(2)
            sddsobj.load(sddsfile)
        return sddsobj.parameterName
    # fall back to the command-line sddsquery tool
    target = self.sddsfile if sddsfile is None else sddsfile
    return subprocess.check_output(['sddsquery', '-par', target]).split()
def __init__(self, name, element_length, multipole_strength, skew_strength=None, **kwargs):
    """Build an FMULT beamline element and write its multipole SDDS file.

    Note: a falsy *skew_strength* (None or empty) is replaced by zeros of
    the same length as *multipole_strength*, matching the original
    truthiness test.
    """
    self.name = name
    self.orders = list(range(1, len(multipole_strength) + 1))
    self.filename = '{}_multipole.sdds'.format(self.name)
    self.multipole_strength = multipole_strength
    self.skew_strength = (skew_strength if skew_strength
                          else [0.0] * len(multipole_strength))
    self.data = [self.orders, self.multipole_strength, self.skew_strength]
    output_file = sdds.SDDS(1)
    for ele in self.data:
        logging.debug(len(ele))
    # one page per column; columns are (order, KnL, JnL)
    col_types = (output_file.SDDS_INT16, output_file.SDDS_DOUBLE,
                 output_file.SDDS_DOUBLE)
    for col_name, unit, item, data_type in zip(
            ('order', 'KnL', 'JnL'),
            ('None', '1/m^n', '1/m^n'),
            [[col] for col in self.data],
            col_types):
        output_file.columnName.append(col_name)
        output_file.columnDefinition.append([col_name, unit, '', '', data_type, 0])
        output_file.columnData.append(item)
    params = {'L': element_length, 'FILENAME': self.filename}
    for key, value in kwargs.items():
        params[key.upper()] = value
    output_file.save(self.filename)
    accelerator.BeamlineElement.__init__(self, self.name, 'FMULT', params)
def _get_sdds():
    """Return the lazily-created module-level SDDS object, defining the
    field-map columns on first use."""
    if _cfg.sdds is None:
        obj = sdds.SDDS(_SDDS_INDEX)
        # TODO(mvk): elegant cannot read these binary files; figure that out
        # obj.mode = sd.SDDS_BINARY
        for i, n in enumerate(_FIELD_MAP_COLS):
            # name, symbol, units, desc, format, type, len
            obj.defineColumn(n, '', _FIELD_MAP_UNITS[i], n, '', obj.SDDS_DOUBLE, 0)
        _cfg.sdds = obj
    return _cfg.sdds
def main(input_filename, python_filename):
    """Apply the configured mask to the particles in *input_filename* and
    write the surviving particles to a new SDDS file.

    :param input_filename: SDDS input file (typically ``*.in``)
    :param python_filename: accepted but unused here; kept for
        interface compatibility
    """
    logger = logging.getLogger('masking')
    logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler('mask.log')
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)
    # import particle data in from file
    input_params, input_param_def, input_columns, input_col_def = utils.read_sdds_file(
        input_filename, include_definitions=True)
    output_file = sdds.SDDS(1)
    PrintVars(locals())
    # apply masking
    data = np.asarray([item for name, item in input_columns.items()]).T
    logger.info('Applying masking with {} particles'.format(len(data)))
    # create mask (mask_config_filename is expected to be a module global)
    mask_obj = mask.Mask(mask_config_filename)
    logger.debug(mask_obj.params)
    result = mask_obj.apply_mask(data).T
    logger.info('Done applying mask, {} particles transmitted'.format(
        len(result.T)))
    # add an extra layer of brackets to result for pages
    paged_result = [[col.tolist()] for col in result]
    # write resulting bunch to new file with same name but .out ext
    logger.info('Setting up new file')
    for name, item in input_params.items():
        output_file.parameterName.append(name)
        output_file.parameterDefinition.append(input_param_def[name])
        output_file.parameterData.append([item])
    for name, item in zip(input_columns.keys(), paged_result):
        output_file.columnName.append(name)
        output_file.columnDefinition.append(input_col_def[name])
        output_file.columnData.append(item)
    if '.in' in input_filename:
        out_name = input_filename.replace('.in', '.out')
    else:
        # BUG FIX: the original concatenated a list with a str
        # (``input_filename.split('.') + '.new'``), raising TypeError,
        # and logged the wrong target name; strip the extension and
        # append '.new' instead.
        out_name = input_filename.rsplit('.', 1)[0] + '.new'
    logger.info('Writing to file {}'.format(out_name))
    output_file.save(out_name)
    logger.info('Done writing to file')
    del output_file
def __init__(self, index=1, ascii=False):
    """Wrap an ``sdds.SDDS`` object with column/parameter registries.

    :param index: SDDS interface index (wrapped modulo 20)
    :param ascii: select ASCII output mode when True, binary otherwise
    """
    super().__init__()
    self._types = SDDS_Types
    self._columns = munch.Munch()
    self._parameters = munch.Munch()
    self._index = index
    self._sddsObject = sdds.SDDS(self.index % 20)
    obj = self._sddsObject
    obj.mode = obj.SDDS_ASCII if ascii else obj.SDDS_BINARY
def _sdds_init():
    """Lazily import sdds and cache its type constants in module globals."""
    global _SDDS_INDEX, _SDDS_DOUBLE_TYPES, _SDDS_STRING_TYPE, sdds_util, sdds
    if _SDDS_INDEX is not None:
        return  # already initialised
    from sirepo.template import sdds_util
    import sdds
    _SDDS_INDEX = 0
    probe = sdds.SDDS(_SDDS_INDEX)
    # SDDS_LONGDOUBLE only exists in newer sdds builds
    long_double = getattr(probe, 'SDDS_LONGDOUBLE', None)
    _SDDS_DOUBLE_TYPES = [probe.SDDS_DOUBLE, probe.SDDS_FLOAT]
    if long_double:
        _SDDS_DOUBLE_TYPES.append(long_double)
    _SDDS_STRING_TYPE = probe.SDDS_STRING
def __init__(self, sddsfile, *kws):
    """Hold the sdds file name and keyword list; preload the file via the
    sdds module when it is available (SDDS_ flag)."""
    self.sddsfile = sddsfile
    self.kwslist = kws
    self.precision = '%.16e'
    self.dcmdline = 'sddsprintout {fn} -notitle -nolabel'.format(fn=self.sddsfile)
    self.h5data = ''
    if SDDS_:
        obj = sdds.SDDS(1)
        obj.load(self.sddsfile)
        self.sddsobj = obj
def get_SDDS_column(SDDSfile, column_name=None, convert_to_float=True):
    """Extract columns from *SDDSfile* by name or (possibly negative) index.

    :param SDDSfile: path to the SDDS file
    :param column_name: iterable of column names and/or digit strings
        interpreted as column indices (negative indices via e.g. '-1');
        defaults to an empty selection.  (Previously a mutable default
        argument -- fixed.)
    :param convert_to_float: return float data when True, 16-char byte
        strings otherwise
    :return: (data array, selected column names, selected column units)
    """
    import os.path
    if column_name is None:
        column_name = []
    s = sdds.SDDS(0)
    if not os.path.isfile(SDDSfile):
        print('file {} does not exist'.format(SDDSfile))
        exit(-1)
    try:
        s.load(SDDSfile)
    except Exception:
        print("the file {} can not be loaded".format(SDDSfile))
        exit()
    get_list = []
    for cn in column_name:
        if cn in s.columnName:
            ind = s.columnName.index(cn)
        elif cn.isdigit():
            ind = int(cn)
            # BUG FIX: the original tested ``cn.columnName`` (a str has
            # no such attribute); validate against the file's columns.
            if ind >= len(s.columnName):
                print("Invalid column index {}".format(ind))
                exit(0)
        elif cn.startswith('-') and cn[1:].isdigit():
            ind = int(cn[1:])
            # BUG FIX: same ``cn.columnName`` typo as above.
            if ind >= len(s.columnName):
                print("Invalid column index {}".format(-1 * ind))
                exit(0)
            ind *= -1
        else:
            print("Invalid column_name {}".format(cn))
            exit(0)
        get_list.append(ind)
    len_col = len(s.columnData[0][0])
    if convert_to_float:
        res = np.empty((len(get_list), len_col))
    else:
        res = np.empty((len(get_list), len_col), dtype='S16')
    for nrow, ind in enumerate(get_list):
        # first page of each selected column
        res[nrow] = np.array(s.columnData[ind][0])
    cnlist = np.array(s.columnName)
    cnunit = np.array(s.columnDefinition)
    if convert_to_float:
        return res.astype(float), cnlist[get_list], cnunit[get_list, 1]
    return res, cnlist[get_list], cnunit[get_list, 1]
def maxwell_data_to_sdds(data, sdds_file_name='maxwell_fld.sdds'):
    """Write a Maxwell field map (rows of x, y, z, Bx, By, Bz) to an SDDS
    file, then sort it in place with sddssort (z, then y, then x, all
    ascending)."""
    output_file = sdds.SDDS(1)
    columns = data.T
    # one page per column
    paged = [[col.tolist()] for col in columns]
    for col_name, unit, item in zip(('x', 'y', 'z', 'Bx', 'By', 'Bz'),
                                    ('m', 'm', 'm', 'T', 'T', 'T'),
                                    paged):
        output_file.columnName.append(col_name)
        output_file.columnDefinition.append(
            [col_name, unit, '', '', output_file.SDDS_DOUBLE, int(len(item[0]))])
        output_file.columnData.append(item)
    output_file.save(sdds_file_name)
    os.system('sddssort {} -column=z,incr -column=y,incr -column=x,incr'.format(sdds_file_name))
def loadTwi(self):
    """Load 'stdin.twi', expose its columns as attributes on the SDDS
    object, collect the numeric column names, and refresh the GUI."""
    self.twi = sdds.SDDS(0)
    self.twi.load('stdin.twi')
    for col in range(len(self.twi.columnName)):
        # unwrap single-page columns to the bare data list
        if len(self.twi.columnData[col]) == 1:
            setattr(self.twi, self.twi.columnName[col],
                    self.twi.columnData[col][0])
        else:
            setattr(self.twi, self.twi.columnName[col],
                    self.twi.columnData[col])
    self.SDDSparameterNames = list()
    for param in self.twi.columnName:
        # BUG FIX: the original tested isinstance(..., (float, long));
        # ``long`` does not exist on Python 3 and raised NameError.
        if isinstance(getattr(self.twi, param)[0], (float, int)):
            self.SDDSparameterNames.append(param)
    self.updateSelectionBar()
    self.updatePlot()
def read_sdds_file(self, fileName, charge=None, ascii=False):
    """Load *fileName* into ``self.sdds`` and copy every column and
    parameter into the ``self.elegant`` mapping (columns become numpy
    arrays; single-page columns are unwrapped first)."""
    # self.reset_dicts()
    self.sdds = sdds.SDDS(0)
    self.sdds.load(fileName)
    for idx, col_name in enumerate(self.sdds.columnName):
        pages = self.sdds.columnData[idx]
        payload = pages[0] if len(pages) == 1 else pages
        self.elegant[col_name] = np.array(payload)
    self.SDDSparameterNames = list()
    for i, param in enumerate(self.sdds.parameterName):
        self.elegant[param] = self.sdds.parameterData[i]
def read_sdds_file(self, fileName, ascii=False):
    """Load *fileName* using a fresh SDDS interface index and copy every
    column and parameter into the ``self.elegant`` mapping (columns
    become numpy arrays; single-page columns are unwrapped first)."""
    self.sddsindex += 1
    sddsref = sdds.SDDS(self.sddsindex % 20)
    sddsref.load(fileName)
    for idx, col_name in enumerate(sddsref.columnName):
        pages = sddsref.columnData[idx]
        payload = pages[0] if len(pages) == 1 else pages
        self.elegant[col_name] = np.array(payload)
    self.SDDSparameterNames = list()
    for i, param in enumerate(sddsref.parameterName):
        self.elegant[param] = sddsref.parameterData[i]
def runTransformation(main):
    """Read the SDDS file named in ``sys.argv[2]``, run *main* over its
    column data, and save the result next to the input with a '.out'
    extension.

    :param main: callable taking an (N_particles, N_columns) array and
        returning the transformed array
    :raises ValueError: when the input filename does not end in '.in'.
        (Previously enforced with ``assert``, which is silently stripped
        under ``python -O``.)
    """
    input_filename = sys.argv[2]
    input_params, input_param_def, input_columns, input_col_def = utils.read_sdds_file(
        input_filename, include_definitions=True)
    output_file = sdds.SDDS(1)
    data = np.asarray([item for name, item in input_columns.items()]).T
    result = main(data)
    # one page per column
    paged_result = [[col.tolist()] for col in result]
    for name, item in input_params.items():
        output_file.parameterName.append(name)
        output_file.parameterDefinition.append(input_param_def[name])
        output_file.parameterData.append([item])
    for name, item in zip(input_columns.keys(), paged_result):
        output_file.columnName.append(name)
        output_file.columnDefinition.append(input_col_def[name])
        output_file.columnData.append(item)
    if not input_filename.endswith('.in'):
        raise ValueError(
            "input filename must end in '.in': {}".format(input_filename))
    output_file.save(input_filename[:-len('.in')] + '.out')
def __init__(self, name, element_length, ref_radius, multipole_strength, **kwargs):
    """Build a KQUAD beamline element.

    The first entry of *multipole_strength* is the quadrupole strength
    K1; the remaining entries are written to a systematic-multipole SDDS
    file as fractional strengths at the reference radius.
    """
    self.name = name
    self.filename = '{}_kquad.sdds'.format(self.name)
    self.ref_radius = ref_radius
    self.quad_strength = multipole_strength[0]
    self.multipole_strength = multipole_strength[1:]
    self.orders = list(range(2, len(self.multipole_strength) + 2))
    # fractional strength at the reference radius, relative to K1
    self.f_strength = [
        m_strength * self.ref_radius ** order / self.quad_strength
        for order, m_strength in zip(self.orders, self.multipole_strength)
    ]
    self.skew_strength = [0.0] * len(self.multipole_strength)
    self.data = [self.orders, self.f_strength, self.skew_strength]
    output_file = sdds.SDDS(1)
    for ele in self.data:
        logging.debug(len(ele))
    output_file.defineParameter('referenceRadius', '', 'm', '', '',
                                output_file.SDDS_DOUBLE, '')
    output_file.setParameterValue('referenceRadius', self.ref_radius, 1)
    # one page per column; columns are (order, normal, skew)
    col_types = (output_file.SDDS_INT16, output_file.SDDS_DOUBLE,
                 output_file.SDDS_DOUBLE)
    for col_name, unit, item, data_type in zip(
            ('order', 'normal', 'skew'),
            ('None', 'None', 'None'),
            [[col] for col in self.data],
            col_types):
        output_file.columnName.append(col_name)
        output_file.columnDefinition.append([col_name, unit, '', '', data_type, 0])
        output_file.columnData.append(item)
    params = {'L': element_length, 'K1': self.quad_strength,
              'SYSTEMATIC_MULTIPOLES': self.filename}
    for key, value in kwargs.items():
        params[key.upper()] = value
    output_file.save(self.filename)
    accelerator.BeamlineElement.__init__(self, self.name, 'KQUAD', params)
def __init__(self, filename, xaxis='x', yaxis='y', **kwargs):
    """Plot widget: load *filename* via sdds and scatter-plot *xaxis*
    against *yaxis*."""
    super(SDDSBeamPlotWidget, self).__init__(**kwargs)
    self.setLabels(left=yaxis, bottom=xaxis)
    self.sddsdata = sdds.SDDS(0)
    self.sddsdata.load(filename)
    for col in range(len(self.sddsdata.columnName)):
        # unwrap single-page columns to the bare data list
        if len(self.sddsdata.columnData[col]) == 1:
            setattr(self.sddsdata, self.sddsdata.columnName[col],
                    self.sddsdata.columnData[col][0])
        else:
            setattr(self.sddsdata, self.sddsdata.columnName[col],
                    self.sddsdata.columnData[col])
    self.SDDSparameterNames = list()
    for param in self.sddsdata.columnName:
        # BUG FIX: the original tested isinstance(..., (float, long));
        # ``long`` does not exist on Python 3 and raised NameError.
        if isinstance(getattr(self.sddsdata, param)[0], (float, int)):
            self.SDDSparameterNames.append(param)
    self.plot = self.plot(x=getattr(self.sddsdata, xaxis),
                          y=getattr(self.sddsdata, yaxis),
                          pen=None, symbol='o')
def gpt_screen_to_sdds(data=None, gpt_filename=None, sdds_filename='gpt_out.sdds'):
    """Convert GPT screen output (gdf2a text or in-memory data) to SDDS.

    :param data: pre-parsed particle data, one row per column in the
        order x, y, t, xp, yp, p, particleID; used when *gpt_filename*
        is None
    :param gpt_filename: GPT screen file produced by gdf2a; parsed when given
    :param sdds_filename: output file name; when falsy, the output name
        is derived from *gpt_filename*
    :return: the particle data actually written
    :raises ValueError: when neither *data* nor *gpt_filename* is given
    """
    if gpt_filename:
        # input a gpt screen file from gdf2a
        with open(gpt_filename) as f:
            rows = []
            f.readline()  # discard the header line
            for i, line in enumerate(f):
                fields = [x for x in line.strip().split(' ') if not x == '']
                fields.append(i)  # running particle index -> particleID
                rows.append(fields)
        # np.asfarray was removed in NumPy 2.0; asarray(dtype=float) is
        # the supported equivalent
        n_data = np.asarray(rows, dtype=float).T
    elif data is not None:
        # BUG FIX: the original used a second plain ``if data:`` which
        # clobbered the file-parsed n_data with the raw row list when a
        # gpt file was given; the branches are now mutually exclusive.
        n_data = data
    else:
        raise ValueError('either data or gpt_filename must be provided')
    data_col_names = ['x', 'y', 't', 'xp', 'yp', 'p', 'particleID']
    sdds_file = sdds.SDDS(0)
    # name -> (symbol, units, description, formatString, type, fieldLength)
    col_def = {'x': ['', 'm', '', '', sdds_file.SDDS_DOUBLE, 0],
               'xp': ['x\'', '', '', '', sdds_file.SDDS_DOUBLE, 0],
               'y': ['', 'm', '', '', sdds_file.SDDS_DOUBLE, 0],
               'yp': ['y\'', '', '', '', sdds_file.SDDS_DOUBLE, 0],
               't': ['', 's', '', '', sdds_file.SDDS_DOUBLE, 0],
               'p': ['', 'm$be$nc', '', '', sdds_file.SDDS_DOUBLE, 0],
               'particleID': ['', '', '', '', sdds_file.SDDS_LONG, 0]}
    for name, col in zip(data_col_names, n_data):
        sdds_file.defineColumn(name, *col_def[name])
        sdds_file.setColumnValueList(name, col.tolist(), 1)
    if sdds_filename:
        sdds_file.save(sdds_filename)
    else:
        sdds_file.save(gpt_filename.split('.')[0] + '.sdds')
    return n_data
def elegant_findtwiss(lattice,
                      beamline_to_use=None,
                      rootname='temp',
                      matched=1,
                      initial_optics=[1, 0, 0, 0, 1, 0, 0, 0],
                      alternate_element={},
                      closed_orbit=1,
                      gamma0=1.0e4 / 0.511,
                      twiss_columns=[
                          's', 'betax', 'alphax', 'psix', 'etax', 'etaxp',
                          'betay', 'alphay', 'psiy', 'etay', 'etayp'
                      ]):
    '''
    Find twiss parameters using elegant.

    NOTE(review): initial_optics, alternate_element and twiss_columns are
    mutable default arguments; safe only while callers never mutate them.

    :param lattice: The elegantLatticeFile object to be used
    :param beamline_to_use: The beamline to be used. Default is None, then lattice.useline is used
    :param rootname: The rootname of the output file. Default is 'temp'.
    :param matched: 1: seek for matched solution, 0: Use initial_optics as initial condition
    :param initial_optics: Initial optics to start with, default is [1, 0, 0, 0, 1, 0, 0, 0]
    :param alternate_element: No use for now
    :param closed_orbit: No use for now
    :param gamma0: The reference lorentz factor of the particle
    :param twiss_columns: The output columns of the twiss parameters, default is
        ['s', 'betax', 'alphax', 'psix', 'etax', 'etaxp', 'betay', 'alphay', 'psiy', 'etay', 'etayp']
    :return: tuple of a twiss numpy array and a dictionary of parameters: (twiss array, twiss parameters)
    '''
    if beamline_to_use is None:
        beamline_to_use = lattice.useline
    # write the lattice and build the elegant command file
    lattice.write('{}.lte'.format(rootname))
    cmd_file = elegantCommandFile('{}.ele'.format(rootname))
    cmd_file.addCommand(
        'run_setup',
        lattice='{}.lte'.format(rootname),
        use_beamline=beamline_to_use,
        rootname=rootname,
        p_central=np.sqrt(np.power(gamma0, 2.0) - 1),
        centroid='%s.cen',
        default_order=3,
        concat_order=3,
    )
    '''cmd_file.addCommand('closed_orbit',
                       output='%s.clo',
                       closed_orbit_iterations=1500,
                       closed_orbit_accuracy=1e-12,
                       iteration_fraction=0.3
                       )'''
    cmd_file.addCommand(
        'twiss_output',
        matched=matched,
        output_at_each_step=0,
        filename='%s.twi',
        radiation_integrals=1,
        beta_x=initial_optics[0],
        alpha_x=initial_optics[1],
        eta_x=initial_optics[2],
        etap_x=initial_optics[3],
        beta_y=initial_optics[4],
        alpha_y=initial_optics[5],
        eta_y=initial_optics[6],
        etap_y=initial_optics[7],
    )
    cmd_file.addCommand('run_control')
    cmd_file.addCommand('bunched_beam')
    cmd_file.addCommand('track')
    cmd_file.write()
    # run elegant, discarding its stdout
    cmdstr = 'elegant {}.ele'.format(rootname)
    with open(os.devnull, "w") as f:
        subp.call(shlex.split(cmdstr), stdout=f)
    # read back the twiss output file
    twifile = sdds.SDDS(0)
    twifile.load('{}.twi'.format(rootname))
    inds = []
    for name in twiss_columns:
        ind = twifile.columnName.index(name)
        inds.append(ind)
    # first page only; rows are the requested twiss columns
    twiss_list = np.array(twifile.columnData)[inds, 0, :].astype(float)
    twiss_parameter = {}
    for para_name, para_value in zip(twifile.parameterName, twifile.parameterData):
        twiss_parameter[para_name] = para_value[0]
    return twiss_list, twiss_parameter
rect1 = [left, 0.55, width, 0.40] #left, bottom, width, height rect2 = [left, 0.08, width, 0.40] fig = plt.figure(1, figsize=(12, 9)) ax1 = fig.add_axes(rect1) ax4 = fig.add_axes(rect2, sharex=ax1) for file in args.files: fileOK = os.path.isfile(file) if not fileOK: print("file not found") sys.exit() print("reading ", file) data = sdds.SDDS(0) data.load(file) if args.listpar: data.listParameters() if args.listcol: data.listColumns() if ("t" in data.columnName): t = np.array(data.getColumnData("t")) * 1e9 if ("t0" in data.parameterName and "dt" in data.parameterName and "NumberTimeSteps" in data.parameterName): t0 = data.getParameterValue("t0") dt = data.getParameterValue("dt") nots = data.getParameterValue("NumberTimeSteps") print(r'%d steps starting at t0=%12.3g s step dt=%12.3g s' % (nots, t0, dt))
# --- module-level constants for elegant output handling ---
_FILE_ID_SEP = '-'
_OUTPUT_INFO_FILE = 'outputInfo.json'
_OUTPUT_INFO_VERSION = '2'
# plot titles keyed by "<x-field>-<y-field>" pairs
_PLOT_TITLE = PKDict({
    'x-xp': 'Horizontal',
    'y-yp': 'Vertical',
    'x-y': 'Cross-section',
    't-p': 'Longitudinal',
})
_SDDS_INDEX = 0
_s = sdds.SDDS(_SDDS_INDEX)
# SDDS_LONGDOUBLE only exists in newer sdds builds
_x = getattr(_s, 'SDDS_LONGDOUBLE', None)
_SDDS_DOUBLE_TYPES = [_s.SDDS_DOUBLE, _s.SDDS_FLOAT] + ([_x] if _x else [])
_SDDS_STRING_TYPE = _s.SDDS_STRING
_SIMPLE_UNITS = ['m', 's', 'C', 'rad', 'eV']
_X_FIELD = 's'


class CommandIterator(lattice.ElementIterator):
    """Element iterator that injects a semaphore file into run_setup."""

    def start(self, model):
        super(CommandIterator, self).start(model)
        if model._type == 'run_setup':
            self.fields.append(['semaphore_file', _ELEGANT_SEMAPHORE_FILE])
def write_data(self, method_name, objective_func, devices=[], maximization=False, max_iter=0):
    """ Save optimization parameters to the Database

    :param method_name: (str) The used method name.
    :param objective_func: (Target) The Target class object.
    :param devices: (list) The list of devices on this run.
    :param maximization: (bool) Whether or not the data collection was a maximization. Default is False.
    :param max_iter: (int) Maximum number of Iterations. Default is 0.
    :return: status (bool), error_msg (str)
    """
    """ H. Shang, found the first point of the devices are reduandant, should be removed
    for GP optimizer, the first 3 points and the last point of the objective should be removed.
    for other optimizers, the first and last point of the objective should be removed.
    """

    # NOTE(review): byteify/removeUnicodeKeys use Python-2-only
    # constructs (iteritems, unicode) and will raise on Python 3 if
    # ever called with a dict/unicode value.
    def byteify(input):
        # recursively convert unicode containers to utf-8 byte strings
        if isinstance(input, dict):
            return {
                byteify(key): byteify(value)
                for key, value in input.iteritems()
            }
        elif isinstance(input, list):
            return [byteify(element) for element in input]
        elif isinstance(input, unicode):
            return input.encode('utf-8')
        else:
            return input

    def removeUnicodeKeys(input_dict):  # implemented line 91
        return dict([(byteify(e[0]), e[1]) for e in input_dict.items()])

    print(self.name + " - Write Data: ", method_name)
    try:
        # if GP is used, the model is saved via saveModel first
        self.data
    except:
        self.data = {}  # dict of all devices being scanned
    objective_func_pv = objective_func.eid
    print(objective_func_pv)
    self.data[objective_func_pv] = []  # detector data array
    self.data['DetectorAll'] = []  # detector acquisition array
    self.data['DetectorStat'] = []  # detector mean array
    self.data['DetectorStd'] = []  # detector std array
    self.data['timestamps'] = []  # timestamp array
    self.data['charge'] = []
    self.data['current'] = []
    self.data['stat_name'] = []
    # end try/except
    self.data['pv_list'] = [dev.eid for dev in devices]  # device names
    for dev in devices:
        self.data[dev.eid] = []
    for dev in devices:
        vals = len(dev.values)
        # drop the first device reading: it is redundant with the second point
        self.data[dev.eid].append(dev.values[1:])
        self.data['devtime'] = dev.times[1:]
    print(objective_func.times)
    objvals = len(objective_func.values)
    start = 0
    end = objvals
    if objvals - vals == 1:
        # for other optimizers the first and last objective points are
        # removed (first point is redundant): the objective function has
        # 1 more value than the variables
        start = 1
        end = -1
        objective_func.niter -= 2
    if objvals - vals == 3:
        # for GP optimizers the objective has 3 more values than the
        # variables; the first 3 and the last of the objective are
        # removed (first point is redundant)
        start = 3
        end = -1
        objective_func.niter -= 4
    # trim every per-iteration series to the same [start:end] window
    objective_func.values = objective_func.values[start:end]
    objective_func.objective_means = objective_func.objective_means[
        start:end]
    objective_func.objective_acquisitions = objective_func.objective_acquisitions[
        start:end]
    objective_func.times = objective_func.times[start:end]
    objective_func.std_dev = objective_func.std_dev[start:end]
    objective_func.charge = objective_func.charge[start:end]
    objective_func.current = objective_func.current[start:end]
    try:
        objective_func.losses = objective_func.losses[start:end]
    except:
        pass
    self.data[objective_func_pv].append(
        objective_func.objective_means)  # this is mean for compat
    print(self.data[objective_func_pv])
    self.data['DetectorAll'].append(objective_func.objective_acquisitions)
    self.data['DetectorStat'].append(objective_func.values)
    self.data['DetectorStd'].append(objective_func.std_dev)
    self.data['timestamps'].append(objective_func.times)
    self.data['charge'].append(objective_func.charge)
    self.data['current'].append(objective_func.current)
    self.data['stat_name'].append(objective_func.stats.display_name)
    # one loss series per loss PV
    for ipv in range(len(self.losspvs)):
        self.data[self.losspvs[ipv]] = [
            a[ipv] for a in objective_func.losses
        ]
    self.detValStart = self.data[objective_func_pv][0]
    self.detValStop = self.data[objective_func_pv][-1]
    # replace with matlab friendly strings
    # for key in self.data:
    #     key2 = key.replace(":", "_")
    #     self.data[key2] = self.data.pop(key)
    # extra info to add into the save file
    self.data["MachineInterface"] = self.name
    try:
        self.data[
            "epicsname"] = epics.name  # returns fakeepics if caput has been disabled
    except:
        pass
    self.data["niter"] = objective_func.niter
    #self.data["BEND_DMP1_400_BDES"] = self.get_value("BEND:DMP1:400:BDES")
    #self.data["Energy"] = self.get_energy()
    self.data["ScanAlgorithm"] = str(
        method_name)  # string of the algorithm name
    self.data["ObjFuncPv"] = str(
        objective_func_pv)  # string identifing obj func pv
    self.data['DetectorMean'] = str(objective_func_pv.replace(
        ":", "_"))  # reminder to look at self.data[objective_func_pv]
    # TODO: Ask Joe if this is really needed...
    #self.data["NormAmpCoeff"] = norm_amp_coeff
    # build the output SDDS file name from the sim log location
    path = simlog.getPath()
    filename = 'OcelotScan-' + method_name + '-' + simlog.getFileTs(
    ) + '.sdds'
    fout = os.path.join(path, filename)
    sddsData = sdds.SDDS(0)
    sddsData.mode = sddsData.SDDS_ASCII
    objective_func_pv = objective_func.eid
    sddsData.defineSimpleParameter("Objective", sddsData.SDDS_STRING)
    sddsData.defineSimpleParameter("ScanAlgorithm", sddsData.SDDS_STRING)
    sddsData.defineSimpleParameter("Number_Of_Iterations", sddsData.SDDS_LONG)
    sddsData.defineSimpleParameter("DataPoints", sddsData.SDDS_LONG)
    sddsData.defineSimpleParameter("HyperParFile", sddsData.SDDS_STRING)
    sddsData.defineSimpleColumn("Time", sddsData.SDDS_DOUBLE)
    sddsData.defineSimpleColumn("DeviceTime", sddsData.SDDS_DOUBLE)
    sddsData.defineSimpleColumn(objective_func_pv, sddsData.SDDS_DOUBLE)
    print(fout)
    # one column per scanned device
    for device in self.data['pv_list']:
        # print(device)
        sddsData.defineSimpleColumn(device, sddsData.SDDS_DOUBLE)
    for ipv in range(len(self.losspvs)):
        sddsData.defineSimpleColumn(self.losspvs[ipv], sddsData.SDDS_DOUBLE)
        self.data[self.losspvs[ipv]] = [
            a[ipv] for a in objective_func.losses
        ]
    sddsData.setParameterValue("Objective", str(objective_func_pv), 1)
    sddsData.setParameterValue("ScanAlgorithm", self.data["ScanAlgorithm"],
                               1)
    sddsData.setParameterValue("Number_Of_Iterations",
                               int(self.data["niter"]), 1)
    sddsData.setParameterValue("DataPoints", objective_func.points, 1)
    # NOTE(review): hard-coded symlink path; os.readlink raises if it is
    # absent or not a link
    parFile = '/usr/local/oag/3rdParty/OcelotOptimizer-dev/parameters/anl_hyperparams.pkl'
    sddsData.setParameterValue('HyperParFile', os.readlink(parFile), 1)
    print('timestamp from objective function')
    print(self.data['timestamps'][0])
    for pv in self.data['pv_list']:
        print(pv)
        print(self.data[pv][0])
        rows = len(self.data[pv][0])
        sddsData.setColumnValueList(pv, self.data[pv][0], 1)
    rows1 = len(self.data['timestamps'][0])
    #print(self.data['devtime'])
    #print((self.data['timestamps'][0])
    sddsData.setColumnValueList("DeviceTime", self.data['devtime'], 1)
    sddsData.setColumnValueList("Time", self.data['timestamps'][0], 1)
    sddsData.setColumnValueList(str(objective_func_pv),
                                self.data[objective_func_pv][0], 1)
    # print(self.data[objective_func_pv])
    # sddsData.setColumnValueList(str(objective_func_pv), self.data[objective_func_pv][0], 1)
    print('objective values from objective function')
    print(self.data[objective_func_pv][0])
    rows1 = len(self.data[objective_func_pv][0])
    #for pv in self.data['pv_list']:
    #    print(self.data[pv][0])
    #    sddsData.setColumnValueList(pv, self.data[pv][0],1)
    for ipv in range(len(self.losspvs)):
        # print(self.data[self.losspvs[ipv]])
        sddsData.setColumnValueList(self.losspvs[ipv],
                                    self.data[self.losspvs[ipv]], 1)
    print('save sdds')
    try:
        sddsData.save(fout)
        self.last_filename = fout
        print('Saved scan data to ', self.last_filename)
    except:
        print('Error saving sdds file')
    # clear for next run
    self.data = dict()
    return True, ""
def main(var):
    """Read elegant watch-point files <var>.w1 / <var>.w3 (plus <var>.fin)
    and draw phase-space comparison plots with ROOT.

    NOTE(review): assumes the watch files carry columns in the order
    x, xp, y, yp, t, p, and that ``c`` (speed of light), the ROOT classes
    (TCanvas, TGraph, TH1F) and ``main`` (for the graph1 attribute) are
    in scope -- confirm against the surrounding file.
    """
    file1 = var + '.w1'
    file2 = var + '.w3'
    file_emit = var + '.fin'
    f1 = sdds.SDDS(0)  # create an SDDS-interface object
    f2 = sdds.SDDS(0)  # create an SDDS-interface object
    f3 = sdds.SDDS(0)
    f1.load(file1)
    f2.load(file2)
    f3.load(file_emit)
    # column order assumed: 0=x, 1=xp, 2=y, 3=yp, 4=t, 5=p
    t1 = np.array(f1.columnData[4][0])
    p_mc1 = np.array(f1.columnData[5][0])
    t2 = np.array(f2.columnData[4][0])
    p_mc2 = np.array(f2.columnData[5][0])
    x1 = np.array(f1.columnData[0][0])
    xp1 = np.array(f1.columnData[1][0])
    x2 = np.array(f2.columnData[0][0])
    xp2 = np.array(f2.columnData[1][0])
    y1 = np.array(f1.columnData[2][0])
    yp1 = np.array(f1.columnData[3][0])
    y2 = np.array(f2.columnData[2][0])
    yp2 = np.array(f2.columnData[3][0])
    # longitudinal coordinate z relative to the bunch mean time, in mm
    t01 = 1. * sum(t1) / len(t1)
    t02 = 1. * sum(t2) / len(t2)
    z1 = np.array([-(x - t01) * c * (10**3) for x in t1])
    z2 = np.array([-(y - t02) * c * (10**3) for y in t2])
    # momentum in MeV (0.511 MeV per unit of m_e*c)
    p1 = np.array([x * 0.511 for x in p_mc1])
    p2 = np.array([y * 0.511 for y in p_mc2])
    c1 = TCanvas('c1', 'test', 800, 800)
    c1.Divide(4, 2)
    ##############z-p
    c1.cd(1)
    gr1 = TGraph(len(z1), z1, p1)
    gr2 = TGraph(len(z2), z2, p2)
    gr1.GetXaxis().SetTitle('z [mm]')
    gr1.GetYaxis().SetTitle('p [MeV]')
    gr1.SetMarkerStyle(1)
    gr2.SetMarkerStyle(1)
    gr2.SetMarkerColor(2)
    gr1.SetTitle('[Z - P]')
    gr1.GetXaxis().SetLimits(min(np.concatenate((z1, z2))),
                             max(np.concatenate((z1, z2))))
    gr1.Draw("ap")
    gr2.Draw("psame")
    ######################################### CURRENT
    c1.cd(2)
    n_bins = 100
    a = -3.
    b = 3.
    h1 = TH1F('h1', 'Density Current', n_bins, a, b)
    h2 = TH1F('h2', 'Density Current', n_bins, a, b)
    for i in z1:
        h1.Fill(i)
    for i in z2:
        h2.Fill(i)
    h2.SetLineColor(2)
    h2.GetXaxis().SetTitle('z, [mm]')
    h2.GetYaxis().SetTitle('I, [kA]')
    # scale histogram counts to current density
    h1.Scale(0.3 * n_bins / (b - a) / 50000.)
    h2.Scale(0.3 * n_bins / (b - a) / 50000.)
    h2.SetMaximum(0.2)
    h2.Draw('hist')
    h1.Draw("histsame")
    #############################################XXXXXXX-PPPPXXXX
    c1.cd(3)
    gr3 = TGraph(len(x1), x1 * 1000, xp1)
    gr4 = TGraph(len(x2), x2 * 1000, xp2)
    gr4.GetXaxis().SetTitle('x [mm]')
    gr4.GetYaxis().SetTitle('xp [mm]')
    gr3.SetMarkerStyle(1)
    gr4.SetMarkerStyle(1)
    gr4.SetMarkerColor(2)
    gr4.SetTitle('[X - PX]')
    gr4.Draw("ap")
    gr3.Draw("psame")
    #######################################################YYYYYYYYYYYYY--PYYYYYYYYYYY
    c1.cd(4)
    gr5 = TGraph(len(y1), y1 * 1000, yp1)
    gr6 = TGraph(len(y2), y2 * 1000, yp2)
    gr5.GetXaxis().SetTitle('y [mm]')
    gr5.GetYaxis().SetTitle('yp [mm]')
    gr5.SetMarkerStyle(1)
    gr6.SetMarkerStyle(1)
    gr6.SetMarkerColor(2)
    gr5.SetTitle('[Y - PY]')
    gr5.GetXaxis().SetLimits(
        min(np.concatenate((y1, y2))) * 1000,
        max(np.concatenate((y1, y2))) * 1000)
    gr5.Draw("ap")
    gr6.Draw("psame")
    #############################################
    ###################################################XXXXXXXXXXXXXXX-YYYYYYYYYYY
    c1.cd(5)
    gr7 = TGraph(len(x1), x1 * 1000, y1 * 1000)
    gr8 = TGraph(len(x2), x2 * 1000, y2 * 1000)
    gr8.GetXaxis().SetTitle('x [mm]')
    gr8.GetYaxis().SetTitle('y [mm]')
    gr7.SetMarkerStyle(1)
    gr8.SetMarkerStyle(1)
    gr8.SetMarkerColor(2)
    gr8.SetTitle('[X - Y]')
    # NOTE(review): the x-y pad limits are taken from z2 -- looks like a
    # copy/paste slip in the original; behavior preserved as-is.
    gr8.GetXaxis().SetLimits(min(z2), max(z2))
    gr8.Draw("ap")
    gr7.Draw("psame")
    #############################################XXXXXXXXx-ZZZZZZZZZZZZ
    c1.cd(7)
    gr9 = TGraph(len(z1), z1, x1 * 1000)
    gr10 = TGraph(len(z2), z2, x2 * 1000)
    # keep a reference on main so ROOT does not garbage-collect the graph
    main.graph1 = TGraph(len(z2), z2, x2 * 1000)
    gr10.GetXaxis().SetTitle('z [mm]')
    gr10.GetYaxis().SetTitle('x [mm]')
    gr9.SetMarkerStyle(1)
    gr9.SetMarkerStyle(1)
    gr10.SetMarkerColor(2)
    gr10.SetTitle('[Z - X]')
    gr10.GetXaxis().SetLimits(min(np.concatenate((z1, z2))),
                              max(np.concatenate((z1, z2))))
    #gr9.SetMaximum(0.8)
    #gr9.SetMinimum(-0.8)
    gr10.Draw("ap")
    gr9.Draw("psame")
    ##############################YYYYYYY-ZZZZZZZZZZZZ
    c1.cd(6)
    gr11 = TGraph(len(z1), z1, y1 * 1000)
    gr12 = TGraph(len(z2), z2, y2 * 1000)
    gr12.GetXaxis().SetTitle('z [mm]')
    gr12.GetYaxis().SetTitle('y [mm]')
    gr11.SetMarkerStyle(1)
    gr12.SetMarkerStyle(1)
    gr12.SetMarkerColor(2)
    gr12.SetTitle('[Z - Y]')
    gr12.GetXaxis().SetLimits(min(np.concatenate((z1, z2))),
                              max(np.concatenate((z1, z2))))
    gr12.Draw("ap")
    gr11.Draw("psame")
    #############################################
    # DRAWING BETA FUNCTIONS: USE plot_beta.py
    input('kerk')  # block so the canvas stays on screen
def elegant_track(lattice,
                  beamline_to_use=None,
                  Npar=1,
                  rootname='temp',
                  initial_optics=[1, 0, 0, 0, 1, 0, 0, 0],
                  emit_x=0.0,
                  emit_nx=0.0,
                  emit_y=0.0,
                  emit_ny=0.0,
                  centroid_columns=[
                      's', 'Cx', 'Cxp', 'Cy', 'Cyp', 'Cs', 'Cdelta', 'pCentral'
                  ],
                  sigma_columns=[
                      's', 'Sx', 'Sxp', 'Sy', 'Syp', 'Ss', 'Sdelta', 'ex',
                      'ecx', 'ey', 'ecy'
                  ],
                  gamma0=1.0e4 / 0.511):
    '''
    Track a bunched beam through ``lattice`` with elegant and collect the
    centroid and sigma output.

    :param lattice: The elegantLatticeFile object to be used
    :param beamline_to_use: The beamline to be used. Default is None, then
        lattice.useline is used
    :param Npar: Number of particles used in tracking, default is 1
    :param rootname: The rootname of the output files. Default is 'temp'.
    :param initial_optics: Initial optics to start with, ordered as
        [beta_x, alpha_x, eta_x, etap_x, beta_y, alpha_y, eta_y, etap_y].
        Default is [1, 0, 0, 0, 1, 0, 0, 0]
    :param emit_x: Initial geometric emittance of horizontal direction
    :param emit_nx: Initial normalized emittance of horizontal direction,
        ignored if emit_x is given
    :param emit_y: Initial geometric emittance of vertical direction
    :param emit_ny: Initial normalized emittance of vertical direction,
        ignored if emit_y is given
    :param centroid_columns: The output columns of centroids. Default is:
        ['s','Cx','Cxp','Cy','Cyp','Cs','Cdelta','pCentral']
    :param sigma_columns: The output columns of sigmas. Default is:
        ['s','Sx','Sxp','Sy','Syp','Ss','Sdelta','ex','ecx','ey','ecy']
    :param gamma0: The reference Lorentz factor of the particle
    :return: tuple of two numpy arrays: (centroid_array, sigma_array), each
        shaped (len(columns), n_rows)
    '''

    def _read_columns(filename, column_names):
        # Load an SDDS file and return the named columns of the first page
        # as a float array with one row per requested column.
        sdds_file = sdds.SDDS(0)
        sdds_file.load(filename)
        inds = [sdds_file.columnName.index(name) for name in column_names]
        return np.array(sdds_file.columnData)[inds, 0, :].astype(float)

    if beamline_to_use is None:
        beamline_to_use = lattice.useline
    # Bug fix: write the lattice under the rootname. The original wrote
    # 'temp.lte' unconditionally, which broke every call with
    # rootname != 'temp' because run_setup below loads '<rootname>.lte'.
    lattice.write('{}.lte'.format(rootname))
    cmd_file = elegantCommandFile('{}.ele'.format(rootname))
    cmd_file.addCommand(
        'run_setup',
        lattice='{}.lte'.format(rootname),
        use_beamline=beamline_to_use,
        rootname=rootname,
        # p_central is beta*gamma = sqrt(gamma^2 - 1).
        p_central=np.sqrt(np.power(gamma0, 2.0) - 1),
        centroid='%s.cen',
        sigma='%s.sig',
        default_order=3,
        concat_order=3,
    )
    cmd_file.addCommand('run_control')
    cmd_file.addCommand(
        'bunched_beam',
        n_particles_per_bunch=Npar,
        emit_x=emit_x,
        emit_nx=emit_nx,
        beta_x=initial_optics[0],
        alpha_x=initial_optics[1],
        eta_x=initial_optics[2],
        etap_x=initial_optics[3],
        emit_y=emit_y,
        emit_ny=emit_ny,
        beta_y=initial_optics[4],
        alpha_y=initial_optics[5],
        eta_y=initial_optics[6],
        etap_y=initial_optics[7],
    )
    cmd_file.addCommand('track')
    cmd_file.write()
    # Run elegant, discarding its stdout chatter.
    cmdstr = 'elegant {}.ele'.format(rootname)
    with open(os.devnull, "w") as f:
        subp.call(shlex.split(cmdstr), stdout=f)
    centroid = _read_columns('{}.cen'.format(rootname), centroid_columns)
    sigma = _read_columns('{}.sig'.format(rootname), sigma_columns)
    return centroid, sigma
# --- Top of a stand-alone SDDS phase-space plotting script. The plotting
# code that consumes these arrays follows below this chunk.
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import numpy as np

# Manual plot ranges used when automatic_range is False. Units are
# presumably microns for x/y/z (the *_micro conversion below scales by 1e6)
# — TODO confirm against the plotting code that uses them.
automatic_range = False
x_range = [-1200, 1200]
y_range = [-1200, 1200]
z_range = [-600, 600]
xp_range = [-1e-3, 1e-3]
p_range = [149.5, 150.5]
deltap_range = [-0.5, 0.5]

# NOTE(review): `sys` and `sdds` are used here but not imported in this
# chunk — presumably imported earlier in the file; verify.
file_name = sys.argv[1]
sdds_object = sdds.SDDS(0)  # create an SDDS-interface object
sdds_object.load(file_name)  # open an SDDS file named <file_name>
# x.columnName - a list of columns in the file
# x.columnData - column data
# Columns are read positionally via [i][0] (column i, first page); the
# indices match the {x, xp, y, yp, t, p} layout of elegant particle output
# (see getAllCols example elsewhere in this file). Column 3 (yp) is
# intentionally skipped — it is not used below.
x = sdds_object.columnData[0][0]
xp = sdds_object.columnData[1][0]
y = sdds_object.columnData[2][0]
t = sdds_object.columnData[4][0]
p = sdds_object.columnData[5][0]

pmean = np.average(p)
tmean = np.average(t)

# Scale transverse positions by 1e6 (per the *_micro names, m -> microns).
x_micro = [i * 1e6 for i in x]
y_micro = [i * 1e6 for i in y]
}
# ^ closes a module-level dict literal whose opening is above this chunk.

# Metadata file describing simulation output, and its schema version.
_OUTPUT_INFO_FILE = 'outputInfo.json'
_OUTPUT_INFO_VERSION = '2'

# Display titles for the standard phase-space plots, keyed by the
# "<x-field>-<y-field>" pair name.
_PLOT_TITLE = {
    'x-xp': 'Horizontal',
    'y-yp': 'Vertical',
    'x-y': 'Cross-section',
    't-p': 'Longitudinal',
}

# One shared SDDS interface object for this module, bound to index 0.
_SDDS_INDEX = 0
_SDDS_Singleton = sdds.SDDS(_SDDS_INDEX)
# SDDS_LONGDOUBLE is fetched defensively — presumably it is missing in
# older sdds builds, hence the getattr(..., None) and conditional append.
# NOTE(review): `x` is a module-level temporary that stays bound after this
# block; confirm nothing later in the file relies on (or collides with) it.
x = getattr(_SDDS_Singleton, 'SDDS_LONGDOUBLE', None)
# SDDS numeric types treated as floating point when reading column data.
_SDDS_DOUBLE_TYPES = [
    _SDDS_Singleton.SDDS_DOUBLE,
    _SDDS_Singleton.SDDS_FLOAT,
]
if x is not None:
    _SDDS_DOUBLE_TYPES.append(x)
_SDDS_STRING_TYPE = _SDDS_Singleton.SDDS_STRING

# Application schema for the elegant simulation type (project-level lookup).
_SCHEMA = simulation_db.get_schema(elegant_common.SIM_TYPE)
# Units that are passed through to plots without further formatting.
_SIMPLE_UNITS = ['m', 's', 'C', 'rad', 'eV']