import random

def run_heuristic(num_employees_license, num_employees_credit,
                  num_employees_other, confidence, morning, total_daily):
    SERVICE_TIME = 12   # Minutes it takes to answer a message
    SIM_TIME = 360      # Simulation time in minutes
    if morning:
        SIM_TIME = 180
    RANDOM_SEED = random.randint(30, 90)
    random.seed(RANDOM_SEED)

    license_waiting_time_avg = []
    credit_waiting_time_avg = []
    other_waiting_time_avg = []
    # Run the simulation ten times and collect the mean waiting time per queue.
    for iteration in range(10):
        results_dict = sim(num_employees_license, num_employees_credit,
                           num_employees_other, SERVICE_TIME, SIM_TIME,
                           morning, total_daily)
        license_waiting_time_avg.append(results_dict['license'])
        credit_waiting_time_avg.append(results_dict['credit'])
        other_waiting_time_avg.append(results_dict['other'])

    license_confidence_interval = mean_confidence_interval(
        license_waiting_time_avg, confidence)
    credit_confidence_interval = mean_confidence_interval(
        credit_waiting_time_avg, confidence)
    other_confidence_interval = mean_confidence_interval(
        other_waiting_time_avg, confidence)
    return {
        'license': license_confidence_interval,
        'credit': credit_confidence_interval,
        'other': other_confidence_interval,
    }
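# The snippet above leaves `sim` and `mean_confidence_interval` undefined.
# A minimal sketch of the latter, assuming the usual scipy.stats t-interval
# recipe (the project's actual helper may differ):

import numpy as np
import scipy.stats

def mean_confidence_interval(data, confidence=0.95):
    """Return (mean, lower bound, upper bound) of a t confidence interval."""
    a = np.asarray(data, dtype=float)
    n = len(a)
    m = a.mean()
    se = scipy.stats.sem(a)  # standard error of the mean
    h = se * scipy.stats.t.ppf((1 + confidence) / 2.0, n - 1)
    return m, m - h, m + h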
def parse2Hoc(filename):
    # Create simulation object.
    snnapSim = sim(filename)
    # Write the model in NEURON.
    nrnModel = NRNModel(snnapSim)
    return snnapSim
def select_folder_percerr(self):
    results = sim.sim(self, self.numpoints, self.radius, self.prec,
                      self.iter, self.binsize, self.percerror)
    precision, one, two, three, four, five, six, seven, eight, nine, ten = results
    self.message.set(str(precision) + ' nm route localization precision\n' +
                     str(ten) + '% confidence +/- precision')
def parse2Hoc(filename, cond):
    # Create simulation object.
    snnapSim = sim(filename)
    # Write the model in NEURON, as a point ('p') or distributed ('d') model.
    if cond == 'p':
        nrnModel = NRNModelPoint(snnapSim)
    elif cond == 'd':
        nrnModel = NRNModelDist(snnapSim)
    return snnapSim
def select_folder_percerr(self):
    results = sim.sim(self, self.numpoints, self.radius, self.prec,
                      self.iter, self.binsize, self.percerror)
    precision, one, two, three, four, five, six, seven, eight, nine, ten = results
    self.message.set(str(ten) + ' reproducibility %')
def select_folder_percerr(self):
    results = sim.sim(self, self.numpoints, self.radius, self.prec,
                      self.iter, self.binsize, self.percerror,
                      self.a, self.rotation)
    precision = results
    self.message.set('deviation (nm): ' + str(precision))
def run(args, logger, plotter):
    # Read config file for simulation parameters.
    #
    cfg = readConfigFile(logger, args.i)

    xtra_header_keys = {}  # dict of extra keys and values for FITS header

    st = time.time()
    logger.debug(" Beginning simulation")

    # Build Zemax spectrograph model.
    #
    zspec = zSpectrograph(cfg['SIM_COLLIMATOR_ZMX_FILE'],
                          cfg['SIM_CAMERA_ZMX_FILE'])

    # Build the instrument using the instrument_builder package.
    #
    # Although we construct the components from the configuration files, we
    # override some attributes to both ensure consistency and accept some
    # limitations of the current version:
    #
    # -> Assign camera/collimator EFFL to directly match the Zemax models.
    # -> Set the preoptics WFNO such that a spatial resolution element spans
    #    [IFU_SLICES_PER_RESEL] slices.
    # -> Set the preoptics anamorphic magnification to 1.
    #
    instrument = SWIFT_like(cfg['PREOPTICS_CFG_NAME'], cfg['IFU_CFG_NAME'],
                            cfg['SPECTROGRAPH_CFG_NAME'],
                            cfg['DETECTOR_CFG_NAME'],
                            config_dir=cfg['SIM_INSTRUMENT_CONFIGS_DIR_PATH'],
                            logger=logger)

    zspec_attr = zspec.getSystemAttr(cfg['PUPIL_REFERENCE_WAVELENGTH'])
    instrument.spectrograph.cfg['camera_EFFL'] = zspec_attr['camera_EFFL']
    instrument.spectrograph.cfg['collimator_EFFL'] = zspec_attr['collimator_EFFL']
    instrument.preoptics.cfg['magnification_across_slices'] = \
        instrument.preoptics.cfg['magnification_along_slices']
    # The following assumes diffraction-limited reimaging performance.
    instrument.preoptics.cfg['WFNO'] = cfg['IFU_SLICES_PER_RESEL'] * \
        instrument.ifu.cfg['slice_width_physical'] / \
        cfg['PUPIL_REFERENCE_WAVELENGTH']
    instrument.assemble()

    # Get wavelength range as Decimal.
    #
    waves = np.arange(cfg['SIM_WAVELENGTH_START'],
                      Decimal(cfg['SIM_WAVELENGTH_END'] +
                              cfg['SIM_WAVELENGTH_INTERVAL']),
                      Decimal(cfg['SIM_WAVELENGTH_INTERVAL']),
                      dtype=Decimal)

    # Determine the smallest pupil size (D_pupil) that corresponds to Nyquist
    # sampling at the detector given a camera focal length, f_cam, and some
    # reference wavelength, lambda.
    #
    # As we want to be AT LEAST Nyquist sampling at all wavelengths, we really
    # want to ensure that the system is Nyquist sampling at the shortest
    # wavelength. Doing so means that longer wavelengths will be oversampled.
    # The wavelength for which the pupil is constructed can be adjusted in
    # the configuration file as [pupil.reference_wavelength].
    #
    # We get [D_pupil], the pupil size for a single slice, by equating the
    # spatial size of 2 detector pixels (D_p) with the spatial size of one
    # resolution element, i.e.
    #
    #   2 * D_p = lambda * f_cam / D_pupil
    #
    pupil_physical_diameter = (cfg['PUPIL_REFERENCE_WAVELENGTH'] *
                               instrument.camera_EFFL) / \
        (2 * instrument.detector_pixel_pitch)
    pupil_physical_radius = pupil_physical_diameter / 2
    xtra_header_keys['EPD'] = (pupil_physical_diameter * 1e3,
                               "physical entrance pupil diameter (mm)")

    # Now find parameters with which we will rescale the image.
    #
    # As the angular size (and thus spatial size) of the resolution element is
    # dependent on lambda, we need to define a reference system through which
    # we can resample each wavelength. The wavelength this is done for is
    # defined in the configuration file as [pupil.resample_to_wavelength].
    #
    # To do this, we first establish the spatial pixel scale (m/px) for the
    # reference wavelength. The latter is then used to determine the scale
    # factor which needs to be applied to the wavelength currently being
    # considered. To avoid extrapolation, this reference wavelength should be
    # blueward of the smallest wavelength to be considered.
    #
    # All the information required to rescale is held in the [resampling_im]
    # instance.
    #
    logger.debug(" Ascertaining parameters to resample to " +
                 str(cfg['PUPIL_RESAMPLE_TO_WAVELENGTH'] * Decimal('1e9')) +
                 "nm")
    resampling_pupil = pupil_circular(logger, cfg['PUPIL_SAMPLING'],
                                      cfg['PUPIL_GAMMA'],
                                      pupil_physical_radius, verbose=True)
    preoptics_reimager = reimager(instrument.preoptics_WFNO)
    resampling_im = resampling_pupil.toConjugateImage(
        cfg['PUPIL_RESAMPLE_TO_WAVELENGTH'], preoptics_reimager, verbose=True)

    # Initialise datacube and run simulations for each wavelength.
    #
    # The result from a simulation is an image instance which we can append
    # to the datacube.
    #
    dcube = cube(logger)
    s = sim(logger, plotter, resampling_im, resampling_pupil, len(waves),
            preoptics_reimager, zspec, cfg, instrument)
    for idx, w in enumerate(waves):
        logger.info(" !!! Processing for a wavelength of " +
                    str(float(w) * 1e9) + "nm...")
        this_st = time.time()
        dcube.append(s.run(w, verbose=args.v))
        this_duration = time.time() - this_st
        if idx > 0:
            logger.debug(" Last wavelength iteration took " +
                         str(int(this_duration)) +
                         "s, expected time to completion is " +
                         str(int((len(waves) - (idx + 1)) * this_duration)) +
                         "s.")

    # Make and view output.
    #
    # Note that pyds9 interaction only works on systems with the XPA protocol
    # installed, and I've yet to find a way to get this working on a Windows
    # box.
    #
    dcube.write(args, cfg, xtra_header_keys)
    if args.d:
        import pyds9
        d = pyds9.DS9()
        d.set("file " + args.f)
        d.set('cmap heat')
        d.set('scale log')
        d.set('zoom 4')

    duration = time.time() - st
    logger.debug(" This simulation completed in " + str(sf(duration, 4)) + "s.")
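# A quick numerical check of the Nyquist pupil-sizing relation used above,
# with made-up values (lambda, f_cam and the pixel pitch are illustrative,
# not taken from any real configuration file):

# Nyquist condition: 2 * D_p = lambda * f_cam / D_pupil
#   => D_pupil = lambda * f_cam / (2 * D_p)
wavelength = 0.65e-6   # reference wavelength lambda (m)
camera_EFFL = 0.16     # camera effective focal length f_cam (m)
pixel_pitch = 15e-6    # detector pixel pitch D_p (m)

D_pupil = wavelength * camera_EFFL / (2 * pixel_pitch)
print("Entrance pupil diameter: %.2f mm" % (D_pupil * 1e3))
# -> Entrance pupil diameter: 3.47 mm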
def main():
    sim()
print("SIMPLE EPIDEMIC TRANSMISION MODEL") print("Human to human disease simulation") print("---------------------------------") print("---https://github/rvalla/SETM----") print("#################################") #Running desired number of simulations... for i in range(simulationsCount): print("", end="\n") print("Starting simulation number " + str(i + 1), end="\n") simulationName = simulationsName simulationName = simulationsName + "_" + str(i) if i == 0: dt.saveConfigStart(simulationsPopulation, simulationsPeriod, simulationName, areaBDensity, runGovActions, \ govActions, govFailureList, autoIsolationThreshold, startingImmunity, behavior, behaviorTrigger, \ behaviorOff, behaviorFactor) s = sim(simulationsPopulation, simulationsPeriod, i + 1, casesCeroCount, simulationName, areaBDensity, \ runGovActions, govActions, govFailureList, autoIsolationThreshold, startingImmunity, behavior, \ behaviorTrigger, behaviorOff, behaviorFactor) vz.getFileNames(simulationName) vz.populationVisualization(simulationName) govActionsCycles = sim.getGovActionsCycles() behaviorCycles = sim.getBehaviorCycles() print("Building data visualization...", end="\r") vz.simulationVisualization(simulationName, runGovActions, govActionsCycles, behavior, behaviorCycles, simulationsPopulation) vz.infectionsVisualization(simulationName, simulationsPeriod, runGovActions, govActionsCycles, behavior, behaviorCycles) print("Data visualization complete! ", end="\n") print("", end="\n")
def run_analysis(data_failure, data_downtime, rep=100):
    global res_failure_model, res_downtime_model
    global gamma, alpha, beta
    global availability, predictions, kaplan_meier_obj
    global renewal_prop, num_failures
    global gammaLower, gammaUpper, alphaLower, alphaUpper, betaLower, betaUpper
    global conf_int_A

    # Build the cumulative failure times T from the inter-failure times.
    T = list()
    for i in range(len(data_failure)):
        if i == 0:
            T.append(data_failure[0])
        else:
            T.append(data_failure[i] + T[-1])

    # Select the failure model by AIC; the downtime model has its own selector.
    res_failure_model = modsel.akaike_information_criterion(
        mle.failure_models(T))
    # res_failure_model = mle.failure_models(T)[1]
    res_downtime_model = downtime.downtime_accepted_models(data_downtime)[1]

    gamma, alpha, beta = kpicalc.model_parameters(res_failure_model, disp=False)[0]
    ci_params = kpicalc.model_parameters(res_failure_model, disp=False)[1]
    gammaLower, gammaUpper = ci_params[0]
    alphaLower, alphaUpper = ci_params[1]
    betaLower, betaUpper = ci_params[2]

    T_G, S = simulation.failure_repair_process(data_failure, data_downtime)
    S = [0] + S
    T_G_R, S_R, X_R, D_R, T_R = simulation.sim(rep, res_failure_model,
                                               res_downtime_model,
                                               numfail=len(T),
                                               timeHorizon=max(T_G))

    # t = kpicalc.time_axis(1, T_G)
    t = linspace(0, max(T_G)).tolist()
    A, conf_int_A = kpicalc.A_Sim(t, T_G_R, S_R)
    availability = (t, A)

    predictions = [
        pred.point_predictor(T[-1], res_failure_model),
        pred.interval_predictor(T[-1], res_failure_model)
    ]
    kaplan_meier_obj = [
        kpicalc.reliability(n, len(data_failure) + 1, X_R)
        for n in range(1, len(data_failure) + 2)
    ]
    renewal_prop = kpicalc.rs_method(500, T, res_failure_model)
    num_failures = kpicalc.num_failures(T)

    btn_results["state"] = "normal"
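# The running-sum loop above is equivalent to an accumulate over the
# inter-failure times; a minimal standalone check, with made-up data values
# for illustration:

from itertools import accumulate

data_failure = [5.0, 3.0, 8.0]        # hypothetical inter-failure times
T = list(accumulate(data_failure))    # cumulative failure times
print(T)                              # -> [5.0, 8.0, 16.0]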
import analysis
import simulation

G = 20
K = 40
Beta = [0.25, 0.5, 0.75]
Alpha = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]

anaRes = []
simRes = []
for beta in Beta:
    anaRes.append([analysis.ana(K, G, alpha, beta) for alpha in Alpha])
    simRes.append([int(simulation.sim(K, G, alpha, beta)) for alpha in Alpha])

# Print the results as MATLAB-style assignments.
print('K2Galpha = ', Alpha, ';')
print('K2Gbeta = ', Beta, ';')
print('K2GanaRes = [', end=' ')
for lst in anaRes:
    print(lst, ';')
print('];')
print('K2GsimRes = [', end=' ')
for lst in simRes:
    print(lst, ';')
print('];')