def statistics(filename, rndgen, samsize, interval):
    """
    Sample *samsize* values from *rndgen* and record every consecutive pair
    (u1, u2) whose coordinates both fall inside *interval*, appending the
    accepted pairs to *filename* as CSV in bounded-memory batches.

    :param filename: (str) path of the output CSV file (emptied first).
    :param rndgen: random generator exposing ``rnd()``.
    :param samsize: (int) total number of random values to draw.
    :param interval: (low, high) inclusive acceptance bounds for both coordinates.
    """
    empty_file(filename)
    header = ["u1", "u2"]
    low, high = interval[0], interval[1]
    observed = []
    found = 0
    u1 = rndgen.rnd()
    for i in range(1, samsize):
        u2 = rndgen.rnd()
        if low <= u1 <= high and low <= u2 <= high:
            found += 1
            observed.append((u1, u2))
        print_progress(i, samsize, message="Found {}".format(found))
        # Pairs overlap: the second coordinate becomes the first of the next pair.
        u1 = u2
        # Flush periodically to bound memory. ">=" is defensive: the original
        # "==" relies on at most one append between checks to hit the threshold.
        if len(observed) >= MAX_OBSERVATIONS_BEFORE_FLUSH:
            save_csv(filename, header, observed, append=True)
            observed.clear()
    # Flush the final partial batch, if any.
    if observed:
        save_csv(filename, header, observed, append=True)
def statistics(generator, samsize, bins, d):
    """
    Compute the chi-square statistic for every stream of *generator*.

    :param generator: multi-stream random generator exposing
        ``get_nstreams()`` and ``stream(i)``.
    :param samsize: (int) sample size per stream.
    :param bins: number of bins for the observations.
    :param d: dimension parameter forwarded to ``observations``.
    :return: list of (stream index, chi-square value) tuples, one per stream.
    """
    results = []
    nstreams = generator.get_nstreams()
    for idx in range(nstreams):
        generator.stream(idx)
        observed = observations(generator, samsize, bins, d)
        results.append((idx, _compute_chisquare_statistic(observed, samsize)))
        print_progress(idx, nstreams)
    return results
def get_mc_multipliers(modulus):
    """
    Generate a list of MC multipliers w.r.t the specified modulus.

    :param modulus: a prime number.
    :return: (List) MC multipliers w.r.t modulus.
    """
    multipliers = []
    for candidate in range(1, modulus):
        if is_mc_multiplier(candidate, modulus):
            multipliers.append(candidate)
        print_progress(candidate, modulus)
    return multipliers
def find_jumpers(modulus, multiplier, streams):
    """
    Find all jumpers.

    :param modulus: (int) the modulus.
    :param multiplier: (int) the multiplier.
    :param streams: (int) the number of streams.
    :return: a list of all modulus compatible jumpers, as (jumper, jump size) pairs.
    """
    found = []
    max_jump_size = int((modulus + 1) / streams)
    current = 1
    # Walk successive powers of the multiplier via _g; keep those that are
    # themselves MC multipliers, paired with their jump size.
    for jump_size in range(1, max_jump_size + 1):
        current = _g(current, multiplier, modulus)
        if is_mc_multiplier(current, modulus):
            found.append((current, jump_size))
        print_progress(jump_size, max_jump_size)
    return found
def get_fp_multipliers(modulus):
    """
    Generate a list of FP multipliers w.r.t the specified modulus.

    :param modulus: a prime number.
    :return: (List) FP multipliers w.r.t modulus.
    """
    fp_multipliers = []
    first_fpm = None
    # Linear scan for the smallest FP multiplier; stop at the first hit.
    # Progress is only reported for rejected candidates (the hit breaks first).
    for i in range(1, modulus):
        if is_fp_multiplier(i, modulus):
            first_fpm = i
            break
        print_progress(i, modulus)
    if first_fpm is not None:
        # NOTE(review): 1 is added explicitly only when it is itself the first
        # FP multiplier — presumably generate_fp_multipliers does not emit the
        # seed value itself; confirm against its implementation.
        if first_fpm == 1:
            fp_multipliers.append(first_fpm)
        # Derive the remaining FP multipliers from the first one found.
        generated_fpm = generate_fp_multipliers(first_fpm, modulus)
        fp_multipliers.extend(generated_fpm)
    return fp_multipliers
def __init__(self, config, name="SIMULATION-CLOUD-CLOUDLET"):
    """
    Create a new simulation.

    :param config: the configuration for the simulation.
    :param name: the name of the simulation.
    """
    self.name = name

    # Configuration - General
    config_general = config["general"]
    # NOTE(review): the `is` comparisons below assume config["general"]["mode"]
    # is already a SimulationMode enum member, not a raw string — confirm
    # against the config loader.
    self.mode = config_general["mode"]

    # Configuration - Transient Analysis
    if self.mode is SimulationMode.TRANSIENT_ANALYSIS:
        # Run until the simulated clock reaches t_stop; batch-means machinery
        # is effectively disabled (unbounded batches of size 1).
        self.t_stop = config_general["t_stop"]
        # self.t_tran = 0
        self.batches = INFINITE
        self.batchdim = 1
        # Lambdas defer evaluation: calendar/metrics are created further down,
        # but are only dereferenced when these callables run.
        self.closed_door_condition = lambda: self.closed_door_condition_transient_analysis()
        self.print_progress = lambda: print_progress(
            self.calendar.get_clock(), self.t_stop,
            message="Clock: %d" % (self.calendar.get_clock()))
        # self.should_discard_transient_data = False

    # Configuration - Performance Analysis
    elif self.mode is SimulationMode.PERFORMANCE_ANALYSIS:
        # Run until the configured number of batches is collected; no clock limit.
        self.t_stop = INFINITE
        # self.t_tran = config_general["t_tran"]
        self.batches = config_general["batches"]
        self.batchdim = config_general["batchdim"]
        self.closed_door_condition = lambda: self.closed_door_condition_performance_analysis()
        self.print_progress = lambda: print_progress(
            self.metrics.n_batches, self.batches,
            message="Clock: %d | Batches: %d | CurrentBatchSamples: %d" % (
                self.calendar.get_clock(), self.metrics.n_batches,
                self.metrics.curr_batchdim),
        )
        # self.should_discard_transient_data = self.t_tran > 0.0
    else:
        raise RuntimeError(
            "The current version supports only TRANSIENT_ANALYSIS and PERFORMANCE_ANALYSIS"
        )

    # Configuration - Randomization
    # The generator class is looked up by name inside the rndgen module and
    # instantiated with the configured seed.
    self.rndgen = getattr(rndgen, config_general["random"]["generator"])(
        config_general["random"]["seed"])

    # The simulation metrics
    self.metrics = SimulationMetrics(self.batchdim)
    self.confidence = config_general["confidence"]

    # Configuration - Tasks
    # Checks that the arrival process is Markovian (currently, the only one supported)
    if not all(variate is Variate.EXPONENTIAL for variate in [
            config["arrival"][tsk]["distribution"]
            for tsk in TaskScope.concrete()
    ]):
        raise NotImplementedError(
            "The current version supports only exponential arrivals")
    self.taskgen = Taskgen(rndgen=self.rndgen, config=config["arrival"])

    # Configuration - System (Cloudlet and Cloud)
    config_system = config["system"]
    self.system = System(rndgen=self.rndgen, config=config_system,
                         metrics=self.metrics)

    # Configuration - Calendar
    # Notice that the calendar internally manages:
    # (i) event sorting, by occurrence time.
    # (ii) scheduling of only possible events, that are:
    # (ii.i) possible arrivals, i.e. arrivals with occurrence time lower than stop time.
    # (ii.ii) departures of possible arrivals.
    # (iii) unscheduling of events to ignore, e.g. completion in Cloudlet of interrupted tasks.
    self.calendar = Calendar(t_clock=0.0)

    # Sampling management
    self.sampling_file = None

    # Simulation management
    self.closed_door = False