Example #1
0
def compute_safeties_by_list(a_list, max_of_validator, n):

    safeties = Manager().dict()

    # a_list is a list of batches; each batch gets its own set of worker processes.
    for batch in a_list:
        procs = list()
        for a in batch:

            proc = Process(target=safety_of_consensus,
                           args=(a, max_of_validator, n, safeties))
            procs.append(proc)
            proc.start()

        # wait for every worker in this batch to finish
        for proc in procs:
            proc.join()

    return safeties.copy()
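The same fan-out pattern, reduced to a self-contained sketch (the square() worker and its inputs are made up for illustration): each worker writes its result into the shared Manager().dict(), and copy() turns the proxy into a plain dict once every process has joined.

from multiprocessing import Manager, Process

def square(x, results):
    # Hypothetical worker: store the result keyed by its input.
    results[x] = x * x

if __name__ == "__main__":
    results = Manager().dict()
    procs = [Process(target=square, args=(x, results)) for x in range(4)]
    for proc in procs:
        proc.start()
    for proc in procs:
        proc.join()
    # Convert the proxy to a regular dict once all workers are done.
    print(results.copy())  # holds {0: 0, 1: 1, 2: 4, 3: 9}, possibly in another order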
Example #2
0
class EconomyFrame(Frame):
    def __init__(self, map_limits, queue, shutdown):

        Frame.__init__(self)

        self.circle = dict()

        self.map_limits = map_limits

        self.map_type_of_agent = Manager().dict()

        self.agent_type_color_map = \
            {0: "black",
             1: "blue",
             2: "green"}
        self.information_getter = InformationGetter(self.map_type_of_agent,
                                                    queue, shutdown)
        self.information_getter.start()

        self.timer = QTimer()
        self.timer.setInterval(0)
        self.timer.timeout.connect(self.repaint)
        self.timer.start()

    def draw(self):

        map_type_of_agent = self.map_type_of_agent.copy()

        for position, to_print in map_type_of_agent.items():

            rectangle = QRect(position[0] * self.circle["width"],
                              position[1] * self.circle["height"],
                              self.circle["width"], self.circle["height"])

            agent_type = to_print[0]
            agent_object = to_print[1]

            # QPainter angles are given in 1/16ths of a degree,
            # so 180 * 16 sweeps half of the circle.
            set_angle = 0
            size = 180 * 16

            # First half-pie: colored according to agent_type.
            self.painter.setBrush(
                self.brush[self.agent_type_color_map[agent_type]])
            self.painter.drawPie(rectangle, set_angle, size)

            # Second half-pie (the opposite half): colored according to agent_object.
            set_angle = 180 * 16
            size = 180 * 16
            self.painter.setBrush(
                self.brush[self.agent_type_color_map[agent_object]])
            self.painter.drawPie(rectangle, set_angle, size)

            # self.painter.drawEllipse(rectangle)

    def adapt_size_to_window(self, window_size):

        for i in ["width", "height"]:
            self.circle[i] = window_size[i] / float(self.map_limits[i])

        self.repaint()
Example #4
0
class SimController:
    """ wraps RunnableSimulation to be controlled by a sim-server """
    def __init__(self, simulation: RunnableSimulation) -> None:

        self.simulation = simulation

        self.sim_process: Optional[Process] = None
        self.shared_state: dict = Manager().dict()
        self.should_run = Value(c_bool, False)
        self.factor: SyncedFloat = SyncedFloat._create_factor_instance()

    def start_simulation_process(self):
        self.reset_sim()
        self._setup_sim()
        self.sim_process.start()
        return "simulation process started"

    def _setup_sim(self):
        self.shared_state = Manager().dict()
        self.should_run.value = True
        self.sim_process = Process(
            target=self.simulation.simulate,
            args=(
                self.shared_state,
                self.should_run,
                self.factor,
            ),
        )

    def pause_sim(self):
        self.should_run.value = False
        return "paused"

    def resume_sim(self):
        self.should_run.value = True
        return "resumed"

    def reset_sim(self):
        if self.sim_process is not None and self.sim_process.is_alive():
            self.sim_process.terminate()
            self.sim_process = None
        self.should_run.value = False
        return "reset"

    def get_state_dumps(self):
        return json.dumps(self.shared_state.copy())

    def set_rt_factor(self, value: float):
        return "set factor to " + str(self.factor.set_value(value))

    def get_sim_width(self) -> int:
        return self.simulation.width

    def get_sim_height(self) -> int:
        return self.simulation.height
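A minimal sketch of the pause/resume mechanism used above, with a stand-in simulate() loop (the real RunnableSimulation and SyncedFloat are not shown here): the child polls a shared Value(c_bool) flag and only advances the shared state while the flag is set.

from ctypes import c_bool
from multiprocessing import Manager, Process, Value
from time import sleep

def simulate(shared_state, should_run):
    # Stand-in simulation loop: advance a step counter while the flag is set.
    step = 0
    while True:
        if should_run.value:
            step += 1
            shared_state["step"] = step
        sleep(0.01)

if __name__ == "__main__":
    shared_state = Manager().dict()
    should_run = Value(c_bool, True)
    proc = Process(target=simulate, args=(shared_state, should_run), daemon=True)
    proc.start()
    sleep(0.2)
    should_run.value = False                 # pause_sim()
    sleep(0.1)
    print("paused at step", shared_state.get("step"))
    should_run.value = True                  # resume_sim()
    sleep(0.2)
    print("resumed, now at step", shared_state.get("step"))
    proc.terminate()                         # reset_sim()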
Example #5
0
def compute_test_safeteis_by_list(a_list, max_of_validator, n):

    safeties = Manager().dict()
    procs = list()
    for idx, a in enumerate(a_list):
        proc = Process(target=test_caluclate,
                       args=(a, max_of_validator, n, safeties, idx))
        procs.append(proc)
        proc.start()

    for proc in procs:
        proc.join()

    return safeties.copy()
Example #6
0
def cat_score(eula, cat, ret_var, thread_semaphore):

    # Create our own manager for our subprocesses
    ret_vars = Manager().dict()

    # Extract the heuristics from the class
    heuristics = cat.get_heuristics()

    # Create a process declaration for each category in the above array
    processes = []
    for heur in heuristics.keys():
        # Describe the process, giving the eula
        processes.append(
            Process(target=heur_score,
                    args=(heur, eula, thread_semaphore, ret_vars)))

    # Start processes in order of above array
    for process in processes:
        # Acquire the semaphore before starting
        thread_semaphore.acquire()
        # Start the process once the semaphore is acquired
        process.start()

    # Join each process so we don't exit until all are done
    for process in processes:
        process.join()

    # Processing is done, so convert into regular dict
    ret_vars = ret_vars.copy()

    # Calculate the weighted score for each result, but only if the score is
    # valid. Convert to float to keep the decimals until the return.
    weighted_scores = {
        k: float(v['score']) / v['max'] * heuristics[k]
        for k, v in ret_vars.items() if v['score'] >= 0
    }
    # Sum the weights of the scores we are using to calculate overall
    sum_weights = sum(
        {x: heuristics[x]
         for x in heuristics.keys() if x in weighted_scores}.values())
    # Return the overall weighted score which exists on a scale of [0-4]
    weighted_score = int((4 * sum(weighted_scores.values()) /
                          sum_weights) if sum_weights > 0 else -1)

    # Map the class definitions to their names for returning
    ret_var[cat.__name__.lower()] = {
        'weighted_score': weighted_score,
        'heuristics': list(ret_vars.values())
    }
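A small worked check of the weighting arithmetic above, using made-up heuristic names, weights, and scores: an invalid score is dropped, the remaining scores are weighted, and the total is scaled to the 0-4 range.

# Hypothetical input: two heuristics with weights 2 and 1.
heuristics = {'h1': 2, 'h2': 1}
ret_vars = {'h1': {'score': 3, 'max': 4}, 'h2': {'score': -1, 'max': 4}}

# 'h2' is dropped because its score is negative.
weighted_scores = {
    k: float(v['score']) / v['max'] * heuristics[k]
    for k, v in ret_vars.items() if v['score'] >= 0
}                                                        # {'h1': 1.5}
sum_weights = sum(heuristics[k] for k in weighted_scores)  # 2
weighted_score = int(4 * sum(weighted_scores.values()) / sum_weights)
print(weighted_score)                                    # 4 * 1.5 / 2 = 3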
Example #7
0
def analyze_eula(eula):
    # Categories to analyse, these will be done in parallel
    categories = [
        formal.Formal, procedural.Procedural, substantive.Substantive
    ]

    # Create a semaphore to limit number of running processes
    running = BoundedSemaphore(int(os.getenv('analyze_max_threads', 1)))

    # We cannot return values from the child processes directly, so create a
    # managed dictionary to pass the results back through
    ret_vars = Manager().dict()

    # Create a process declaration for each category in the above array
    processes = []
    for cat in categories:
        # Allocate a space in the dictionary for their return values
        ret_vars[cat.__name__.lower()] = None
        # Describe the process, giving it the eula, the semaphore, and the return dict
        processes.append(
            Process(target=cat_score, args=(eula, cat, ret_vars, running)))

    # Start processes in order of above array
    for process in processes:
        # cat_score itself throttles its heuristic workers with the semaphore
        process.start()

    # Join each process so we don't exit until all are done
    for process in processes:
        process.join()

    # De-parallelize dict now that we are done
    ret_vars = ret_vars.copy()

    # Calculate overall score by summing the weighted score of each category then dividing by number of categories
    # i.e. simple average
    overall_score = int(
        sum(map(lambda x: x['weighted_score'], ret_vars.values())) /
        len(ret_vars))
    grades = ['F', 'D', 'C', 'B', 'A']

    return {
        'title': eula.title,
        'url': eula.url,
        'overall_score': overall_score,
        'overall_grade': grades[overall_score],
        'categories': ret_vars
    }
Example #8
0
    class QdiscRedStatCollector:
        def __init__(self, get_stats_func, parent):
            self._get_stats_func = get_stats_func
            self._stop_flag = Value("b", True)
            self._stats = Manager().dict()
            self._p = None
            self._old_stats = {}
            self._backlogs = None
            self._parent = parent

        def _collect_qdisc_red_stats(self, stop_flag, stats, backlogs):
            while not stop_flag.value:
                new_stats = self._get_stats_func(self._parent)
                if new_stats == {}:
                    continue
                if new_stats["tx_packets"] > stats["tx_packets"]:
                    stats.update(new_stats)
                    backlogs.append(stats["backlog"])
                sleep(0.25)

        def start(self):
            if not self._stop_flag.value:
                return

            self._stats.clear()
            self._backlogs = Manager().list()
            self._old_stats = self._get_stats_func(self._parent)
            self._stats.update(self._old_stats)
            self._stop_flag.value = False
            self._p = Process(target=self._collect_qdisc_red_stats,
                              args=(self._stop_flag, self._stats, self._backlogs))
            self._p.start()

        def stop(self):
            if self._stop_flag.value:
                return

            self._stop_flag.value = True
            self._p.join(0)
            backlogs = list(self._backlogs)
            stats = self._stats.copy()
            for key in self._old_stats.keys():
                if key not in stats:
                    continue
                if isinstance(stats[key], int):
                    stats[key] -= self._old_stats[key]
            return (backlogs, stats)
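The collector above follows a common stop-flag pattern; a stripped-down, self-contained sketch (with an invented _poll() worker) looks roughly like this: the child polls a shared Value until the parent raises the flag, appending samples to a Manager().list() along the way.

from multiprocessing import Manager, Process, Value
from time import sleep

def _poll(stop_flag, samples):
    # Invented worker: record one sample per iteration until the flag is raised.
    while not stop_flag.value:
        samples.append(len(samples))
        sleep(0.25)

if __name__ == "__main__":
    stop_flag = Value("b", False)
    samples = Manager().list()
    p = Process(target=_poll, args=(stop_flag, samples))
    p.start()
    sleep(1)
    stop_flag.value = True   # signal the collector to stop
    p.join()
    print(list(samples))     # roughly four samples collected over one second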
Example #9
0
    def multiproecess_parse(self):

        img_names = os.listdir(self.image_dir)

        results = Manager().dict()
        pool = Pool(os.cpu_count())

        total_images = len(img_names)
        bucket_size = total_images // os.cpu_count() + 1

        for i in range(os.cpu_count()):
            bucket = img_names[i * bucket_size:(i + 1) * bucket_size]
            pool.apply_async(parse, args=(self.http_url, results, bucket))

        pool.close()
        pool.join()

        print(results)

        with open('findings.json', 'w') as f:
            json.dump(results.copy(), f)
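Unlike a plain dict, whose writes would stay inside each worker process, a Manager().dict() proxy can be handed to Pool workers and collects their results, which is what makes the bucketed pattern above work. A self-contained sketch of the same idea, with an invented parse_chunk() worker:

from multiprocessing import Manager, Pool

def parse_chunk(results, names):
    # Invented worker: record the length of each name in the shared dict.
    for name in names:
        results[name] = len(name)

if __name__ == "__main__":
    names = ['a.png', 'bb.png', 'ccc.png', 'dddd.png']
    results = Manager().dict()
    pool = Pool(2)
    bucket_size = 2
    for i in range(2):
        pool.apply_async(parse_chunk,
                         args=(results, names[i * bucket_size:(i + 1) * bucket_size]))
    pool.close()
    pool.join()
    # Keys may appear in any order depending on which worker finishes first.
    print(results.copy())  # {'a.png': 5, 'bb.png': 6, 'ccc.png': 7, 'dddd.png': 8}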
print "total_rented_modelruns_allowed", total_rented_modelruns_allowed
print "budget_period_min", budget_period_min
print "budget_period_sec", budget_period_sec
print "time_interval_sec", time_interval_sec
print "mu_own", mu_own
print "mu_own", mu_rent
print "mu", mu
print "lamda", lamda

# print '\nTotal Money: ', total_budget, '$'
# print 'Rate of Container: ', instance_rate,' $/hr'
# print 'Average time for modelrun : ', tavg_modelrun_secs,' sec'
# print 'Maximum rented modelruns  : ', total_rented_modelruns_allowed
# print 'Calculated time interval  : ', time_interval_sec,' sec'
print 'Total Models Allowed ', total_rented_modelruns_allowed

thread.start_new_thread(poisson_job_generator, ())
thread.start_new_thread(rentedContainerCreation, ())
thread.start_new_thread(plot_completedjobs_time, ())

# Thread(target = poisson_job_generator).start()
# Thread(target = rentedContainerCreation).start()
# Thread(target = plot_completedjobs_time).start()

while True:
    for jobId in taskMap.copy():
        if celery.AsyncResult(jobId).state == 'SUCCESS':
            if jobId not in completedjobs:
                completedjobs.append(jobId)
                taskMap.pop(jobId, 0)
Example #11
0
class MpVarMass:
    def __init__(self):
        self._lc = Manager().Lock()
        self._vrs = Manager().dict()

    def lock_func(self, f, *args, **kwargs):
        # Run f under the shared lock, releasing it even if f raises.
        self._lc.acquire(blocking=True)
        try:
            return f(*args, **kwargs)
        finally:
            self._lc.release()

    def _set_var(self, var: Variable):
        vr = self._vrs.copy()
        if var.name not in vr:
            return False
        self._vrs[var.name] = var
        return True

    def set_var(self, var: Variable):
        return self.lock_func(self._set_var, var)

    def _add_var(self, var: Variable):
        vr = self._vrs.copy()
        if var.name in vr:
            return False
        self._vrs[var.name] = var
        return True

    def add_var(self, var: Variable):
        return self.lock_func(self._add_var, var)

    def _del_var(self, name):
        vr = self._vrs.copy()
        if name in vr:
            self._vrs.pop(name)
            return True
        return False

    def del_var(self, name):
        return self.lock_func(self._del_var, name)

    def _sim_iter(self, time_spend):
        vr = self._vrs.copy()
        tsp = 0.2
        for a in vr:
            wt = vr[a].simulate(time_spend)
            print(a, vr[a].value)
            self._vrs[a] = vr[a]
            if wt < tsp:
                tsp = wt
        return tsp

    def sim(self):
        swt = 0
        while True:
            swt = self.lock_func(self._sim_iter, swt)
            time.sleep(swt)

    def var_list(self):
        self._lc.acquire()
        vr = self._vrs.copy()
        self._lc.release()
        rm = {}
        for a in vr:
            rm[a] = float(vr[a].value)
        return rm
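The lock above exists because a check followed by a write on a Manager().dict() is not atomic across processes. A minimal sketch of the same guard (add_once() and its arguments are invented for illustration):

from multiprocessing import Manager, Process

def add_once(lock, shared, key, value):
    # Invented helper: insert only if the key is absent, with the check and
    # the write performed atomically under the shared lock.
    with lock:
        if key in shared:
            return
        shared[key] = value

if __name__ == "__main__":
    manager = Manager()
    lock = manager.Lock()
    shared = manager.dict()
    procs = [Process(target=add_once, args=(lock, shared, "x", i)) for i in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(shared.copy())  # only one of the four competing writers wins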
Example #12
0
class Master(object):

    # Generating a constructor for the class:
    def __init__(self, args, masterfilepath, num_of_total_frames, output_directory):
        self.args = args
        self.masterfilepath = masterfilepath
        self.frames_per_degree = args.framesperdegree
        self.frames_per_well = int(args.oscillation * args.framesperdegree)
        self.frames_to_process = int(args.oscillationperwell * args.framesperdegree)
        self.total_frames = num_of_total_frames
        self.output = output_directory
        self.xdsparams = xds.get_xds_params(self.args)
        self.detparams = xds.get_detector_params(self.args.detector)


        # Variables defined within class:
        self.master_dictionary = Manager().dict()
        self.new_list = []

        # Functions called within class:
        self.create_master_directory(self.masterfilepath) # creating masterfile directories
        self.master_directory_path = self.get_master_directory_path(self.masterfilepath)

        # creating datawell directory and run XDS in it (with parallelization)
        self.run()
        self.write_to_json()

    def run(self):
        p = Pool()
        for framenum in range(1, self.total_frames, self.frames_per_well):
            p.apply_async(self.create_and_run_data_wells, args=(framenum, self.master_dictionary))
        p.close()
        p.join()

    def create_master_directory(self, masterfilepath):
        # Generate a name for masterfile directory:
        suffix = masterfilepath.stem.replace('_master','')
        new_dir_path = Path(self.output / suffix)
        # Create a masterfile directory:
        try:
            new_dir_path.mkdir()
        except OSError:
            sys.exit("Creation of the directory {} failed.".format(new_dir_path))

    def create_and_run_data_wells(self, framenum, md):
        # Generate datawell directories by creating instances of class called 'Datawell' (from datawell.py):
        print("Processing frames {}-{}...".format(framenum,framenum+self.frames_to_process-1))
        dw = datawell.Datawell(framenum, framenum+self.frames_to_process-1, self.master_directory_path,
                                    self.masterfilepath, self.xdsparams, self.detparams)
        dw.setup_datawell_directory()
        dw.gen_XDS()
        dw.run()
        #dw.check_and_rerun()
        md[dw.getframes()] = dw.get_dw_dict()


    def get_master_directory_name(self, masterfilepath):
        # str.strip() removes a set of characters, not a suffix, so derive the
        # name the same way the directory path is built.
        return masterfilepath.stem.replace('_master', '')

    def get_master_directory_path(self, masterfilepath):
        # Return the master directory path (stored on the instance in __init__).
        suffix = masterfilepath.stem.replace('_master', '')
        return Path(self.output / suffix)

    def get_master_dictionary(self):
        return self.master_dictionary.copy()

    def write_to_json(self):
        Path(self.master_directory_path / 'results_{b}.json'.format(b=self.get_master_directory_name(self.masterfilepath))).write_text(
            json.dumps(self.master_dictionary.copy(), indent=2, sort_keys=True, cls=JSONEnc)
        )