Example #1
    def __init__(self,
                 root,
                 window_size,
                 audio_loader,
                 examples_per_file=8,
                 blacklist_patterns=None,
                 loaded_files_buffer=10,
                 file_usages=10):
        assert isinstance(root, str)
        if blacklist_patterns is None:
            blacklist_patterns = []

        self.root = root
        self._window_size = window_size
        self._audio_loader = audio_loader
        self._loaded_files_buffer = loaded_files_buffer
        self._file_usages = file_usages

        self._index = 0
        self.filenames = []
        for ext in ['*.wav', '*.mp3']:
            self.filenames.extend(
                [filename for filename in Path(root).rglob(ext)])

        for pattern in blacklist_patterns:
            self.filenames = self.blacklist(self.filenames, pattern)

        self._loaded_files = dict()
        Worker.call(self._populateLoadedFiles).asDaemon.start()

        np.random.shuffle(self.filenames)
        self._examples_per_file = examples_per_file
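Examples #1, #2, and #15 come from the same dataset class, but the buffer helpers they call are not shown. As a rough sketch (an assumption, not the project's actual code), `_loadNewFile` and `_populateLoadedFiles` could look like this, given that `_usedFilename` in Example #15 treats each `_loaded_files` entry as `[usage_count, ...]` with the decoded audio presumably stored alongside the counter:

    def _loadNewFile(self):
        # Hypothetical helper: decode the next file and add it to the buffer.
        filename = self.filenames[self._index]
        self._index = (self._index + 1) % len(self.filenames)
        self._loaded_files[filename] = [0, self._audio_loader(filename)]

    def _populateLoadedFiles(self):
        # Hypothetical helper: keep the buffer topped up to its target size.
        while len(self._loaded_files) < self._loaded_files_buffer:
            self._loadNewFile()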
Example #2
    def _selectFile(self):
        loaded_files = list(self._loaded_files.keys())
        while len(list(self._loaded_files.keys())) == 0:
            self._loadNewFile()
            Worker.call(self._populateLoadedFiles).asDaemon.start()
            loaded_files = list(self._loaded_files.keys())

        return np.random.choice(loaded_files)
Example #3
 def start_stop(self):
     """
     Start the GOL simulation on a separate thread or stop it if it was already running
     """
     if not self._gol_model.get_running():
         self._gol_model.set_running(True)
         self._worker = Worker(self.single_step,
                               1 / self._gol_model.get_fps())
         self._worker.start()
     else:
         self._worker.stop()
         self._gol_model.set_running(False)
Example #4
    def test_many_download(self):
        q = Queue()
        q2 = Queue()

        selects = {}

        for i in range(10):
            choice = random.choice(list(test_urls.keys()))
            if choice in selects.keys():
                selects[choice] += 1
            else:
                selects[choice] = 1

            q.put((i, {"url": test_urls[choice]["url"], "dest": test_dest}))
            
        worker = Worker({"wait_task": 1, "wait_retry": 0, "max_retry": 1}, q, q2)
        worker.start()
        worker.join()

        self.assertTrue(q.empty())
        for key, value in selects.items():
            path = test_urls[key]["path"]
            dirname = FileManager.get_dirname(path)
            basename = FileManager.get_basename(path)
            for i in range(value):
                filepath = os.path.join(dirname, "{}_{}".format(i, basename)) if i != 0 else path 
                self.assertTrue(os.path.exists(filepath))
                with open(filepath, "rb") as f:
                    data = f.read() 
                    self.assertEqual(hashlib.md5(data).hexdigest(), test_urls[key]["md5"])
                FileManager.remove_file(filepath)
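These download tests suggest a small protocol: each work item is a `(task_id, info)` tuple whose `info` dict carries at least `url` and `dest`, and the worker reports `(task_id, record)` tuples on the second queue, with failures exposed under `record["error"]` (see Examples #8, #9, and #10). A minimal driver under those assumptions, with a placeholder URL and destination:

    from queue import Queue

    works, progresses = Queue(), Queue()
    works.put((0, {"url": "http://example.com/file.bin", "dest": "/tmp"}))

    worker = Worker({"wait_task": 0, "wait_retry": 0, "max_retry": 1}, works, progresses)
    worker.start()
    worker.join()

    task_id, record = progresses.get()  # one of possibly several progress records
    if "error" in record:
        print("download failed:", record["error"])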
Example #5
    def test_sftp_download(self):
        q = Queue()
        q2 = Queue()
        
        q.put((0, {"url": test_urls["sftp_small"]["url"], "dest": test_dest}))
        q.put((1, {"url": test_urls["sftp_pkey_small"]["url"], "dest": test_dest, 
                    "key_filename": test_urls["sftp_pkey_small"]["key_filename"], 
                    "passphrase": test_urls["sftp_pkey_small"]["key_filename"]}))

        worker = Worker({"wait_task": 0}, q, q2)
        worker.start()
        worker.join()

        self.assertTrue(q.empty())
        self.assertTrue(os.path.exists(test_urls["sftp_small"]["path"]))
        self.assertTrue(os.path.exists(test_urls["sftp_pkey_small"]["path"]))
        with open(test_urls["sftp_small"]["path"], "rb") as f:
            data = f.read() 
            self.assertEqual(hashlib.md5(data).hexdigest(), test_urls["sftp_small"]["md5"])
        with open(test_urls["sftp_pkey_small"]["path"], "rb") as f:
            data = f.read() 
            self.assertEqual(hashlib.md5(data).hexdigest(), test_urls["sftp_pkey_small"]["md5"])
        
        FileManager.remove_file(test_urls["sftp_small"]["path"])
        FileManager.remove_file(test_urls["sftp_pkey_small"]["path"])
Example #6
    def test_fail_download(self):
        works = Queue(maxsize=0)
        progresses = Queue(maxsize=0)

        for i, key in enumerate(test_urls):
            works.put((i + 1, {
                "url": test_urls[key]["url"],
                "dest": test_dest
            }))

        for i in range(4):
            worker = Worker({
                "wait_task": 1,
                "wait_retry": 0,
                "max_retry": 1
            },
                            works,
                            progresses,
                            test_net=True,
                            name="worker{}".format(i))
            worker.setDaemon(True)
            worker.start()

        visualizer = Visualizer(4, progresses, name="visualizer")
        visualizer.start()

        works.join()
        visualizer.join()

        self.assertTrue(works.empty())
        self.assertTrue(progresses.empty())
        self.assertEqual(visualizer.fail, 4)
        self.assertEqual(visualizer.fail, visualizer.task)
        self.assertEqual(len(visualizer.results), 4)
        self.assertTrue(len(os.listdir(test_dest)) == 0)
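The `Visualizer` used here is not part of these excerpts. Judging from the assertions (it exposes `task`, `success`, `fail`, and `results`, and both queues are empty after `join`), it most likely drains the progress queue on its own thread and tallies per-task outcomes. A speculative stand-in, purely to illustrate the assumed contract:

    import threading

    class Visualizer(threading.Thread):
        # Hypothetical stand-in inferred from the assertions in these tests.
        def __init__(self, task, progresses, name=None):
            super().__init__(name=name, daemon=True)
            self.task = task                # number of tasks expected
            self.progresses = progresses
            self.success = 0
            self.fail = 0
            self.results = {}               # task_id -> error, kept only for failures

        def run(self):
            finished = 0
            while finished < self.task:
                task_id, record = self.progresses.get()
                if "error" in record:        # terminal failure record
                    self.fail += 1
                    self.results[task_id] = record["error"]
                    finished += 1
                elif record.get("done"):     # assumed terminal success marker
                    self.success += 1
                    finished += 1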
Example #7
    def ssh_connect(self, args):
        ssh = SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        LOG.debug("ssh connect args: {}".format(args))
        dst_addr = args[:2]
        LOG.info("Connecting to {}:{}".format(*dst_addr))
        try:
            ssh.connect(*args, timeout=6)
        except socket.error:
            raise ValueError('Unable to connect to {}:{}'.format(*dst_addr))
        except paramiko.BadAuthenticationType:
            raise ValueError('Bad authentication type.')
        except paramiko.AuthenticationException:
            raise ValueError('Authentication failed.')
        except paramiko.BadHostKeyException:
            raise ValueError('Bad host key.')

        chan = ssh.invoke_shell(term='xterm')
        chan.setblocking(0)
        worker = Worker(self.loop, ssh, chan, dst_addr)
        worker.encoding = self.get_default_encoding(ssh)
        return worker
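`ssh_connect` unpacks `args` straight into `paramiko.SSHClient.connect`, whose positional order is `(hostname, port, username, password)`, and logs the first two as the destination address. A hypothetical call (the instance name and credentials are placeholders) would therefore look like:

    # Hypothetical usage; 'handler' stands in for whatever object defines ssh_connect.
    worker = handler.ssh_connect(('203.0.113.10', 22, 'alice', 'secret'))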
Example #8
    def test_404_download(self):
        q = Queue()
        q2 = Queue()
        q.put((0, {"url": "http://google.com/blah", "dest": test_dest}))

        worker = Worker({"wait_task": 0, "wait_retry": 0}, q, q2)
        worker.start()
        worker.join()

        self.assertTrue(q.empty())
        q2.get()
        self.assertIsInstance(q2.get()[1]["error"], requests.exceptions.HTTPError)
Example #9
    def test_no_dest(self):
        q = Queue()
        q2 = Queue()
        q.put((0, {"url": "url"}))
        q.put((1, {"url": "url", "dest": ""}))

        worker = Worker({"wait_task": 0}, q, q2)
        worker.start()
        worker.join()

        self.assertTrue(q.empty())
        self.assertIsInstance(q2.get()[1]["error"], NoDestinationPathException)
        self.assertIsInstance(q2.get()[1]["error"], NoDestinationPathException)
Example #10
    def test_unsupport_protocol(self):
        q = Queue()
        q2 = Queue()
        q.put((0, {"url": "url", "dest": test_dest}))
        q.put((1, {"url": "abc://path", "dest": test_dest}))

        worker = Worker({"wait_task": 0}, q, q2)
        worker.start()
        worker.join()

        self.assertTrue(q.empty())
        q2.get()
        self.assertIsInstance(q2.get()[1]["error"], UnsupportedProtocolException)
        q2.get()
        self.assertIsInstance(q2.get()[1]["error"], UnsupportedProtocolException)
Example #11
    def test_success_download(self):
        works = Queue(maxsize=0)
        progresses = Queue(maxsize=0)

        for i, key in enumerate(test_urls):
            works.put((i + 1, {
                "url": test_urls[key]["url"],
                "dest": test_dest
            }))

        for i in range(4):
            worker = Worker({
                "wait_task": 1,
                "wait_retry": 0,
                "max_retry": 1
            },
                            works,
                            progresses,
                            name="worker{}".format(i))
            worker.setDaemon(True)
            worker.start()

        visualizer = Visualizer(4, progresses, name="visualizer")
        visualizer.start()

        works.join()
        visualizer.join()

        self.assertTrue(works.empty())
        self.assertTrue(progresses.empty())
        self.assertEqual(visualizer.success, 4)
        self.assertEqual(visualizer.success, visualizer.task)
        self.assertTrue(not visualizer.results)
        for key in test_urls:
            self.assertTrue(os.path.exists(test_urls[key]["path"]))
            with open(test_urls[key]["path"], "rb") as f:
                data = f.read()
                self.assertEqual(
                    hashlib.md5(data).hexdigest(), test_urls[key]["md5"])

            FileManager.remove_file(test_urls[key]["path"])
Example #12
    def test_fail_download(self):
        q = Queue()
        q2 = Queue()

        selects = {}

        for i in range(10):
            choice = random.choice(list(test_urls.keys()))
            if choice in selects.keys():
                selects[choice] += 1
            else:
                selects[choice] = 1

            q.put((i, {"url": test_urls[choice]["url"], "dest": test_dest}))
            
        worker = Worker({"wait_task": 1, "wait_retry": 0, "max_retry": 1}, q, q2, test_net=True)
        worker.start()
        worker.join()

        self.assertTrue(q.empty())
        self.assertTrue(len(os.listdir(test_dest)) == 0)
Example #13
 def _task_action(self, f):
     w = Worker(driver=self.driver,
                device=self.device.get("deviceName"),
                reset=self.device.get("reset", None))
     w.execute(f=f)
Example #14
    saver = tf.train.Saver(max_to_keep=3, var_list=GLOBAL_AC.getVars+[global_step])
    GLOBAL_AC.InitializeVariablesFromFile(saver,MODEL_PATH)

    progbar = tf.keras.utils.Progbar(None, unit_name='Training',stateful_metrics=["Reward"])
    writer = tf.summary.FileWriter(LOG_PATH,graph=sess.graph)

    # Create workers
    workers = []
    for i in range(settings["NumberENV"]):
        i_name = 'W_%i' % i   # worker name
        network = Network(settings["NetworkConfig"],nActions,netConfigOverride,scope=i_name)
        Method = GetFunction(settings["Method"])
        localNetwork = Method(network,sess,stateShape=dFeatures,actionSize=nActions,scope=i_name,HPs=settings["NetworkHPs"],globalAC=GLOBAL_AC,nTrajs=nTrajs)
        localNetwork.InitializeVariablesFromFile(saver,MODEL_PATH)
        env,_,_,_ = CreateEnvironment(envSettings,multiprocessing=1)
        workers.append(Worker(localNetwork,env,sess,global_step,global_step_next,settings,progbar,writer,MODEL_PATH,saver))

InitializeVariables(sess)  # Included to catch any uninitialized variables.

COORD = tf.train.Coordinator()
worker_threads = []
for i,worker in enumerate(workers):
    if i==0:
        # Bind worker as a default argument so each thread runs its own worker,
        # not the loop variable's final value.
        job = lambda worker=worker: worker.work(COORD, render=args.render)
    else:
        job = lambda worker=worker: worker.work(COORD)
    t = threading.Thread(target=job)
    t.start()
    worker_threads.append(t)
COORD.join(worker_threads)
Example #15
 def _usedFilename(self, filename):
     self._loaded_files[filename][0] = self._loaded_files[filename][0] + 1
     if self._loaded_files[filename][0] >= self._file_usages:
         del self._loaded_files[filename]
         Worker.call(self._loadNewFile).asDaemon.start()
Example #16
last_minute = (work_hours + start_hour) * 60

prob_incoming_clients = [0.71, 0.23, 0.05, 0.01]

prob_service_type = [0.5, 0.4, 0.1]
avg_service_duration = [12, 15, 25]
stdev_service_duration = [0.8, 1.0, 3.0]

num_of_workers = 4
N = 0
clients_arrival_interval = 5
maxrow = 8

EVENTS = []
ROW = []
WORKERS = [Worker() for _ in range(num_of_workers)]

time = start_hour * 60
ncl = 0
N = 0
Nserved = 0
Nwaited = 0
Twaited = 0

EVENTS = []
ev = Event(time, 1, (1, ))
EVENTS = fill_events(EVENTS, ev)
ev = Event(last_minute, 8, (1, ))
EVENTS = fill_events(EVENTS, ev)

t = time + int(round(pr.exponential(clients_arrival_interval)))
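How these service parameters are consumed is not shown in this excerpt; one plausible reading (an assumption) is that a service type is drawn with `prob_service_type` and its duration sampled from a normal distribution, e.g.:

    # Hypothetical sampling step; 'pr' is assumed to be numpy.random, as in the
    # pr.exponential call above.
    service_type = pr.choice(len(prob_service_type), p=prob_service_type)
    duration = max(1, int(round(pr.normal(avg_service_duration[service_type],
                                          stdev_service_duration[service_type]))))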
Example #17
        # Quit if inputs is empty
        if not inputs:
            raise Exception("No inputs given")

        works = Queue(maxsize=0)
        progresses = Queue(maxsize=0)

        # Put work to Queue
        for i, info in enumerate(inputs.values()):
            works.put((i + 1, info))

        # Setup workers
        num_threads = min(config.get("max_worker", 5), len(inputs))
        for i in range(num_threads):
            worker = Worker(config,
                            works,
                            progresses,
                            name="worker{}".format(i))
            worker.setDaemon(True)
            worker.start()

        # Setup visualizer
        visualizer = Visualizer(len(inputs), progresses, name="visualizer")
        visualizer.start()

        # Wait until works Queue and visualizer finished
        works.join()
        visualizer.join()

    except FileNotFoundError as errf:
        print(errf)
    except Exception as e:
Example #18
class MainController:
    """
    Class representing the controller of the MVC pattern. It connects to the GUI signals to catch user
    interactions and modifies the GOL model accordingly.
    """
    def __init__(self, application: QApplication, main_window: MainWindow,
                 gol_model: GOLModel):
        self._application = application
        application.aboutToQuit.connect(self._stop_worker_on_app_closing)

        self._main_window = main_window
        main_window.connect_to_button_clear(self.clear_grid)
        main_window.connect_to_button_load(self.load_custom_pattern)
        main_window.connect_to_button_play(self.start_stop)
        main_window.connect_to_button_save(self.save_pattern)
        main_window.connect_to_button_step(self.single_step)
        main_window.connect_to_combo_patterns(self.select_example_pattern)
        main_window.connect_to_radio_age(self.toggle_show_cell_age)
        main_window.connect_to_slider_speed(self.set_speed)
        main_window.grid_widget.connect_to_cell_clicked(self.toggle_cell)

        self._gol_model = gol_model
        self._worker = None

        # Variables for the grid update
        self._conv_filter = np.ones((3, 3))
        self._conv_filter[1, 1] = 0

    def clear_grid(self):
        """
        Clear the GOL grid bringing it back to its initial state (depending on the chosen pattern)
        :return:
        """
        self.select_example_pattern(self._gol_model.get_base_pattern())
        self._main_window.show_message_on_status_bar("Grid cleared")

    def load_custom_pattern(self):
        """
        Load a pattern from a chosen file into the current GOL state
        :return:
        """
        file_path = QFileDialog.getOpenFileName(
            self._main_window,
            "Load pattern file",
            filter="Pattern File (*.cells)")[0]
        if file_path:
            self._main_window.reset_combo_patterns()
            if self._load_file(file_path):
                self._main_window.show_message_on_status_bar("Pattern loaded")

    def _load_file(self, file_path: str):
        """
        Helper method to load a pattern from a .cells file (plain text format).

        :param file_path: Path of the pattern file
        :return: False if the file is invalid or the pattern does not fit the current grid, otherwise True
        """
        grid_pattern = patterns.read_pattern_file(file_path)

        if grid_pattern is None:
            self._main_window.show_error_message("Invalid pattern file")
            return False
        else:
            grid_height, grid_width = self._gol_model.get_grid_size()
            pattern_height, pattern_width = grid_pattern.shape

            # If the pattern is bigger than the grid show an error
            if pattern_height > grid_height or pattern_width > grid_width:
                self._main_window.show_error_message(
                    "The loaded pattern is bigger than the available grid")
                return False
            else:
                # Copy the pattern at the center of a blank grid
                new_grid = np.zeros(self._gol_model.get_grid_size(), np.uint8)
                v_margin = (grid_height - pattern_height) // 2
                h_margin = (grid_width - pattern_width) // 2
                new_grid[v_margin:v_margin + pattern_height,
                         h_margin:h_margin + pattern_width] = grid_pattern

                self._gol_model.set_grid_as_numpy(new_grid)
                return True

    def save_pattern(self):
        """
        Save the current grid state in a .cells file as a reloadable state
        :return:
        """
        file_path = QFileDialog.getSaveFileName(
            self._main_window,
            "Save pattern file",
            filter="Pattern File (*.cells)")[0]
        if file_path:
            patterns.save_pattern_file(file_path,
                                       self._gol_model.get_grid_as_numpy())
            self._main_window.show_message_on_status_bar("Pattern saved")

    def select_example_pattern(self, pattern_name):
        """
        Load a predefined pattern chosen from the provided list
        :param pattern_name: The name of the pattern to load (it is the same as its file name)
        :return:
        """
        self._gol_model.set_base_pattern(pattern_name)
        # The selected pattern is the custom one: restart from a blank grid
        if pattern_name == "Custom":
            new_grid = np.zeros(self._gol_model.get_grid_size(), np.uint8)
            self._gol_model.set_grid_as_numpy(new_grid)
        else:
            file_path = os.path.join(config.ROOT_PATH, config.FOLDER_PATTERNS,
                                     pattern_name + ".cells")
            if not self._load_file(file_path):
                # Something went wrong during the pattern loading: select the Custom pattern
                self._main_window.reset_combo_patterns()

    def set_speed(self, speed):
        """
        Change the simulation speed
        :param speed: The simulation speed in FPS
        :return:
        """
        self._gol_model.set_fps(speed)
        if self._gol_model.get_running():
            self._worker.set_wait_time(1 / self._gol_model.get_fps())

    def single_step(self):
        """
        Perform an update step of the grid, applying the Game of Life rules.
        Besides calculating dead and living cells at the next time step, it also calculates the age of each cell
        (how many time steps it has been alive). The age ranges from 0 (dead) to 255 (ancient)
        """
        grid_curr_age = self._gol_model.get_grid_as_numpy()
        grid_curr_alive = grid_curr_age.astype(bool).astype(np.uint8)

        # Use convolution to calculate the number of neighbors for each cell
        grid_neighbors = ndimage.convolve(grid_curr_alive,
                                          self._conv_filter,
                                          mode="constant",
                                          cval=0)

        # Calculate which cells are born: a dead cell comes to life when it has exactly three neighbors
        grid_newborns = grid_neighbors == 3
        grid_newborns = np.logical_and(grid_newborns,
                                       np.logical_not(grid_curr_alive))

        # Calculate which cells survive: a living cell survives when it has two or three neighbors
        grid_survived = np.logical_and(grid_neighbors >= 2,
                                       grid_neighbors <= 3)
        grid_survived = np.logical_and(grid_survived, grid_curr_alive)

        # Calculate the living cells at the next step merging survived and newborn cells
        grid_next = np.logical_or(grid_newborns,
                                  grid_survived).astype(np.uint8)

        # Calculate the age of the cells in the new grid
        grid_next = grid_curr_age * grid_next + grid_next
        np.putmask(
            grid_next, grid_curr_age == 255, 255
        )  # Cap cells already at age 255, since the uint8 addition above would otherwise wrap them to 0

        self._gol_model.set_grid_as_numpy(grid_next)

    def start_stop(self):
        """
        Start the GOL simulation on a separate thread or stop it if it was already running
        """
        if not self._gol_model.get_running():
            self._gol_model.set_running(True)
            self._worker = Worker(self.single_step,
                                  1 / self._gol_model.get_fps())
            self._worker.start()
        else:
            self._worker.stop()
            self._gol_model.set_running(False)

    def _stop_worker_on_app_closing(self):
        if self._worker is not None:
            self._worker.stop()

    def toggle_show_cell_age(self, show_cell_age: bool):
        self._gol_model.set_show_cell_age(show_cell_age)

    def toggle_cell(self, cell_coord: tuple):
        """
        Toggle the state of the specified cell
        :param cell_coord: A tuple containing the cell coordinates as (row, column)
        """

        grid = self._gol_model.get_grid_as_numpy()
        row, col = cell_coord
        grid[row, col] = 0 if grid[row, col] else 1
        self._gol_model.set_grid_as_numpy(grid)
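The `Worker` thread driving the simulation in Examples #3 and #18 is not included in these excerpts. A minimal sketch of a compatible implementation, assuming it only needs to call the given function repeatedly with an adjustable delay (the attribute names are guesses based on the calls above):

    import threading
    import time

    class Worker(threading.Thread):
        # Hypothetical stand-in matching Worker(function, wait_time), start(),
        # stop() and set_wait_time() as used by MainController.
        def __init__(self, function, wait_time):
            super().__init__(daemon=True)
            self._function = function
            self._wait_time = wait_time
            self._stop_event = threading.Event()

        def set_wait_time(self, wait_time):
            self._wait_time = wait_time

        def stop(self):
            self._stop_event.set()

        def run(self):
            while not self._stop_event.is_set():
                self._function()
                time.sleep(self._wait_time)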
Example #19
import time
import json
import gmqtt

from utils.utils import run_event_loop, STOP, init_client
from utils.consts import WORKER_REGISTRED_TOPIC, WORKER_REGISTRATION_TOPIC, WORKER_UNREGISTER_TOPIC, \
    WORKER_RESULT_TOPIC, BALANCER_WORKER_TOPIC
from utils.worker import Worker

worker = Worker()


async def on_message(client, topic, payload, qos, properties):
    if topic == WORKER_REGISTRED_TOPIC:
        data = json.loads(payload.decode('utf-8'))
        worker_hex = data.get("worker_hex")
        worker_num = data.get('worker_num')

        if not worker.is_registered() and worker.worker_hex == worker_hex:
            worker.register(worker_num)
            client.subscribe(f'{worker.balancer_topic}', qos=1)
    elif topic == worker.balancer_topic and worker.is_registered():
        print(f'Worker {worker.number}. Publish.',
              f"Topic: '{worker.result_topic}'.", 'Payload:', payload)
        client.publish(worker.result_topic, payload, qos=1)
    return 0


async def main(broker_host, token):
    will_message = gmqtt.Message(WORKER_UNREGISTER_TOPIC,
                                 worker.worker_hex,
Example #20
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os, sys

# Add the wmg_agent dir to the system path.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# No need for argparse. All settings are contained in the spec file.
num_args = len(sys.argv) - 1
if num_args != 1:
    print('run.py accepts a single argument specifying the runspec.')
    exit(1)

# Read the runspec.
from utils.spec_reader import SpecReader

SpecReader(sys.argv[1])

# Execute the runspec.
from utils.worker import Worker

worker = Worker()
worker.execute()