Example no. 1
def state_estimation_start(user_id):
    """
    Actually start the process of state estimation.

    This saves a file called 'params.json' in /tmp/uncurl/<user_id>
    containing all parameters used in state estimation.
    """
    path = os.path.join(current_app.config['USER_DATA_DIR'], user_id)
    gene_names_file = os.path.join(path, 'gene_names.txt')
    if not os.path.exists(gene_names_file):
        gene_names_file = None
    # TODO: deal with init here - make note if it's qualitative or
    # quantitative
    # run qualNorm???
    init_path = os.path.join(path, 'init.txt')
    if not os.path.exists(init_path):
        init_path = None
    # load json params
    with open(os.path.join(path, 'preprocess.json')) as f:
        preprocess = json.load(f)
    for key in request.form.keys():
        preprocess[key] = request.form[key]
    # params.json contains all input parameters to the state estimation, as well as all stats from preprocess.json.
    with open(os.path.join(path, 'params.json'), 'w') as f:
        json.dump(preprocess, f)
    P = Process(target=state_estimation_thread,
                args=(user_id, gene_names_file, init_path, path, preprocess,
                      current_app.config.copy()))
    P.start()
    return redirect(url_for('views.state_estimation_result', user_id=user_id))
Example no. 2
    def start(self):
        playingfile = self.get_playing_file()
        if not (playingfile["file"].startswith(uni_join(const.libpath, const.provider)) or
                playingfile["file"].startswith(uni_join(const.addonpath, "resources"))):
            return
        kodi.log("start onPlayBackStarted")
        self.koala_playing = True

        self.player = Player()

        self.remote = None
        if kodi.settings["remote"]:
            self.remote = remote.Remote()
            self.remote.run(player=self.player)

        self.player.connect()

        if "NRK nett-TV.htm" not in playingfile["file"]:
            self.player.get_player_coord()
            self.player.wait_player_start()
            self.player.toggle_fullscreen()

        if playingfile["type"] == "episode":
            thread = Thread(target=self.monitor_watched, args=[playingfile])
            thread.start()

        kodi.log("finished onPlayBackStarted")
Example no. 3
def process_updates():
    """
    Decides which type the update is, routes it to the appropriate
    route_update method, and launches a thread for the run_extensions method.
    """
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    plugin_http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    plugin_http.timeout = urllib3.Timeout(connect=1.0)
    plugin_http.retries = 3
    update_router = RouteMessage(PLUGINS, plugin_http, GET_ME, CONFIG)
    while RUNNING.value:
        try:
            update = MESSAGE_QUEUE.get_nowait()
        except queue.Empty:
            time.sleep(SLEEP_TIME)
            continue
        extension_thread = ThreadProcess(target=run_extensions, args=(update, ))
        extension_thread.start()
        if 'message' in update:
            update_router.route_update(update['message'])
        elif 'edited_message' in update:
            update_router.route_update(update['edited_message'])
        elif 'callback_query' in update:
            route_callback_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['callback_query'])
        elif 'inline_query' in update:
            route_inline_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['inline_query'])
        extension_thread.join()
Example no. 4
    def __init__(self, ip, port, uri_opener, proxy_handler=w3afProxyHandler,
                 proxy_cert='core/controllers/daemons/mitm.crt'):
        '''
        :param ip: IP address to bind
        :param port: Port to bind
        :param uri_opener: The uri_opener that will be used to open
            the requests that arrive from the browser
        :param proxy_handler: A class that will know how to handle
            requests from the browser
        :param proxy_cert: Proxy certificate to use, this is needed
            for proxying SSL connections.
        '''
        Process.__init__(self)
        self.daemon = True
        self.name = 'ProxyThread'
        
        # Internal vars
        self._server = None
        self._proxy_handler = proxy_handler
        self._running = False
        self._uri_opener = uri_opener

        # User configured parameters
        self._ip = ip
        self._port = port
        self._proxy_cert = proxy_cert

        # Start the proxy server
        try:
            self._server = ProxyServer((self._ip, self._port),
                                       self._proxy_handler)
        except socket.error, se:
            raise w3afProxyException('Socket error while starting proxy: "%s"'
                                     % se.strerror)
Example no. 5
class Ticker(object):

    def __init__(self, api, interval=1):
        self.api = api
        self.db = MongoClient().poloniex['ticker']
        self.interval = interval

    def updateTicker(self):
        tick = self.api.returnTicker()
        for market in tick:
            self.db.update_one({'_id': market},
                               {'$set': tick[market]},
                               upsert=True)
        logger.info('Ticker updated')

    def __call__(self):
        return list(self.db.find())

    def run(self):
        self._running = True
        while self._running:
            self.updateTicker()
            sleep(self.interval)

    def start(self):
        self._thread = Thread(target=self.run)
        self._thread.daemon = True
        self._thread.start()
        logger.info('Ticker started')

    def stop(self):
        self._running = False
        self._thread.join()
        logger.info('Ticker stopped')
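A minimal usage sketch for the Ticker above, assuming a local MongoDB instance and a Poloniex-style api object exposing returnTicker() (logger setup omitted):

import poloniex  # assumed third-party client used elsewhere in these listings

api = poloniex.Poloniex()        # any object with a returnTicker() method works
ticker = Ticker(api, interval=2)
ticker.start()                   # daemon thread begins polling and upserting
print(ticker())                  # read the cached markets back out of MongoDB
ticker.stop()                    # clear the flag and join the thread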
Example no. 7
def state_estimation_input():
    user_id = str(uuid.uuid4())
    if 'username' in request.form:
        if len(request.form['username']) > 0:
            # make username a safe string
            keep_chars = set(['-', '_', ' '])
            username = request.form['username'].strip()[:25]
            username = ''.join(
                [c for c in username if c.isalnum() or (c in keep_chars)])
            user_id = user_id + '-' + username
    base_path = os.path.join(current_app.config['USER_DATA_DIR'], user_id)
    os.makedirs(base_path)
    # save request.form
    with open(os.path.join(base_path, 'inputs.json'), 'w') as f:
        f.write(json.dumps(request.form))
    # TODO: if file is large, start a new thread. otherwise just
    # run the thing
    request_file = request.files
    request_form = request.form
    data_paths, gene_paths, output_filenames, init, shapes = load_upload_data(
        request_file, request_form, base_path)
    # TODO: deal with init
    P = Process(target=state_estimation_preproc,
                args=(user_id, base_path, data_paths, gene_paths,
                      output_filenames, init, shapes))
    P.start()
    #state_estimation_preproc(user_id, path)
    return redirect(url_for('views.state_estimation_result', user_id=user_id))
Example no. 8
    def _cmd_start(self, params):
        """
        Start the core in a different thread, monitor keystrokes in the main
        thread.

        :return: None
        """
        # Check if the console output plugin is enabled or not, and warn.
        output_plugins = self._w3af.plugins.get_enabled_plugins('output')
        if 'console' not in output_plugins:
            msg = "Warning: You disabled the console output plugin. If you"\
                  " start a new scan, the discovered vulnerabilities won\'t be"\
                  " printed to the console, we advise you to enable at least"\
                  " one output plugin in order to be able to actually see the"\
                  " the scan output."
            print msg

        # Note that I'm NOT starting this in a new multiprocess Process
        # please note the multiprocessing.dummy , this is required because
        # I want to start new threads inside this thread and there is a bug
        # with that http://bugs.python.org/issue10015
        self._scan_thread = Process(target=self._real_start)
        self._scan_thread.name = 'ConsoleScanThread'
        self._scan_thread.daemon = True
        self._scan_thread.start()

        # let the core thread start
        time.sleep(1)

        try:
            if self._w3af.status.get_status() != 'Not running.':
                self.show_progress_on_request()
        except KeyboardInterrupt:
            om.out.console('User pressed Ctrl+C, stopping scan.')
            self._w3af.stop()
Example no. 9
def runReadData(printBool, maxIter=50):
    '''
    Run localization on real measured data.
    :param printBool: [bool] whether to print the output
    :param maxIter: [int] maximum number of iterations
    :return:
    '''
    snesorDict = {'imu': 'LSM6DS3TR-C', 'magSensor': 'AK09970d'}
    readObj = ReadData(snesorDict)  # create the object that reads the data

    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    magBg = multiprocessing.Array('f', [0] * 6)
    state0 = multiprocessing.Array('f', [0, 0, 0.01, 1, 0, 0, 0])

    readObj.send()
    pRec = Process(target=readObj.receive, args=(outputData, magBg, None))
    # pRec.daemon = True
    pRec.start()
    time.sleep(2)

    pTrack3D = multiprocessing.Process(target=track3D, args=(state0, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    while True:
        measureData = np.concatenate((outputData[:3], outputData[6:9]))
        LM(state0, measureData, 7, maxIter, printBool)
        time.sleep(0.1)
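Several of these listings share state through multiprocessing.Array: a typed buffer in shared memory that both the child process and the parent loop can see. A self-contained miniature of that mechanism:

import multiprocessing
import time

def writer(buf):
    for i in range(3):
        buf[0] = float(i)          # updates are visible to the parent
        time.sleep(0.1)

if __name__ == '__main__':
    shared = multiprocessing.Array('f', [0.0] * 4)   # four shared floats
    p = multiprocessing.Process(target=writer, args=(shared,))
    p.start()
    p.join()
    print(list(shared))            # [2.0, 0.0, 0.0, 0.0]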
Example no. 10
 def new_process(cls, *args, **kwargs):
     process = Process(target=BaseCoroutine.start_coroutine,
                       args=(cls, ) + args,
                       kwargs=kwargs)
     process.daemon = True
     process.start()
     return process
Example no. 11
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.active = True
        self.withdraw()

        self.main_window = Window.MainWindow(self)
        self.main_window.abtn.config(command=self.do_something)

        self.main_window.protocol("WM_DELETE_WINDOW", self.on_closing)
        self.main_window.btn_exit.config(command=self.on_closing)
        self.main_window.abtn.config(command=self.connect_all)

        self.devices = []
        for name, adr in SRXAddresses:
            dev = SRXdev.SRXDevice(name, adr, username, password)
            sub_fr = Window.SubFrame(self.main_window, name, adr, '    N/A    ')
            action_with_arg = partial(self.connect_device, dev, sub_fr)
            sub_fr.abtn.config(command=action_with_arg)
            action1_with_arg = partial(self.get_curr_gw, dev, sub_fr)
            sub_fr.cbtn.config(command=action1_with_arg)
            action2_with_arg = partial(self.reset_ospf, dev, sub_fr)
            sub_fr.dbtn.config(command=action2_with_arg)
            sub_fr.setBad()
            self.devices.append([dev, sub_fr])

        process = Process(target=self.check_status, args=())
        process.start()
Example no. 12
def start_websockify():
    print('start vnc proxy..')

    t = Process(target=worker, args=())
    t.start()

    print('vnc proxy started..')
Example no. 13
def main():
    snesorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(snesorDict)
    # outputDataSigma = multiprocessing.Array('f', [0] * len(snesorDict) * 24)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)

    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()
    # receive data in a new process
    pRec = Process(target=readObj.receive,
                   args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()

    pTrack3D = multiprocessing.Process(target=track3D, args=(state, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    mp = MahonyPredictor(q=state[3:], Kp=100, Ki=0.01, dt=0.002)
    while True:
        # print("a={}, w={}".format(np.round(outputData[:3], 2), np.round(outputData[3:6], 2)))
        mp.getGyroOffset(outputData[3:6])
        mp.IMUupdate(outputData[:3], outputData[3:6])
        state[3:] = mp.q
        time.sleep(0.08)
Example no. 16
 def start(self):
     self.raft_thread = Thread(target=self.start_raft, args=())
     self.raft_thread.daemon = True
     self.raft_thread.start()
     self.monitor_thread = Thread(target=self.run_monitor, args=())
     # self.monitor_thread.daemon = True
     self.monitor_thread.start()
Example no. 17
class Worker(object):
    def __init__(self, address, name):
        self.__address = address
        self.__socket = socket.socket()
        self.__name = name
        self.__working = False
        self.__process = None
        self.__result = ''

    @property
    def working(self):
        return self.__working

    @property
    def result(self):
        return self.__result

    def __connect__(self):
        self.__socket.connect(self.__address)

    def __send__(self, message):
        message = message.encode()
        self.__socket.send(message)

    def join(self):
        self.__process.join()

    def __recv__(self):
        text = b''
        while True:
            data = self.__socket.recv(8192)
            if not data:
                break
            text += data
            if len(data) < 8192:
                break
        text = text.decode()
        return text

    def __work__(self, message):
        t1 = time.time()
        self.__socket = socket.socket()
        self.__connect__()
        self.__send__(message)
        self.__result = self.__recv__()
        t2 = time.time()
        self.__socket.close()
        #logger.debug(self.__result)
        logger.debug(self.__name + ' end work at ' + str(t2 - t1))
        self.__working = False

    def start(self, message):
        self.__result = ''
        self.__working = True
        self.__process = Process(target=self.__work__,
                                 args=(message, ),
                                 name=self.__name + '_thread')
        self.__process.start()
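A hypothetical driver for the Worker above (the address is illustrative; the peer is assumed to echo a response and close the connection):

worker = Worker(('127.0.0.1', 9000), 'worker-1')
worker.start('ping')    # __work__ runs on a thread-backed Process
worker.join()           # wait for the send/receive round trip to finish
print(worker.result)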
Example no. 18
    def __init__(self, ip, port, uri_opener, handler_klass=ProxyHandler,
                 ca_certs=CA_CERT_DIR, name='ProxyThread'):
        """
        :param ip: IP address to bind
        :param port: Port to bind
        :param uri_opener: The uri_opener that will be used to open
                           the requests that arrive from the browser
        :param handler_klass: A class that will know how to handle
                              requests from the browser
        """
        Process.__init__(self)
        self.daemon = True
        self.name = name
        
        # Internal vars
        self._server = None
        self._running = False
        self._uri_opener = uri_opener
        self._ca_certs = ca_certs

        # Stats
        self.total_handled_requests = 0

        # User configured parameters
        try:
            self._config = ProxyConfig(cadir=self._ca_certs,
                                       ssl_version_client='SSLv23',
                                       ssl_version_server='SSLv23',
                                       host=ip,
                                       port=port)
        except AttributeError as ae:
            if str(ae) == "'module' object has no attribute '_lib'":
                # This is a rare issue with the OpenSSL setup that some users
                # (mostly in mac os) find. Not related with w3af/mitmproxy but
                # with some broken stuff they have
                #
                # https://github.com/mitmproxy/mitmproxy/issues/281
                # https://github.com/andresriancho/w3af/issues/10716
                #
                # AttributeError: 'module' object has no attribute '_lib'
                raise ProxyException(self.INCORRECT_SETUP % ae)

            else:
                # Something unexpected, raise
                raise

        # Setting these options together with ssl_version_client and
        # ssl_version_server set to SSLv23 means that the proxy will allow all
        # types (including insecure) of SSL connections
        self._config.openssl_options_client = None
        self._config.openssl_options_server = None

        # Start the proxy server
        try:
            self._server = ProxyServer(self._config)
        except socket.error, se:
            raise ProxyException('Socket error while starting proxy: "%s"'
                                 % se.strerror)
Example no. 19
    def start_work(self):
        logging.warning(settings.PROJECT_NAME + ' start work')
        self.scheduler.put_task(self.__class__.start_request)
        if self.start_monitor:
            self.monitor_process = Process(target=self.monitor.start_work, name="monitor")
            self.monitor_process.daemon = True
            self.monitor_process.start()

        self.__start_process()
Example no. 20
    def process_request(self, request, client_address):
        """
        Start a new thread to process the request.

        Override here
        """
        t = Process(target=self.process_request_thread, args=(request, client_address))
        t.daemon = self.daemon_threads
        t.start()
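This override mirrors socketserver.ThreadingMixIn.process_request, with a thread-backed Process standing in for threading.Thread. A minimal sketch of how it might slot into a standard-library server (the echo handler is illustrative):

import socketserver
from multiprocessing.dummy import Process   # thread-backed Process

class EchoHandler(socketserver.BaseRequestHandler):
    def handle(self):
        self.request.sendall(self.request.recv(1024))

class ThreadedEchoServer(socketserver.ThreadingTCPServer):
    def process_request(self, request, client_address):
        # One thread-backed Process per incoming connection.
        t = Process(target=self.process_request_thread,
                    args=(request, client_address))
        t.daemon = self.daemon_threads
        t.start()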
Example no. 21
def new_send_email(email):
    if not IS_ACTIVE:
        new_send_email.queue = Manager().Queue()
        process = Process(target=process_sent_queue,
                          args=(new_send_email.queue, ))
        process.daemon = True
        process.start()
        models.IS_ACTIVE = True
    new_send_email.queue.put(email)
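new_send_email lazily starts a single consumer the first time it is called. The real process_sent_queue lives elsewhere in the project; an illustrative consumer loop might look like this (smtp_deliver is a hypothetical stand-in for the project's actual sender):

def smtp_deliver(email):
    # Hypothetical delivery helper; stands in for the real sender.
    print('delivering', email)

def process_sent_queue(queue):
    # Illustrative consumer: drain the shared queue forever.
    while True:
        email = queue.get()       # blocks until new_send_email() enqueues
        smtp_deliver(email)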
Example no. 22
class Ticker(object):
    def __init__(self):
        self.ticker = poloniex.Poloniex().returnTicker()
        self._appRunner = ApplicationRunner(u"wss://api.poloniex.com:443",
                                            u"realm1")
        self._appProcess, self._tickThread = None, None
        self._running = False

    def __call__(self):
        return self.ticker

    def tickCatcher(self):
        print("Catching...")
        while self._running:
            try:
                tick = queue.get(timeout=1)
            except:
                continue
            else:
                self.ticker[tick[0]] = {
                    'last': tick[1],
                    'lowestAsk': tick[2],
                    'highestBid': tick[3],
                    'percentChange': tick[4],
                    'baseVolume': tick[5],
                    'quoteVolume': tick[6],
                    'isFrozen': tick[7],
                    'high24hr': tick[8],
                    'low24hr': tick[9],
                    'id': self.ticker[tick[0]]['id']
                }
        print("Done catching...")

    def start(self):
        """ Start the ticker """
        print("Starting ticker")
        self._appProcess = Process(target=self._appRunner.run,
                                   args=(TickPitcher, ))
        self._appProcess.daemon = True
        self._appProcess.start()
        self._running = True
        print('TICKER: tickPitcher process started')
        self._tickThread = Thread(target=self.tickCatcher)
        self._tickThread.daemon = True
        self._tickThread.start()
        print('TICKER: tickCatcher thread started')

    def stop(self):
        """ Stop the ticker """
        print("Stopping ticker")
        self._appProcess.terminate()
        print("Joining Process")
        self._appProcess.join()
        print("Joining thread")
        self._running = False
        self._tickThread.join()
        print("Ticker stopped.")
Example no. 23
 def manager(self):
     try:
         putter_process = Process(target=self.put_queue)
         getter_process = Process(target=self.get_queue)
         putter_process.start()
         getter_process.start()
         putter_process.join()
     except Exception as e:
         raise Exception(e.args[0])
Example no. 25
def main():
    """
    Creates instances of the above methods and occasionally checks for crashed
    worker processes & relaunches them.
    """
    worker_process = list()
    get_update_process = Process(target=get_updates)
    get_update_process.start()
    for i in range(0, int(CONFIG['BOT_CONFIG']['workers'])):
        worker_process.append(Process(target=process_updates))
        worker_process[i].start()
    time_worker = ThreadProcess(target=check_time_args)
    time_worker.start()
    while RUNNING.value:
        time.sleep(30)
        for index, worker in enumerate(worker_process):
            if not worker.is_alive():
                del worker_process[index]
                worker_process.append(Process(target=process_updates))
                worker_process[-1].start()
        if not time_worker.is_alive():
            time_worker = ThreadProcess(target=check_time_args)
            time_worker.start()
        if not get_update_process.is_alive():
            get_update_process = Process(target=get_updates)
            get_update_process.start()
    get_update_process.join()
    time_worker.join()
    for worker in worker_process:
        worker.join()
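One caveat in the supervision loop above: calling del worker_process[index] while enumerate() walks the same list shifts the remaining elements, so the entry right after a dead worker is skipped for that pass. A safer restart step, as a sketch:

# Sketch: rebuild the worker list instead of mutating it mid-iteration.
alive = []
for worker in worker_process:
    if worker.is_alive():
        alive.append(worker)
    else:
        replacement = Process(target=process_updates)
        replacement.start()
        alive.append(replacement)
worker_process = alive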
Example no. 27
 def __init__(self, players, starting_player=0):
     """
     ===============  =======================================================
     players          list of :class: `Player <tichu.player.Player>` 
                      objects; representing the players in their turn 
                      order.  
     starting_player  int; todo
     ===============  =======================================================
     """
     self.players = players
     self.trick_process = Process(target=self._trickloop)
Example no. 28
def start(addr='0.0.0.0', port=8080, model_name='default'):
    global _model_ref
    global _started
    global _proc_ref
    
    if not _started:
        _model_ref = model_name
        _started = True
        _proc_ref = Process(target=_run_service, args=(addr, port))
        _proc_ref.start()
    return
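start() relies on module-level globals as a one-shot guard. For symmetry, a hedged sketch of the stop() such a module might pair with it; no such function appears in the listing itself, and it assumes _proc_ref is a real multiprocessing.Process:

def stop():
    global _started, _proc_ref
    if _started and _proc_ref is not None:
        _proc_ref.terminate()   # assumption: a real multiprocessing.Process
        _proc_ref.join()
        _proc_ref = None
        _started = False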
Example no. 29
def qual2quant_input():
    if 'fileinput' not in request.files or 'qualinput' not in request.files:
        return error('Missing data input', 400)
    cell_file = request.files['fileinput']
    qual_file = request.files['qualinput']
    cell_data = np.loadtxt(cell_file)
    qual_data = np.loadtxt(qual_file)
    user_id = str(uuid.uuid4())
    P = Process(target=qual2quant_thread, args=(cell_data, qual_data, user_id))
    P.start()
    return redirect(url_for('qual2quant_result', user_id=user_id))
Example no. 30
    def _draw_start(self):
        if not self._somethingnew:
            return True

        # let's draw!
        q = Queue.Queue()
        evt = Event()
        th = Process(target=self._draw_real, args=(q, evt), name='GTKDraw')
        th.start()
        gobject.timeout_add(500, self._draw_end, q, evt)
        return False
Example no. 31
    def __init__(self, exec_method, ip_address, socks_port=1080):
        Process.__init__(self)
        self.daemon = True

        #    Configuration
        self._exec_method = exec_method
        self._ip_address = ip_address
        self._socks_port = socks_port

        #    Internal
        self._agent_server = None
Example no. 34
 def bonus_check(self):
     catcha = pygame.sprite.spritecollideany(self.player, self.bonus)
     if catcha is not None:
         if pygame.sprite.collide_circle_ratio(1.5)(self.player, catcha):
             if catcha.type == Bonus.POWER_M:
                 self.player.power += 10
             elif catcha.type == Bonus.POWER_L:
                 self.player.power += 50
             #TODO etc...
             p = Process(target=catcha.suck, args=(self.player.topleft, ))
             p.start()
Example no. 35
    def __init__(self, func, *args):
        Process.__init__(self)
        self.daemon = True
        self._app = current_app._get_current_object()
        self._func = func
        self._args = args
        self._LogGuid = g.LogGuid
        self._LangCode = g.LangCode
        self._UserIP = request.remote_addr
        self._method = request.full_path

        self.start()
Example no. 36
def main():
    snesorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(snesorDict)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(snesorDict) * 24)

    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()
    # receive data in a new process
    pRec = Process(target=readObj.receive,
                   args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()
    time.sleep(0.5)

    pTrack3D = multiprocessing.Process(target=track3D, args=(state, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    i = 0
    bw = np.zeros(3)
    qEKF = QEKF()
    while True:
        for j in range(4):
            # print("w={}".format(np.round(outputData[3+6*j:6*(j+1)], 2)))
            if i < 100:
                bw += outputData[3 + 6 * j:6 * (j + 1)]
                i += 1
                if i == 100:
                    bw /= i
                    qEKF.bw = bw
                    print("get gyroscope bias:{}deg/s".format(bw))
            else:
                w = outputData[3 + 6 * j:6 * (j + 1)]
                wb = w - bw
                qEKF.F = qEKF.Fx(qEKF.dt, wb)
                print('time={:.4f}: wb={}, q={}'.format(
                    time.time(), np.round(qEKF.wb, 2), np.round(qEKF.x, 3)))
                qEKF.predict()
                qNorm = np.linalg.norm(qEKF.x)
                qEKF.x = qEKF.x / qNorm
                state[3:7] = qEKF.x[:]

                aNorm = np.linalg.norm(outputData[6 * j:6 * j + 3])
                qEKF.z = np.array(outputData[6 * j:6 * j + 3]) / aNorm
                qEKF.update(qEKF.z, HJacobian, Hx, qEKF.R)
                qNorm = np.linalg.norm(qEKF.x)
                qEKF.x = qEKF.x / qNorm
                state[3:7] = qEKF.x[:]
            time.sleep(0.037)
Example no. 37
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0):
    if num_cores < 2:
        from multiprocessing.dummy import Process, Queue
        SimpleQueue = Queue
    else:
        from .backport import Process, SimpleQueue, Queue

    first, records = peek(records)
    if first is None:
        raise BlockingError("No records have been blocked together. "
                            "Is the data you are trying to match like "
                            "the data you trained on?")

    record_pairs_queue = Queue(2)
    score_queue = SimpleQueue()
    result_queue = SimpleQueue()

    n_map_processes = max(num_cores, 1)
    score_records = ScoreDupes(data_model, classifier, threshold)
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    [process.start() for process in map_processes]

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception):
        raise ChildProcessError

    if result:
        scored_pairs_file, dtype, size = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype,
                                    shape=(size,))
    else:
        dtype = numpy.dtype([('pairs', object, 2),
                             ('score', 'f4', 1)])
        scored_pairs = numpy.array([], dtype=dtype)

    reduce_process.join()
    [process.join() for process in map_processes]

    return scored_pairs
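The shape of scoreDuplicates is a classic fan-out/fan-in: several mapper processes drain one queue while a single reducer merges their output. The same skeleton in miniature, with hypothetical names (dummy Process objects are threads, so the plain results list is safely shared):

from multiprocessing.dummy import Process, Queue

def mapper(inq, outq):
    for item in iter(inq.get, None):   # None is the stop sentinel
        outq.put(item * item)
    outq.put(None)                     # signal this mapper is done

def reducer(outq, n_mappers, results):
    done = 0
    while done < n_mappers:
        item = outq.get()
        if item is None:
            done += 1
        else:
            results.append(item)

inq, outq, results = Queue(), Queue(), []
mappers = [Process(target=mapper, args=(inq, outq)) for _ in range(2)]
[m.start() for m in mappers]
red = Process(target=reducer, args=(outq, 2, results))
red.start()
for i in range(5):
    inq.put(i)
for _ in mappers:
    inq.put(None)                      # one sentinel per mapper
red.join()
print(sorted(results))                 # [0, 1, 4, 9, 16]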
Example no. 38
def _add_doi(metadata, identifier, citekey):
    """Add an entry from a DOI."""
    info_messages = []
    with StatusMessage('Querying DOI metadata...') as message:
        if metadata.doi_exists(identifier):
            raise ZoiaAddException(f'DOI {identifier} already exists.')

        # Query Semantic Scholar to get the corresponding arxiv ID (if there is
        # one) in a separate thread.
        arxiv_queue = ThreadQueue()
        arxiv_process = ThreadProcess(
            target=lambda q, x: q.put(requests.get(x)),
            args=(
                arxiv_queue,
                f'https://api.semanticscholar.org/v1/paper/{identifier}',
            ),
        )
        arxiv_process.start()

        doi_metadata = _get_doi_metadata(identifier)

        metadatum = zoia.backend.metadata.Metadatum.from_dict(doi_metadata)

        if citekey is None:
            citekey = zoia.parse.citekey.create_citekey(metadata, metadatum)

        paper_dir = os.path.join(metadata.config.library_root, citekey)
        os.mkdir(paper_dir)

        message.update(
            'Querying Semantic Scholar for corresponding arXiv ID...')
        arxiv_metadata_response = arxiv_queue.get()
        arxiv_process.join()

        arxiv_metadata = json.loads(arxiv_metadata_response.text)

        if (arxiv_id := arxiv_metadata.get('arxivId')) is not None:
            doi_metadata['arxiv_id'] = arxiv_id
            message.update('Downloading PDF from arXiv...')
            pdf_response = requests.get(
                f'https://arxiv.org/pdf/{arxiv_id}.pdf')

            if pdf_response.status_code == 200:
                with open(os.path.join(paper_dir, 'document.pdf'), 'wb') as fp:
                    fp.write(pdf_response.content)
                doi_metadata['pdf_md5'] = hashlib.md5(
                    pdf_response.content).hexdigest()
            else:
                info_messages.append('Was unable to fetch a PDF')

        metadata[citekey] = doi_metadata
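The arxiv_queue/arxiv_process pair above is a compact way to get a return value out of a thread: the target drops its result into a queue the caller holds. The trick in isolation (the URL is illustrative):

from multiprocessing.dummy import Process as ThreadProcess
from queue import Queue as ThreadQueue
import requests

q = ThreadQueue()
t = ThreadProcess(target=lambda q, url: q.put(requests.get(url)),
                  args=(q, 'https://example.com'))
t.start()
# ... other work can overlap with the request here ...
response = q.get()   # blocks until the worker thread delivers
t.join()
print(response.status_code)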
Example no. 39
    def test_pause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL('http://moth/')
            http_response = uri_opener.GET(url)
            output.put(http_response)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)
Example no. 40
File: nba.py Project: agnimit/cs411
def get_stats():
	print 'Fetching NBA player stats...'
	stats_outfile = RUNDAY+'_nba_stats.csv'
	csvout = open(stats_outfile, 'wb')

	NUM_THREADS = 8

	in_queue = Queue()
	out_queue = Queue()
	queue_players(in_queue)

	while not in_queue.empty():	
		jobs = []

		for i in range(NUM_THREADS):
			if not in_queue.empty():
				thread = Process(target=get_stats_helper, args=(in_queue, out_queue))
				jobs.append(thread)
				thread.start()
		for thread in jobs:
			thread.join()	

		while not out_queue.empty():
			player = out_queue.get()
			del player['SUCCESS']
			try: 
				name = player['NAME']
			except KeyError as e:
				continue
			player['TIME'] = RUNDAY
			fieldnames = [
				'TIME',
				'NAME', 
				'JERSEY',
				'SPORT',
				'TEAM',
				'POSITION',
				'PTS',
				'REB',
				'AST',
				'URL'
			]
		
			csvwriter = csv.DictWriter(csvout, delimiter='|', fieldnames=fieldnames)
			csvwriter.writerow(player)
	csvout.close()

	print 'Finished fetching NBA player stats.'
	print 'Output saved in %s' % stats_outfile
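One sharpening note on the drain loop above: fieldnames and the csv.DictWriter are rebuilt for every player row. Hoisting both out of the loop is the usual shape; a simplified sketch (drain_players is a hypothetical refactor of the inner while loop):

import csv

def drain_players(csvout, out_queue, runday):
    # Build the writer once, then reuse it for every row.
    fieldnames = ['TIME', 'NAME', 'JERSEY', 'SPORT', 'TEAM',
                  'POSITION', 'PTS', 'REB', 'AST', 'URL']
    csvwriter = csv.DictWriter(csvout, delimiter='|', fieldnames=fieldnames)
    while not out_queue.empty():
        player = out_queue.get()
        player['TIME'] = runday
        csvwriter.writerow(player)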
Example no. 41
    def __init__(self):

        pool = Pool(processes=2)
        self.graph = getGraph()

        files = findFiles(opts)

        self.progressQueue = Queue()
        reporter = Process(target=ProgressReport,
                           args=(self.progressQueue, len(files)))
        reporter.start()
        result = pool.map(self.cacheFile, enumerate(files), chunksize=5)
        self.progressQueue.put('END')
        log.info("finished, %s results", len(result))
        reporter.join()
Example no. 43
 def start(self):
     """ Start Loaner.thread"""
     self._thread = Thread(target=self._run)
     self._thread.daemon = True
     self._running = True
     self._thread.start()
     logging.info(P('LOANER:')+C(' started'))
Example no. 45
class Ticker(object):
	""" Ticker object for controlling the ticker thread and subprocess
		Holds poloniex ticker dict under self.markets"""
	def __init__(self):
		self._tickerP, self._tickerT = [None, None]
		self.markets = poloniex.Poloniex().marketTicker()
		
	def startTicker(self):
		""" Starts the 'tickcatcher' subprocess and 'tickCatcher' thread"""
		self._tickerP = Popen(["python", "tickcatcher.py"], stdout=PIPE, bufsize=1)
		print('TICKER: tickcatcher subprocess started')
		
		self._tickerT = Thread(target=self.tickCatcher)
		self._tickerT.daemon = True
		self._tickerT.start()
		print('TICKER: tickCatcher thread started')
	
	def stopTicker(self):
		""" Stops the ticker subprocess"""
		self._tickerP.terminate()
		self._tickerP.kill()
		print('TICKER: Ticker subprocess stopped')
		self._tickerT.join()
		print('TICKER: Ticker thread joined')
	
	def tickCatcher(self):
		with self._tickerP.stdout:
			for line in iter(self._tickerP.stdout.readline, b''):
				try:
					tick = json.loads(line[25:]) # shave off twisted timestamp (probably a better way to remove the timestamp...)
					self.markets[tick[0]] = {
							'last':tick[1], 
							'lowestAsk':tick[2], 
							'highestBid':tick[3], 
							'percentChange':tick[4], 
							'baseVolume':tick[5], 
							'quoteVolume':tick[6], 
							'isFrozen':tick[7], 
							'high24hr':tick[8], 
							'low24hr':tick[9],
							'id':self.markets[tick[0]]['id']
							}
				except Exception as e:
					print(e)
				
		self._tickerP.wait()
Example no. 46
File: core.py Project: Oge77/dedupe
def scoreDuplicates(records, data_model, classifier, num_cores=1, threshold=0) :
    if num_cores < 2 :
        from multiprocessing.dummy import Process, Pool, Queue
        SimpleQueue = Queue
    else :
        from .backport import Process, Pool, SimpleQueue

    record_pairs_queue = SimpleQueue()
    score_queue =  SimpleQueue()
    result_queue = SimpleQueue()

    n_map_processes = max(num_cores-1, 1)
    score_records = ScoreRecords(data_model, classifier, threshold) 
    map_processes = [Process(target=score_records,
                             args=(record_pairs_queue,
                                   score_queue))
                     for _ in range(n_map_processes)]
    [process.start() for process in map_processes]

    reduce_process = Process(target=mergeScores,
                             args=(score_queue,
                                   result_queue,
                                   n_map_processes))
    reduce_process.start()

    fillQueue(record_pairs_queue, records, n_map_processes)

    result = result_queue.get()
    if isinstance(result, Exception) :
        raise ChildProcessError

    if result :
        scored_pairs_file, dtype = result
        scored_pairs = numpy.memmap(scored_pairs_file,
                                    dtype=dtype)
    else :
        scored_pairs = result

    reduce_process.join()
    [process.join() for process in map_processes]

    return scored_pairs
Example no. 47
def scan_stop(scan_id):
    """
    Stop a scan

    :param scan_id: The scan ID to stop
    :return: Empty result if success, 403 if the current state indicates that
             the scan can't be stopped.
    """
    scan_info = get_scan_info_from_id(scan_id)
    if scan_info is None:
        abort(404, 'Scan not found')

    if not scan_info.w3af_core.can_stop():
        abort(403, 'Scan cannot be stopped')

    t = Process(target=scan_info.w3af_core.stop, name='ScanStopThread', args=())
    t.daemon = True
    t.start()

    return jsonify({'message': 'Stopping scan'})
Example no. 48
def stat_files():
	all_files = []
	for root, dirs, files in os.walk('/home/gzguoyubo/mf/tw2/res/entities/custom_type'):
		ignore = False
		for ig_path in ignore_paths:
			if ig_path in root:
				ignore = True
		if ignore:
			continue
		for fname in files:
			if not fname.endswith('.py'):
				continue
			abs_file_path = join(root, fname)
			all_files.append(abs_file_path)
	
	file_sections = []
	file_total_nums = len(all_files)
	for i in xrange(P_NUM):
		start = i * file_total_nums / P_NUM
		stop = start + file_total_nums / P_NUM
		if i == P_NUM - 1:
			stop = -1
		file_sections.append(all_files[start : stop])

	res_queue = Queue()
	processes = []
	for section in file_sections:
		p = Process(target=stat_file, args=(section, res_queue))
		p.start()
		processes.append(p)
	
	for p in processes:
		p.join()
	
	total_stats = defaultdict(int)
	while not res_queue.empty():
		stat = res_queue.get()
		for author, cnt in stat.iteritems():
			total_stats[author] += cnt
	
	print total_stats
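stat_files is Python 2 throughout (xrange, print statements, iteritems, and integer /). Note also that stop = -1 for the last section slices the final file away, since all_files[start:-1] excludes the last element. A Python 3 sketch of the chunking step with that fixed:

def chunk(files, n):
    # Even sections; the last section absorbs the remainder.
    size = len(files) // n
    sections = [files[i * size:(i + 1) * size] for i in range(n - 1)]
    sections.append(files[(n - 1) * size:])
    return sections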
Example no. 49
    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        self.uri_opener.pause(False)

        http_response = output.get()
        self.assertNotIsInstance(http_response, types.NoneType,
                                 'Error in send thread.')
        
        th.join()
        
        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
Example no. 50
    def test_pause_unpause(self):
        """
        Verify that the pause method actually works. In this case, working
        means that the process doesn't send any more HTTP requests, a fact
        that is verified with the "fake" count plugin.
        """
        core_start = Process(target=self.w3afcore.start, name='TestRunner')
        core_start.daemon = True
        core_start.start()
        
        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_pause = self.count_plugin.count
        self.assertGreater(self.count_plugin.count, 0)
        
        # Pause and measure
        self.w3afcore.pause(True)
        count_after_pause = self.count_plugin.count
        
        time.sleep(2)
        count_after_sleep = self.count_plugin.count
        
        all_equal = count_before_pause == count_after_pause == count_after_sleep
        
        self.assertTrue(all_equal)

        # Unpause and verify that all requests were sent
        self.w3afcore.pause(False)
        core_start.join()
        
        self.assertEqual(self.count_plugin.count, self.count_plugin.loops)
Example no. 51
    def test_stop(self):
        """
        Verify that the stop method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we
        stop().

        This test seems to be failing @ CircleCI because of a test dependency
        issue. If run alone in your workstation it will PASS, but if run at
        CircleCI the count plugin doesn't seem to start.
        """
        core_start = Process(target=self.w3afcore.start, name='TestRunner')
        core_start.daemon = True
        core_start.start()
        
        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_stop = self.count_plugin.count
        self.assertGreater(count_before_stop, 0)
        
        # Stop now,
        self.w3afcore.stop()
        core_start.join()

        count_after_stop = self.count_plugin.count
        
        self.assertEqual(count_after_stop, count_before_stop)
Example no. 52
    def test_pause_stop(self):
        """
        Verify that the pause method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we,
        pause and that stop works when paused.
        """
        core_start = Process(target=self.w3afcore.start, name="TestRunner")
        core_start.daemon = True
        core_start.start()

        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_pause = self.count_plugin.count
        self.assertGreater(self.count_plugin.count, 0)

        # Pause and measure
        self.w3afcore.pause(True)
        count_after_pause = self.count_plugin.count

        time.sleep(2)
        count_after_sleep = self.count_plugin.count

        all_equal = count_before_pause == count_after_pause == count_after_sleep

        self.assertTrue(all_equal)

        # Stop while paused and verify no more requests are sent
        self.w3afcore.stop()
        core_start.join()

        # No more requests sent after pause
        self.assertEqual(self.count_plugin.count, count_after_sleep)
Example no. 53
    def __init__(self, ip, port, uri_opener, handler_klass=ProxyHandler,
                 ca_certs=CA_CERT_DIR, name='ProxyThread'):
        """
        :param ip: IP address to bind
        :param port: Port to bind
        :param uri_opener: The uri_opener that will be used to open
                           the requests that arrive from the browser
        :param handler_klass: A class that will know how to handle
                              requests from the browser
        """
        Process.__init__(self)
        self.daemon = True
        self.name = name
        
        # Internal vars
        self._server = None
        self._running = False
        self._uri_opener = uri_opener
        self._ca_certs = ca_certs

        # Stats
        self.total_handled_requests = 0

        # User configured parameters
        self._config = ProxyConfig(cadir=self._ca_certs,
                                   ssl_version_client='all',
                                   ssl_version_server='all',
                                   host=ip,
                                   port=port)

        # Start the proxy server
        try:
            self._server = ProxyServer(self._config)
        except socket.error, se:
            raise ProxyException('Socket error while starting proxy: "%s"'
                                 % se.strerror)
Example no. 55
 def __call__(self):
     """ Starts the connection to the server """
     self._running = True
     while self._running:
         self.compileRe()
         self._listenThread = Process(name='Listener', target=self._listen)
         self._listenThread.daemon = True
         try:
             self.connect()
             self._listenThread.start()
             self.auth(self.nick)
         except:
             self.disconnect()
             continue
         self._autoJoin()
         while self._connected:
             try:
                 time.sleep(0.5)
             except:
                 self.disconnect()
Example no. 56
    def _cmd_start(self, params):
        """
        Start the core in a different thread, monitor keystrokes in the main
        thread.

        :return: None
        """
        # Check if the console output plugin is enabled or not, and warn.
        output_plugins = self._w3af.plugins.get_enabled_plugins('output')
        if 'console' not in output_plugins and len(output_plugins) == 0:
            msg = ("\nWarning: You disabled the console output plugin. If you"
                   " start a new scan, the discovered vulnerabilities won\'t be"
                   " printed to the console, we advise you to enable at least"
                   " one output plugin in order to be able to actually see the"
                   " the scan output.")
            print msg

        # Note that I'm NOT starting this in a new multiprocess Process
        # please note the multiprocessing.dummy , this is required because
        # I want to start new threads inside this thread and there is a bug
        # with that http://bugs.python.org/issue10015
        self._scan_thread = Process(target=self._real_start)
        self._scan_thread.name = 'ConsoleScanThread'
        self._scan_thread.daemon = True
        self._scan_thread.start()
        
        # let the core thread start
        scan_started = self.wait_for_start()
        if not scan_started:
            om.out.console('The scan failed to start.')
            self._w3af.stop()
            return

        try:
            self.show_progress_on_request()
        except KeyboardInterrupt:
            self.handle_scan_stop()
Example no. 57
    def test_stop(self):
        """
        Verify that the stop method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we
        stop().
        """
        core_start = Process(target=self.w3afcore.start, name="TestRunner")
        core_start.daemon = True
        core_start.start()

        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_stop = self.count_plugin.count
        self.assertGreater(count_before_stop, 0)

        # Stop now,
        self.w3afcore.stop()
        core_start.join()

        count_after_stop = self.count_plugin.count

        self.assertEqual(count_after_stop, count_before_stop)
Example no. 58
    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL('http://moth/')
            http_response = uri_opener.GET(url)
            output.put(http_response)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        self.uri_opener.pause(False)

        http_response = output.get()
        th.join()
        
        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)