Example #1
    def test_pause_stop(self):
        """
        Verify that the pause method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we
        pause, and that stop() works while paused.
        """
        core_start = Process(target=self.w3afcore.start, name="TestRunner")
        core_start.daemon = True
        core_start.start()

        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_pause = self.count_plugin.count
        self.assertGreater(self.count_plugin.count, 0)

        # Pause and measure
        self.w3afcore.pause(True)
        count_after_pause = self.count_plugin.count

        time.sleep(2)
        count_after_sleep = self.count_plugin.count

        all_equal = count_before_pause == count_after_pause == count_after_sleep

        self.assertTrue(all_equal)

        # Stop while paused and wait for the core to finish
        self.w3afcore.stop()
        core_start.join()

        # No more requests sent after pause
        self.assertEqual(self.count_plugin.count, count_after_sleep)
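The test above drives a w3afcore whose pause(True) quietly blocks the worker loop. A minimal, self-contained sketch of the behaviour it exercises, built only from multiprocessing primitives (worker, unpaused and stopped are illustrative names, not w3af internals):

import time
from multiprocessing import Event, Process, Value

def worker(count, unpaused, stopped):
    # Loop until stopped; produce nothing while paused.
    while not stopped.is_set():
        unpaused.wait()                # blocks while "paused"
        if stopped.is_set():
            break
        with count.get_lock():
            count.value += 1           # stands in for one HTTP request
        time.sleep(0.01)

if __name__ == '__main__':
    count = Value('i', 0)
    unpaused, stopped = Event(), Event()
    unpaused.set()                     # start in the running state

    p = Process(target=worker, args=(count, unpaused, stopped))
    p.daemon = True
    p.start()

    time.sleep(0.2)
    unpaused.clear()                   # pause
    time.sleep(0.1)                    # let any in-flight iteration finish
    before = count.value
    time.sleep(0.2)
    assert count.value == before       # nothing happened while paused

    stopped.set()                      # stop also works while paused ...
    unpaused.set()                     # ... once the worker is released
    p.join()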
Example #2
    def test_pause_unpause(self):
        """
        Verify that the pause method actually works. In this case, working
        means that the process doesn't send any more HTTP requests, a fact
        that is verified with the "fake" count plugin.
        """        
        core_start = Process(target=self.w3afcore.start, name='TestRunner')
        core_start.daemon = True
        core_start.start()
        
        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_pause = self.count_plugin.count
        self.assertGreater(self.count_plugin.count, 0)
        
        # Pause and measure
        self.w3afcore.pause(True)
        count_after_pause = self.count_plugin.count
        
        time.sleep(2)
        count_after_sleep = self.count_plugin.count
        
        all_equal = count_before_pause == count_after_pause == count_after_sleep
        
        self.assertTrue(all_equal)

        # Unpause and verify that all requests were sent
        self.w3afcore.pause(False)
        core_start.join()
        
        self.assertEqual(self.count_plugin.count, self.count_plugin.loops)
Example #3
def main():
    sensorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(sensorDict)
    # outputDataSigma = multiprocessing.Array('f', [0] * len(sensorDict) * 24)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(sensorDict) * 24)

    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()
    # receive data in a new process
    pRec = Process(target=readObj.receive,
                   args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()

    pTrack3D = multiprocessing.Process(target=track3D, args=(state, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    mp = MahonyPredictor(q=state[3:], Kp=100, Ki=0.01, dt=0.002)
    while True:
        # print("a={}, w={}".format(np.round(outputData[:3], 2), np.round(outputData[3:6], 2)))
        mp.getGyroOffset(outputData[3:6])
        mp.IMUupdate(outputData[:3], outputData[3:6])
        state[3:] = mp.q
        time.sleep(0.08)
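The multiprocessing.Array buffers above are the only channel between the reader process and the main loop: readObj.receive overwrites them while the Mahony filter reads them. A stripped-down sketch of that sharing pattern (producer and buf are illustrative names):

import time
from multiprocessing import Array, Process

def producer(buf):
    # Continuously overwrite the shared buffer, as readObj.receive does above.
    start = time.time()
    while True:
        buf[0] = time.time() - start
        time.sleep(0.01)

if __name__ == '__main__':
    buf = Array('f', [0.0] * 4)        # 'f' = C float, the typecode used above
    p = Process(target=producer, args=(buf,))
    p.daemon = True                    # never joined; it dies with the parent
    p.start()

    for _ in range(3):
        time.sleep(0.1)
        print('producer has been running for %.2fs' % buf[0])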
Example #4
    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL(get_moth_http())
            try:
                http_response = uri_opener.GET(url)
                output.put(http_response)
            except:
                output.put(None)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        self.uri_opener.pause(False)

        http_response = output.get()
        self.assertNotIsInstance(http_response, types.NoneType,
                                 'Error in send thread.')
        
        th.join()
        
        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
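The assertRaises(Queue.Empty, output.get, True, 2) line is the heart of this test: it calls output.get(block=True, timeout=2) and passes only if nothing lands on the queue within two seconds, i.e. the paused opener sent no request. The idiom in isolation, in Python 3 spelling (where the Queue module became queue):

import queue
import unittest

class TimeoutIdiom(unittest.TestCase):
    def test_nothing_arrives(self):
        output = queue.Queue()
        # get(block=True, timeout=2) raises queue.Empty after two seconds
        self.assertRaises(queue.Empty, output.get, True, 2)

if __name__ == '__main__':
    unittest.main()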
Example #5
    def test_stop(self):
        """
        Verify that the stop method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we
        stop().

        This test seems to be failing @ CircleCI because of a test dependency
        issue. If run alone in your workstation it will PASS, but if run at
        CircleCI the count plugin doesn't seem to start.
        """
        core_start = Process(target=self.w3afcore.start, name='TestRunner')
        core_start.daemon = True
        core_start.start()

        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_stop = self.count_plugin.count
        self.assertGreater(count_before_stop, 0)

        # Stop now
        self.w3afcore.stop()
        core_start.join()

        count_after_stop = self.count_plugin.count

        self.assertEqual(count_after_stop, count_before_stop)
Example #6
    def new_process(cls, *args, **kwargs):
        process = Process(target=BaseCoroutine.start_coroutine,
                          args=(cls, ) + args,
                          kwargs=kwargs)
        process.daemon = True
        process.start()
        return process
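The classmethod factory hides the Process boilerplate behind a single call. A self-contained analogue (BaseCoroutine.start_coroutine is not shown in the snippet, so the hypothetical _run stands in for it):

from multiprocessing import Process

class Worker:
    @classmethod
    def new_process(cls, *args, **kwargs):
        # As above: the class itself is passed as the first argument so
        # the target can use it inside the child process.
        process = Process(target=Worker._run, args=(cls,) + args,
                          kwargs=kwargs)
        process.daemon = True
        process.start()
        return process

    @staticmethod
    def _run(cls, *args, **kwargs):
        print('running %s with %r and %r' % (cls.__name__, args, kwargs))

if __name__ == '__main__':
    Worker.new_process(1, 2, key='value').join()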
Example #7
    def process_request(self, request, client_address):
        """
        Start a new process to handle the request.

        Overridden here to use a process instead of a thread.
        """
        t = Process(target=self.process_request_thread, args=(request, client_address))
        t.daemon = self.daemon_threads
        t.start()
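This override mirrors what socketserver.ThreadingMixIn.process_request does, with the thread swapped for a process. On POSIX the standard library packages the same idea as socketserver.ForkingMixIn; a minimal sketch:

import socketserver

class EchoHandler(socketserver.StreamRequestHandler):
    def handle(self):
        # Echo a single line back to the client
        self.wfile.write(self.rfile.readline())

class ForkingEchoServer(socketserver.ForkingMixIn, socketserver.TCPServer):
    pass

if __name__ == '__main__':
    with ForkingEchoServer(('127.0.0.1', 9999), EchoHandler) as server:
        server.serve_forever()     # each request is handled in a forked child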
Example #8
def new_send_email(email):
    if not IS_ACTIVE:
        new_send_email.queue = Manager().Queue()
        process = Process(target=process_sent_queue,
                          args=(new_send_email.queue, ))
        process.daemon = True
        process.start()
        models.IS_ACTIVE = True
    new_send_email.queue.put(email)
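The function lazily spawns a single consumer process the first time an email is queued. Note that it tests IS_ACTIVE but sets models.IS_ACTIVE, so the guard only holds if both names refer to the same module attribute. The same lazy-consumer pattern without that wrinkle (a sketch; process_sent_queue here is a stand-in for the real delivery code):

import time
from multiprocessing import Manager, Process

def process_sent_queue(queue):
    while True:
        email = queue.get()
        print('sending %s' % email)    # stands in for real delivery

def send_email(email):
    # Spawn the consumer on first use and remember the queue on the function
    if getattr(send_email, 'queue', None) is None:
        send_email.queue = Manager().Queue()
        worker = Process(target=process_sent_queue, args=(send_email.queue,))
        worker.daemon = True
        worker.start()
    send_email.queue.put(email)

if __name__ == '__main__':
    send_email('hello@example.com')
    send_email('world@example.com')
    time.sleep(0.5)                    # let the daemon drain before exit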
Example #9
def main():
    sensorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(sensorDict)
    outputDataSigma = None
    magBg = multiprocessing.Array('f', [0] * 6)
    outputData = multiprocessing.Array('f', [0] * len(sensorDict) * 24)

    state = multiprocessing.Array('f', [0, 0, 0, 1, 0, 0, 0])

    # Wait a second to let the port initialize
    # readObj.send()
    # receive data in a new process
    pRec = Process(target=readObj.receive,
                   args=(outputData, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()
    time.sleep(0.5)

    pTrack3D = multiprocessing.Process(target=track3D, args=(state, ))
    pTrack3D.daemon = True
    pTrack3D.start()

    i = 0
    bw = np.zeros(3)
    qEKF = QEKF()
    while True:
        for j in range(4):
            # print("w={}".format(np.round(outputData[3+6*j:6*(j+1)], 2)))
            if i < 100:
                bw += outputData[3 + 6 * j:6 * (j + 1)]
                i += 1
                if i == 100:
                    bw /= i
                    qEKF.bw = bw
                    print("get gyroscope bias:{}deg/s".format(bw))
            else:
                w = outputData[3 + 6 * j:6 * (j + 1)]
                wb = w - bw
                qEKF.F = qEKF.Fx(qEKF.dt, wb)
                print('time={:.4f}: wb={}, q={}'.format(
                    time.time(), np.round(qEKF.wb, 2), np.round(qEKF.x, 3)))
                qEKF.predict()
                qNorm = np.linalg.norm(qEKF.x)
                qEKF.x = qEKF.x / qNorm
                state[3:7] = qEKF.x[:]

                aNorm = np.linalg.norm(outputData[6 * j:6 * j + 3])
                qEKF.z = np.array(outputData[6 * j:6 * j + 3]) / aNorm
                qEKF.update(qEKF.z, HJacobian, Hx, qEKF.R)
                qNorm = np.linalg.norm(qEKF.x)
                qEKF.x = qEKF.x / qNorm
                state[3:7] = qEKF.x[:]
            time.sleep(0.037)
Example #10
    def test_pause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL('http://moth/')
            http_response = uri_opener.GET(url)
            output.put(http_response)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)
Example #11
    def __init__(self):
        with socket.socket() as tcp_socket:
            # Allow the port to be reused right after a restart
            tcp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            # Bind the port
            tcp_socket.bind(('', 8080))
            # Listen for connections
            tcp_socket.listen()
            # Wait for clients to connect
            while True:
                self.client_socket, self.client_addr = tcp_socket.accept()
                t = Process(target=self.handle)
                t.daemon = True
                # start() runs handle() in a new process; the original
                # called run(), which executes it in this process and
                # blocks the accept loop.
                t.start()
Example #12
def scan_stop(scan_id):
    """
    Stop a scan

    :param scan_id: The scan ID to stop
    :return: Empty result if success, 403 if the current state indicates that
             the scan can't be stopped.
    """
    scan_info = get_scan_info_from_id(scan_id)
    if scan_info is None:
        abort(404, 'Scan not found')

    if not scan_info.w3af_core.can_stop():
        abort(403, 'Scan can not be stopped')

    t = Process(target=scan_info.w3af_core.stop, name='ScanStopThread', args=())
    t.daemon = True
    t.start()

    return jsonify({'message': 'Stopping scan'})
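Stopping a scan can take a while, so the handler offloads stop() to a daemon thread and answers at once. The same fire-and-forget shape in isolation (the route and slow_stop are illustrative, not the actual w3af REST API):

import time
from threading import Thread

from flask import Flask, jsonify

app = Flask(__name__)

def slow_stop():
    time.sleep(5)                      # stands in for w3af_core.stop()

@app.route('/scans/<int:scan_id>/stop')
def scan_stop(scan_id):
    t = Thread(target=slow_stop, name='ScanStopThread')
    t.daemon = True
    t.start()                          # respond before the stop completes
    return jsonify({'message': 'Stopping scan'})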
Example #13
    def test_stop(self):
        """
        Verify that the stop method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we
        stop().
        """
        core_start = Process(target=self.w3afcore.start, name="TestRunner")
        core_start.daemon = True
        core_start.start()

        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_stop = self.count_plugin.count
        self.assertGreater(count_before_stop, 0)

        # Stop now
        self.w3afcore.stop()
        core_start.join()

        count_after_stop = self.count_plugin.count

        self.assertEqual(count_after_stop, count_before_stop)
Example #14
    def test_pause_unpause(self):
        output = Queue.Queue()
        self.uri_opener.pause(True)

        def send(uri_opener, output):
            url = URL('http://moth/')
            http_response = uri_opener.GET(url)
            output.put(http_response)

        th = Process(target=send, args=(self.uri_opener, output))
        th.daemon = True
        th.start()

        self.assertRaises(Queue.Empty, output.get, True, 2)

        self.uri_opener.pause(False)

        http_response = output.get()
        th.join()
        
        self.assertEqual(http_response.get_code(), 200)
        self.assertIn(self.MOTH_MESSAGE, http_response.body)
Example #15
    def MouseCallback(event, x, y, flags, param):
        # The four points must be clicked clockwise: top-left, top-right,
        # bottom-right, bottom-left
        if event == cv.EVENT_LBUTTONDBLCLK:
            clickTime = param.clickTime
            cv.circle(param.drawnImage, (x, y), 8, (0, 0, 255), -1)
            param.dots.append((x, y))
            cv.imshow(mainWinName, param.drawnImage)

            if clickTime + 1 == 4:
                global windowCount
                p = Process(target=WarpImage,
                            args=(param.sourceImage, param.dots.copy(),
                                  windowCount))
                p.daemon = True
                p.start()

                param.drawnImage = param.sourceImage.copy()
                cv.imshow(mainWinName, param.sourceImage)
                param.dots.clear()
                windowCount += 1

            param.clickTime = (clickTime + 1) % 4
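The callback hands the child param.dots.copy() and the current windowCount: Process arguments are pickled when start() is called, so the parent can clear the list and bump the counter immediately afterwards without racing the child. A tiny demonstration of that copy semantics:

from multiprocessing import Process

def show(dots, count):
    # Prints the values that were pickled when start() was called
    print('child sees', dots, count)

if __name__ == '__main__':
    dots, count = [(1, 2), (3, 4)], 0
    p = Process(target=show, args=(dots.copy(), count))
    p.start()
    dots.clear()                       # does not affect the child's copy
    count += 1
    p.join()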
Example #16
    def test_pause_stop(self):
        """
        Verify that the pause method actually works. In this case, working
        means that the process doesn't send any more HTTP requests after we
        pause, and that stop() works while paused.

        This test seems to be failing @ CircleCI because of a test dependency
        issue. If run alone in your workstation it will PASS, but if run at
        CircleCI the count plugin doesn't seem to start.
        """
        core_start = Process(target=self.w3afcore.start, name='TestRunner')
        core_start.daemon = True
        core_start.start()
        
        # Let the core start, and the count plugin send some requests.
        time.sleep(5)
        count_before_pause = self.count_plugin.count
        self.assertGreater(self.count_plugin.count, 0)
        
        # Pause and measure
        self.w3afcore.pause(True)
        count_after_pause = self.count_plugin.count
        
        time.sleep(2)
        count_after_sleep = self.count_plugin.count
        
        all_equal = count_before_pause == count_after_pause == count_after_sleep
        
        self.assertTrue(all_equal)

        # Stop while paused and wait for the core to finish
        self.w3afcore.stop()
        core_start.join()
        
        # No more requests sent after pause
        self.assertEqual(self.count_plugin.count, count_after_sleep)
Example #17

def exit():
    """
    Windows only. Checks whether a kodi process is running and
    exits as soon as it isn't.
    """

    print('Exit daemon started')
    while True:
        kodi_process = False
        time.sleep(5)

        for process in list(psutil.process_iter()):
            if re.search('kodi', process.name().lower()):
                kodi_process = True
                break

        if kodi_process:
            continue
        else:
            break
    print('Kodi process not found')
    print('EXITING...')
    os._exit(1)


if __name__ == '__main__':

    check_exit = Process(target=exit)
    check_exit.daemon = True
    check_exit.start()

    run()
Example #18
    def run(self, func=None, workhorse=True):

        if self.kwargs.get('socket'):
            try:
                from emross.utility.socket import establish_connection
                self.socket = establish_connection()

                socket_handler = JsonSocketHandler(self.socket, self.bots)
                socket_handler.daemon = True
                socket_handler.start()
            except Exception as e:
                logger.exception(e)


        self.initialise_bots(
            socket_writer=getattr(self.socket, 'queue_out', None),
            settings=self.settings
        )

        workers = []
        for bot in self.bots:
            bot.session.start_time = time.time()

            if func:
                logger.info('Starting new bot thread for {0}'.format(bot.api.player))
                worker = threading.Thread(target=func, args=(bot,))
                worker.bot = bot
                worker.daemon = True
                worker.start()
            else:
                logger.debug('No need to use a main thread for this worker!')
                worker = EmrossBaseObject(bot)

            workers.append(worker)


        """
        Now we can start the main event loop.
        If we are running a console then the `code.interact` will block
        so we need to spawn a thread to process each bot's error queue.
        """

        error_thread = Process(target=_error_checker, args=(workers, self.bots))
        error_thread.daemon = True
        error_thread.start()

        processes = self.kwargs.get('processes') or DEFAULT_POOL_SIZE

        queue = Queue.Queue(processes)
        for i in range(processes):
            t = Process(target=_bot_consumer, args=(queue,))
            t.daemon = True
            t.start()

        if self.console:
            worker = Process(target=_bot_runner, args=(queue, self.bots),
                                kwargs=self.kwargs)
            worker.daemon = True
            worker.start()

            import emross.utility.settings
            sandbox = dict(
                manager=self,
                settings=emross.utility.settings,
                bot=self.bot
            )
            code.interact(banner='EmrossWar Bot Management console', local=sandbox)
            raise KeyboardInterrupt
        elif workhorse:
            # We can run this directly in this thread
            _bot_runner(queue, self.bots, **self.kwargs)
Example #19
def start_scan():
    """
    Starts a new w3af scan

    Receive a JSON containing:
        - A list with the target URLs
        - The profile (eg. the content of fast_scan.pw3af)

    :return: A JSON containing:
        - The URL to the newly created scan (eg. /scans/1)
        - The newly created scan ID (eg. 1)
    """
    if not request.json or 'scan_profile' not in request.json:
        abort(400, 'Expected scan_profile in JSON object')

    if 'target_urls' not in request.json:
        abort(400, 'Expected target_urls in JSON object')

    scan_profile = request.json['scan_profile']
    target_urls = request.json['target_urls']

    #
    # First make sure that there are no other scans running, remember that this
    # REST API is an MVP and we can only run one scan at a time (for now)
    #
    scan_infos = SCANS.values()
    if not all([si is None for si in scan_infos]):
        abort(
            400, 'This version of the REST API does not support'
            ' concurrent scans. Remember to DELETE finished scans'
            ' before starting a new one.')

    #
    # Before trying to start a new scan we verify that the scan profile is
    # valid and return an informative error if it's not
    #
    # scan_profile_file_name, profile_path = create_temp_profile(scan_profile)
    # w3af_core = w3afCore()
    #
    # try:
    #     w3af_core.profiles.use_profile(scan_profile_file_name,
    #                                    workdir=profile_path)
    # except BaseFrameworkException, bfe:
    #     abort(400, str(bfe))

    #
    # Now that we know that the profile is valid I verify the scan target info
    #
    if not len(target_urls):
        abort(400, 'No target URLs specified')

    for target_url in target_urls:
        try:
            URL(target_url)
        except ValueError:
            abort(400, 'Invalid URL: "%s"' % target_url)

    # target_options = w3af_core.target.get_options()
    # target_option = target_options['target']
    # try:
    #     target_option.set_value([URL(u) for u in target_urls])
    #     w3af_core.target.set_options(target_options)
    # except BaseFrameworkException, bfe:
    #     abort(400, str(bfe))

    #
    # Finally, start the scan in a different thread
    #
    scan_id = get_new_scan_id()
    scan_info_setup = Event()

    args = (target_urls, scan_profile, scan_info_setup)
    t = Process(target=start_scan_helper, name='ScanThread', args=args)
    t.daemon = True

    t.start()

    # Wait until the thread starts
    scan_info_setup.wait()

    return jsonify({
        'message': 'Success',
        'id': scan_id,
        'href': '/scans/%s' % scan_id
    }), 201
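scan_info_setup.wait() is a startup handshake: the request handler refuses to answer until the scan thread has registered itself. The handshake in isolation (scan_worker is a stand-in for the real start_scan_helper):

import time
from threading import Event, Thread

def scan_worker(ready):
    print('registering scan info ...')
    ready.set()                        # unblock the caller as early as possible
    time.sleep(2)                      # the scan itself keeps running

if __name__ == '__main__':
    ready = Event()
    t = Thread(target=scan_worker, args=(ready,), name='ScanThread')
    t.daemon = True
    t.start()
    ready.wait()                       # returns once the worker has checked in
    print('scan accepted, the HTTP response can go out now')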
Example #20

if __name__ == '__main__':
    # IMU and magSensor values cannot be displayed at the same time
    # sensorDict = {'imu': 'LSM6DS3TR-C', 'magSensor': 'AK09970d'}
    sensorDict = {'magSensor': 'AK09970d'}
    # sensorDict = {'imu': 'LSM6DS3TR-C'}
    readObj = ReadData(sensorDict)    # create the object that reads the data

    # define the shared data structures
    outputData = multiprocessing.Array('f', [0] * len(sensorDict) * 24)
    magBg = multiprocessing.Array('f', [0] * 6)

    # Smoothed curves can only be plotted in singleCurve mode, and only
    # the magSensor provides them
    outputDataSmooth = multiprocessing.Array('f', [0] * len(sensorDict) * 24)
    # outputDataSmooth = None

    # Standard-deviation curves can only be plotted in multiCurve mode
    # outputDataSigma = multiprocessing.Array('f', [0] * len(sensorDict) * 24)
    outputDataSigma = None

    # Wait a second to let the port initialize
    # readObj.send()
    # receive data in a new process
    pRec = Process(target=readObj.receive,
                   args=(outputData, outputDataSmooth, magBg, outputDataSigma))
    pRec.daemon = True
    pRec.start()
    time.sleep(1)
    # plot sensor data
    plotSensor(sensorDict, outputData, outputDataSmooth, outputDataSigma)
Example #21
def process_dump(  # noqa: C901
        input_file, template_file, out_file, file_size, file_compress,
        process_count):  # pylint: disable=R0912,R0914,R0915
    """
    :param input_file: name of the wikipedia dump file; '-' to read from stdin
    :param template_file: optional file with template definitions.
    :param out_file: directory where to store extracted data, or '-' for stdout
    :param file_size: max size of each extracted file, or None for no max (one file)
    :param file_compress: whether to compress files with bzip.
    :param process_count: number of extraction processes to spawn.
    """
    global urlbase
    # global knownNamespaces
    global templateNamespace, templatePrefix
    global moduleNamespace, modulePrefix

    if input_file == "-":
        input_source = sys.stdin
    else:
        input_source = fileinput.FileInput(input_file,
                                           openhook=fileinput.hook_compressed)

    # collect siteinfo
    for line in input_source:
        line = line.decode("utf-8")
        m = TAG_RE.search(line)
        if not m:
            continue
        tag = m.group(2)
        if tag == "base":
            # discover urlbase from the xml dump file
            # /mediawiki/siteinfo/base
            base = m.group(3)
            urlbase = base[:base.rfind("/")]
        elif tag == "namespace":
            known_namespaces.add(m.group(3))
            if re.search('key="10"', line):
                templateNamespace = m.group(3)
                templatePrefix = templateNamespace + ":"
            elif re.search('key="828"', line):
                moduleNamespace = m.group(3)
                modulePrefix = moduleNamespace + ":"
        elif tag == "/siteinfo":
            break

    if expand_templates:
        # preprocess
        template_load_start = default_timer()
        if template_file and os.path.exists(template_file):
            logger.info(
                "Preprocessing '%s' to collect template definitions: this may take some time.",
                template_file)
            file_input = fileinput.FileInput(
                template_file, openhook=fileinput.hook_compressed)
            load_templates(file_input)
            file_input.close()
        else:
            if input_file == "-":
                # can't scan then reset stdin; must error w/ suggestion to specify template_file
                raise ValueError(
                    "to use templates with stdin dump, must supply explicit template-file"
                )
            logger.info(
                "Preprocessing '%s' to collect template definitions: this may take some time.",
                input_file)
            load_templates(input_source, template_file)
            input_source.close()
            input_source = fileinput.FileInput(
                input_file, openhook=fileinput.hook_compressed)
        template_load_elapsed = default_timer() - template_load_start

        # FIXME: get rid of this horrible global variable
        logger.info(
            "Loaded %d templates in %.1fs",
            len(templates),
            template_load_elapsed  # noqa: F821  # pylint: disable=E0602
        )

    if out_file == "-":
        output = sys.stdout
        if file_compress:
            logger.warning(
                "writing to stdout, so no output compression (use an external tool)"
            )
    else:
        next_file = NextFile(out_file)
        output = OutputSplitter(next_file, file_size, file_compress)

    # process pages
    logger.info("Starting page extraction from %s.", input_file)
    extract_start = default_timer()

    # Parallel Map/Reduce:
    # - pages to be processed are dispatched to workers
    # - a reduce process collects the results, sort them and print them.

    maxsize = 10 * process_count
    # output queue
    output_queue = Queue(maxsize=maxsize)

    # Reduce job that sorts and prints output
    reduce_job = Process(target=reduce_process, args=(output_queue, output))
    reduce_job.start()

    # initialize jobs queue
    jobs_queue = Queue(maxsize=maxsize)

    # start worker processes
    logger.info("Using %d extract processes.", process_count)
    workers = []
    for _ in range(max(1, process_count)):
        extractor = Process(target=extract_process,
                            args=(jobs_queue, output_queue))
        extractor.daemon = True  # only live while parent process lives
        extractor.start()
        workers.append(extractor)

    # Mapper process

    # we collect individual lines, since str.join() is significantly faster
    # than concatenation
    page = []
    article_id = None
    last_id = None
    ordinal = 0  # page count
    in_text = False
    redirect = False
    for line in input_source:
        line = line.decode("utf-8")
        if "<" not in line:  # faster than doing re.search()
            if in_text:
                page.append(line)
            continue
        m = TAG_RE.search(line)
        if not m:
            continue
        tag = m.group(2)
        if tag == "page":
            page = []
            redirect = False
        elif tag == "id" and not article_id:
            article_id = m.group(3)
        elif tag == "title":
            title = m.group(3)
        elif tag == "redirect":
            redirect = True
        elif tag == "text":
            in_text = True
            line = line[m.start(3):m.end(3)]
            page.append(line)
            if m.lastindex == 4:  # open-close
                in_text = False
        elif tag == "/text":
            if m.group(1):
                page.append(m.group(1))
            in_text = False
        elif in_text:
            page.append(line)
        elif tag == "/page":
            colon = title.find(":")
            if ((colon < 0 or title[:colon] in acceptedNamespaces)
                    and article_id != last_id and not redirect
                    and not title.startswith(templateNamespace)):
                job = (article_id, title, page, ordinal)
                jobs_queue.put(job)  # goes to any available extract_process
                last_id = article_id
                ordinal += 1
            article_id = None
            page = []

    input_source.close()

    # signal termination
    for _ in workers:
        jobs_queue.put(None)
    # wait for workers to terminate
    for w in workers:
        w.join()

    # signal end of work to reduce_job process
    output_queue.put(None)
    # wait for it to finish
    reduce_job.join()

    if output != sys.stdout:
        output.close()
    extract_duration = default_timer() - extract_start
    extract_rate = ordinal / extract_duration
    logger.info(
        "Finished %d-process extraction of %d articles in %.1fs (%.1f art/s)",
        process_count,
        ordinal,
        extract_duration,
        extract_rate,
    )
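The pipeline shuts down with sentinels: one None per extractor so each worker sees exactly one, then a final None for the reduce process. The shutdown protocol in isolation:

from multiprocessing import Process, Queue

def worker(jobs, results):
    while True:
        job = jobs.get()
        if job is None:                # sentinel: no more work
            break
        results.put(job * job)

if __name__ == '__main__':
    jobs, results = Queue(), Queue()
    workers = [Process(target=worker, args=(jobs, results))
               for _ in range(4)]
    for w in workers:
        w.daemon = True                # only live while the parent lives
        w.start()

    for job in range(10):
        jobs.put(job)
    for _ in workers:
        jobs.put(None)                 # one sentinel per worker
    for w in workers:
        w.join()

    results.put(None)                  # end-of-stream marker for the consumer
    print(sorted(iter(results.get, None)))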
Example #22
              (face_inp_q, face_res_q, facemdl_continue_evt, svm_reload_1,
               "./profiles/svm_model.bin")))

    dummycontinue = True
    worker_handres_mailman = DummyProcess(target=resMailMan,
                                          args=(hand_res_q, 'hand_res'))
    worker_faceres_mailman = DummyProcess(target=resMailMan,
                                          args=(face_res_q, 'face_res'))
    worker_pref_writeman = DummyProcess(target=savePref,
                                        args=(pref_wrt_q, 'pref_wrt'))
    worker_svm_trainer = DummyProcess(target=updateFaceClassifier, args=(10, ))

    #  worker_preprocess_p.daemon = True
    worker_hand_p1.daemon = True
    worker_face_p1.daemon = True
    worker_handres_mailman.daemon = True
    worker_faceres_mailman.daemon = True
    worker_pref_writeman.daemon = True
    worker_svm_trainer.daemon = True

    #  worker_preprocess_p.start()
    worker_hand_p1.start()
    worker_face_p1.start()
    worker_handres_mailman.start()
    worker_faceres_mailman.start()
    worker_pref_writeman.start()
    worker_svm_trainer.start()

    with open('./profiles/profiles.pkl', 'rb') as pref_fd:
        try:
            pref_db = pkl.load(pref_fd)
Example #23
        abort(400, str(bfe))

    scan_id = get_new_scan_id()
    scan_info = ScanInfo()
    scan_info.w3af_core = w3af_core
    scan_info.target_urls = target_urls
    scan_info.profile_path = scan_profile_file_name
    scan_info.output = RESTAPIOutput()
    SCANS[scan_id] = scan_info

    #
    # Finally, start the scan in a different thread
    #
    args = (scan_info,)
    t = Process(target=start_scan_helper, name='ScanThread', args=args)
    t.daemon = True

    t.start()

    return jsonify({'message': 'Success',
                    'id': scan_id,
                    'href': '/scans/%s' % scan_id}), 201


@app.route('/scans/', methods=['GET'])
@requires_auth
def list_scans():
    """
    :return: A JSON containing a list of:
        - Scan resource URL (eg. /scans/1)
        - Scan target
Example #24
def main():
    p = Process(target=angel)
    p.daemon = True
    p.start()
    input('press any key to exit ...')
Example #25

def error_catching(func):
    # The head of this decorator was truncated in the source; it is
    # reconstructed here around the surviving tail (the except body
    # and the return).
    def my_func(process_number):
        while True:
            try:
                func(process_number)
            except Exception as e:
                print("Worker {} crashed, restarting in 3s: {}".
                    format(process_number, repr(e)))
                sleep(3)

    return my_func


@error_catching
def f(process_number):
    print("starting worker:", process_number)
    while True:
        sleep(2)
        print("Worker {} checks in.".format(process_number))


if __name__ == '__main__':
    processes = []
    manager = Manager()

    for i in range(3):
        p = Process(target=f, args=(i,))   # f() expects the worker number
        p.daemon = True
        p.start()
        processes.append(p)

    try:
        while True:
            sleep(1)
    except KeyboardInterrupt:
        print("Keyboard interrupt in main")
        sys.exit()
Example #26
    ("../modelzoo/hand/test.prototxt", "../modelzoo/hand/vgg16_faster_rcnn_handGesdet_aug_fulldata_iter_50000.caffemodel", 0.4, 0.8, ('natural', 'yes', 'no')), (hand_inp_q, hand_res_q)))

    svm_reload_1 = Value('i', 0)
    worker_face_p1 = Process(
        target=modelPrepare,
        args=('faceRecModel',
              ("../modelzoo/face/haarcascade_frontalface_alt.xml",
               "./align/shape_predictor_68_face_landmarks.dat",
               "../modelzoo/face/LightenedCNN_B_deploy.prototxt",
               "../modelzoo/face/LightenedCNN_B.caffemodel",
               "eltwise_fc1",
               "./profiles/svm_model.bin"),
              (face_inp_q, face_res_q, facemdl_continue_evt, svm_reload_1,
               "./profiles/svm_model.bin")))

    dummycontinue = True
    worker_handres_mailman = DummyProcess(target=resMailMan,
                                          args=(hand_res_q, 'hand_res'))
    worker_faceres_mailman = DummyProcess(target=resMailMan,
                                          args=(face_res_q, 'face_res'))
    worker_pref_writeman = DummyProcess(target=savePref,
                                        args=(pref_wrt_q, 'pref_wrt'))
    worker_svm_trainer = DummyProcess(target=updateFaceClassifier, args=(10,))

    #  worker_preprocess_p.daemon = True
    worker_hand_p1.daemon = True
    worker_face_p1.daemon = True
    worker_handres_mailman.daemon = True
    worker_faceres_mailman.daemon = True
    worker_pref_writeman.daemon = True
    worker_svm_trainer.daemon = True

    #  worker_preprocess_p.start()
    worker_hand_p1.start()
    worker_face_p1.start()
    worker_handres_mailman.start()
    worker_faceres_mailman.start()
    worker_pref_writeman.start()
    worker_svm_trainer.start()

    with open('./profiles/profiles.pkl', 'rb') as pref_fd:
        try:
            pref_db = pkl.load(pref_fd)