Example #1
    def _fork_and_submit_job(self, job):
        parent_pipe, child_pipe = Pipe()
        try:
            p = Process(target=self._submit_job_to_lsf,
                        args=(child_pipe, parent_pipe, job,))
            p.start()

        except:
            parent_pipe.close()
            raise
        finally:
            child_pipe.close()

        try:
            p.join()

            result = parent_pipe.recv()
            if isinstance(result, str):  # `basestring` in the original (Python 2)
                raise SubmitError(result)

        except EOFError:
            raise SubmitError('Unknown exception submitting job')
        finally:
            parent_pipe.close()

        return result
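The core pattern in Example #1 (fork a worker, read its result back over a Pipe, and treat EOF as failure) reduces to a short self-contained sketch. Everything below, the worker and run_in_child, is illustrative, assuming billiard's multiprocessing-compatible top-level API:

from billiard import Pipe, Process

def _echo_worker(conn, payload):
    # hypothetical worker: do the work, then send the result to the parent
    conn.send(payload.upper())
    conn.close()

def run_in_child(payload):
    parent_conn, child_conn = Pipe()
    p = Process(target=_echo_worker, args=(child_conn, payload))
    p.start()
    child_conn.close()  # the parent only needs its own end
    try:
        result = parent_conn.recv()  # EOFError here means the child died without replying
    finally:
        parent_conn.close()
    p.join()
    return result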
Example #2
 def crawl(self, origin_oj):
     p = Process(
         target=self._crawl,
         args=[origin_oj]
     )
     p.start()
     p.join()
Example #3
 def __init__(self, spider):
     Process.__init__(self)
     settings = get_project_settings()
     self.crawler = Crawler(settings)
     self.crawler.configure()
     self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     self.spider = spider
Example #4
def clone_with_timeout(src: str, dest: str, clone_func: Callable[[], None],
                       timeout: float) -> None:
    """Clone a repository with timeout.

    Args:
        src: clone source
        dest: clone destination
        clone_func: callable that does the actual cloning
        timeout: timeout in seconds
    """
    errors: Queue = Queue()
    process = Process(target=_clone_task, args=(clone_func, errors))
    process.start()
    process.join(timeout)

    if process.is_alive():
        process.terminate()
        # Give it literally a second (in successive steps of 0.1 second),
        # then kill it.
        # Can't use `process.join(1)` here, billiard appears to be bugged
        # https://github.com/celery/billiard/issues/270
        killed = False
        for _ in range(10):
            time.sleep(0.1)
            if not process.is_alive():
                break
        else:
            killed = True
            os.kill(process.pid, signal.SIGKILL)
        raise CloneTimeout(src, timeout, killed)

    if not errors.empty():
        raise CloneFailure(src, dest, errors.get())
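_clone_task itself is not shown with this snippet; a minimal sketch consistent with how the errors queue is consumed above might be:

def _clone_task(clone_func: Callable[[], None], errors: Queue) -> None:
    # hypothetical helper: run the clone and report any failure via the queue
    try:
        clone_func()
    except Exception as exc:
        errors.put(str(exc))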
Example #5
    def add_export(self):
        # on MacOS we will not use os.fork, elsewhere this does nothing.
        forking_enable(0)

        logger.debug("Adding new export.")
        should_terminate = RawValue(c_bool,False)
        frames_to_export  = RawValue(c_int,0)
        current_frame = RawValue(c_int,0)

        data_dir = self.data_dir
        start_frame= self.start_frame.value
        end_frame= self.end_frame.value
        plugins = []

        # Here we make clones of every plugin that supports it.
        # So it runs in the current config when we launch the exporter.
        for p in self.g_pool.plugins:
            try:
                p_initializer = p.get_class_name(),p.get_init_dict()
                plugins.append(p_initializer)
            except AttributeError:
                pass

        out_file_path = verify_out_file_path(self.rec_name.value, self.data_dir)
        process = Process(target=export,
                          args=(should_terminate, frames_to_export, current_frame,
                                data_dir, start_frame, end_frame, plugins,
                                out_file_path))
        process.should_terminate = should_terminate
        process.frames_to_export = frames_to_export
        process.current_frame = current_frame
        process.out_file_path = out_file_path
        self.new_export = process
Example #6
    def display_graph(self, graphdef):
        '''display a graph'''
        if 'mestate' in globals():
            self.mestate.console.write("Expression: %s\n" % ' '.join(graphdef.expression.split()))
        else:
            self.mestate.child_pipe_send_console.send("Expression: %s\n" % ' '.join(graphdef.expression.split()))
        #mestate.mlog.reduce_by_flightmodes(mestate.flightmode_selections)

        #setup the graph, then pass to a new process and display
        self.mg = grapher.MavGraph()
        self.mg.set_marker(self.mestate.settings.marker)
        self.mg.set_condition(self.mestate.settings.condition)
        self.mg.set_xaxis(self.mestate.settings.xaxis)
        self.mg.set_linestyle(self.mestate.settings.linestyle)
        self.mg.set_show_flightmode(self.mestate.settings.show_flightmode)
        self.mg.set_legend(self.mestate.settings.legend)
        self.mg.add_mav(self.mestate.mlog)
        for f in graphdef.expression.split():
            self.mg.add_field(f)
        self.mg.process(self.mestate.flightmode_selections, self.mestate.mlog._flightmodes)
        self.lenmavlist = len(self.mg.mav_list)
        if platform.system() == 'Darwin':
            forking_enable(False)
        # Important - mg.mav_list is the full logfile and can be very large in size.
        # To avoid slowdowns on Windows (which copies the vars to the new process)
        # we need to empty this var when we're finished with it.
        self.mg.mav_list = []
        child = Process(target=self.mg.show, args=[self.lenmavlist, ])
        child.start()
        self.mestate.mlog.rewind()
Example #7
def main():
    # To assign camera by name: put string(s) in list

    # Parse command line arguments
    parser = argparse.ArgumentParser(description='GUI for gaze tracking and pupillometry')
    parser.add_argument('-eye', dest='eye_file', type=str, help="Work with existing video recording, instead of live feed", default='')
    parser.add_argument('-world', dest='world_file', type=str, help="Work with existing video recording, instead of live feed", default='')

    args = parser.parse_args()

    # to use a pre-recorded video.
    # Use a string to specify the path to your video file as demonstrated below
    if args.eye_file == '':
        eye_src = ["UI154xLE-M", "USB Camera-B4.09.24.1", "FaceTime Camera (Built-in)", "Microsoft", "6000","Integrated Camera"]
        # to assign cameras directly, using integers as demonstrated below
        # eye_src = 1
    else:
#        print "Using provide file: %s" % args.filename
        eye_src = args.eye_file

    if args.world_file == '':
        world_src = ["Logitech Camera","(046d:081d)","C510","B525", "C525","C615","C920","C930e"]
        # to assign cameras directly, using integers as demonstrated below
        # world_src = 0
    else:
        world_src = args.world_file

    # Camera video size in pixels (width,height)
    eye_size = (260,216) #(1280,1024)
    world_size = (640,480)


    # on MacOS we will not use os.fork, elsewhere this does nothing.
    forking_enable(0)

    # Create and initialize IPC
    g_pool = Temp()
    g_pool.pupil_queue = Queue()
    g_pool.eye_rx, g_pool.eye_tx = Pipe(False)
    g_pool.quit = RawValue(c_bool,0)
    # this value will be subtracted from the capture timestamp
    g_pool.timebase = RawValue(c_double,0)
    # make some constants available
    g_pool.user_dir = user_dir
    g_pool.rec_dir = rec_dir
    g_pool.version = version
    g_pool.app = 'capture'
    # set up subprocesses
    p_eye = Process(target=eye, args=(g_pool,eye_src,eye_size))

    # Spawn subprocess:
    p_eye.start()
    if platform.system() == 'Linux':
        # We need to give the camera driver some time before requesting another camera.
        sleep(0.5)

    world(g_pool,world_src,world_size)

    # Exit / clean-up
    p_eye.join()
Example #8
 def __init__(self, spider):
     Process.__init__(self)
     self.crawler = Crawler(spider.__class__, get_project_settings())
     # self.crawler.configure()
     self.crawler.signals.connect(reactor.stop,
                                  signal=signals.spider_closed)
     self.spider = spider
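These Process subclasses that wrap a Scrapy Crawler pair the __init__ above with a run method executed in the child. None of the snippets on this page show it; a minimal sketch (the exact crawl call varies across Scrapy versions) would be:

 def run(self):
     # runs in the child process: start the crawl, then block on the Twisted
     # reactor until the spider_closed signal connected in __init__ stops it
     self.crawler.crawl(self.spider)  # signature differs by Scrapy version
     reactor.run()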
Example #9
    def display_graph(self, graphdef):
        '''display a graph'''
        if 'mestate' in globals():
            self.mestate.console.write("Expression: %s\n" %
                                       ' '.join(graphdef.expression.split()))
        else:
            self.mestate.child_pipe_send_console.send(
                "Expression: %s\n" % ' '.join(graphdef.expression.split()))
        #mestate.mlog.reduce_by_flightmodes(mestate.flightmode_selections)

        #setup the graph, then pass to a new process and display
        self.mg = grapher.MavGraph()
        self.mg.set_marker(self.mestate.settings.marker)
        self.mg.set_condition(self.mestate.settings.condition)
        self.mg.set_xaxis(self.mestate.settings.xaxis)
        self.mg.set_linestyle(self.mestate.settings.linestyle)
        self.mg.set_show_flightmode(self.mestate.settings.show_flightmode)
        self.mg.set_legend(self.mestate.settings.legend)
        self.mg.add_mav(self.mestate.mlog)
        for f in graphdef.expression.split():
            self.mg.add_field(f)
        self.mg.process(self.mestate.flightmode_selections,
                        self.mestate.mlog._flightmodes)
        self.lenmavlist = len(self.mg.mav_list)
        if platform.system() == 'Darwin':
            forking_enable(False)
        #Important - mg.mav_list is the full logfile and can be very large in size
        #To avoid slowdowns in Windows (which copies the vars to the new process)
        #We need to empty this var when we're finished with it
        self.mg.mav_list = []
        child = Process(target=self.mg.show, args=[
            self.lenmavlist,
        ])
        child.start()
        self.mestate.mlog.rewind()
Example #10
 def crawl(
         self,
         origin_oj,
         solution_id,
         problem_id,
         language,
         code,
         username,
         nickname,
         password):
     p = Process(
         target=self._crawl,
         args=[
             origin_oj,
             solution_id,
             problem_id,
             language,
             code,
             username,
             nickname,
             password
         ]
     )
     p.start()
     p.join()
Example #11
 def __init__(self, spider):
     Process.__init__(self)
     setting = Settings()
     setting.setmodule(s)  # `s` is presumably the project's settings module; it is undefined in this snippet
     self.crawler = Crawler(setting)
     self.crawler.configure()
     self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     self.spider = spider
Example #12
def main():
    timePipe, sigPipe = Pipe()
    q = Queue()
    clock = Process(target=tickTock, args=(timePipe, ))
    clock.start()  # missing in the original; without it tickTock never runs
    testSignal = Process(target=signal, args=(q, sigPipe, [1]))
    testSignal.start()
    while True:
        print(q.get())
Example #13
def check_user_presence():
    for device in Device.objects.all():
        lock_id = 'scan-device-lock-{}'.format(device.id)
        if acquire_lock(lock_id):
            t = Process(target=presence.scan_device, args=(device, lock_id))
            t.start()

    return "ok"
Example #14
def cmd_save(args):
    '''save a graph'''
    child = Process(target=save_process,
                    args=[
                        mestate.last_graph, mestate.child_pipe_send_console,
                        mestate.child_pipe_send_graph, mestate.status.msgs
                    ])
    child.start()
Example #15
 def test_set_pdeathsig(self):
     return_pid = Value('i')
     p = Process(target=parent_task, args=(return_pid,))
     p.start()
     sleep(3) # wait for setting pdeathsig
     p.terminate()
     sleep(3) # wait for process termination
     with pytest.raises(psutil.NoSuchProcess):
         proc = psutil.Process(return_pid.value)
Example #16
 def crawl(self, spider):
     queue = Queue()
     self.queue = Queue()
     self.process = Process(target=self._crawl, args=(queue, spider))
     self.process.start()
     write_in_a_file('.crawl 1', {'process': self.process, 'process-pid': self.process and self.process.pid, 'queue': self.queue.qsize()}, "t.txt")
     self.process.join()
     write_in_a_file('.crawl 2', {'process': self.process,
                                  'process-pid': self.process and self.process.pid,
                                  'queue': self.queue.qsize()}, "t.txt")
Example #17
def telnet_client(server_id, port):
    key = 'server-{0}-pid'.format(server_id)
    pid = cache.get(key)
    if pid and pid in psutil.pids():  # psutil.get_pid_list() was removed in modern psutil
        os.kill(pid, signal.SIGTERM)
    p = Process(target=TelnetClient, args=(port, 'uptee', server_id))
    p.start()
    cache.set(key, p.pid)
    p.join()
Example #18
    def __init__(self, spider):
        Process.__init__(self)
        settings = get_project_settings()
        self.crawler = Crawler(settings)
        if not hasattr(project, 'crawler'):
            self.crawler.install()
            self.crawler.configure()

        self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
        self.spider = spider
Example #19
 def crawl(self, number, passwd):
     p = Process(
         target=self._crawl,
         args=[
             number,
             passwd
         ]
     )
     p.start()
     p.join()
Example #20
        def __init__(self, spider):
            Process.__init__(self)
            settings = get_project_settings()
            self.crawler = Crawler(settings)

            if not hasattr(project, 'crawler'):
                self.crawler.install()
                self.crawler.configure()
                self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
            self.spider = spider
Example #21
 def init_marker_cacher(self):
     forking_enable(0)  # disable os.fork; needed on MacOS only, a no-op elsewhere
     from marker_detector_cacher import fill_cache
     visited_list = [False if x == False else True for x in self.cache]
     video_file_path =  os.path.join(self.g_pool.rec_dir,'world.avi')
     self.cache_queue = Queue()
     self.cacher_seek_idx = Value(c_int,0)
     self.cacher_run = Value(c_bool,True)
     self.cacher = Process(target=fill_cache,
                           args=(visited_list, video_file_path, self.cache_queue,
                                 self.cacher_seek_idx, self.cacher_run))
     self.cacher.start()
Example #22
    def __init__(self, spider):
        Process.__init__(self)
        setting = Settings()
        setting.setmodule(settings,1)
        self.crawler = Crawler(setting)

        if not hasattr(project, 'crawler'):
            self.crawler.configure()
            self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
        self.spider = spider
Example #23
class NewBilliardMPWorker(CommonWorker):
    def __init__(self, callback=None):
        super(NewBilliardMPWorker, self).__init__(callback)  # the original omitted `self`

    def start(self):
        self.task = Process(target=common_run, daemon=True)
        self.task.start()

    def stop(self):
        pass
Example #24
 def crawl(self, oj, run_id):
     p = Process(
         target=self._crawl,
         args=[
             oj,
             run_id
         ]
     )
     p.start()
     p.join()
Example #25
 def crawl(self, origin_oj, username):
     p = Process(
         target=self._crawl,
         args=[
             origin_oj,
             username,
         ]
     )
     p.start()
     p.join()
Example #26
 def __init__(self, spider,key_word,crawl_num,n_crawls):
     Process.__init__(self)
     settings = get_project_settings()
     self.spider = spider
     self.crawler = Crawler(spider.__class__, settings)
     # self.crawler.configure()
     self.crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     self.n_crawls = n_crawls
     self.crawl_num = crawl_num
     self.key_word = key_word
Example #27
 def crawl(self, oj, username, password):
     p = Process(
         target=self._crawl,
         args=[
             oj,
             username,
             password
         ]
     )
     p.start()
     p.join()
Example #28
 def init_marker_cacher(self):
     forking_enable(0)  # disable os.fork; needed on MacOS only, a no-op elsewhere
     from marker_detector_cacher import fill_cache
     visited_list = [False if x == False else True for x in self.cache]
     video_file_path =  self.g_pool.capture.src
     timestamps = self.g_pool.capture.timestamps
     self.cache_queue = Queue()
     self.cacher_seek_idx = Value('i',0)
     self.cacher_run = Value(c_bool,True)
     self.cacher = Process(target=fill_cache,
                           args=(visited_list, video_file_path, timestamps,
                                 self.cache_queue, self.cacher_seek_idx,
                                 self.cacher_run, self.min_marker_perimeter_cacher))
     self.cacher.start()
Example #29
def main():

    # To assign camera by name: put string(s) in list
    eye_cam_names = ["USB 2.0 Camera","Microsoft", "6000","Integrated Camera","HD USB Camera"]
    world_src = ["Logitech Camera","(046d:081d)","C510","B525", "C525","C615","C920","C930e"]
    eye_src = (eye_cam_names,0),(eye_cam_names,1) #first match for eye0 and second match for eye1

    # to assign cameras directly, using integers as demonstrated below
    # eye_src =  4 , 5 #second arg will be ignored for monocular eye trackers
    # world_src = 1

    # to use a pre-recorded video.
    # Use a string to specify the path to your video file as demonstrated below
    # eye_src = '/Users/mkassner/Downloads/000/eye0.mkv' , '/Users/mkassner/Downloads/eye.avi'
    # world_src = "/Users/mkassner/Downloads/000/world.mkv"

    # Camera video size in pixels (width,height)
    eye_size = (640,480)
    world_size = (1280,720)


    # on MacOS we will not use os.fork, elsewhere this does nothing.
    forking_enable(0)

    #g_pool holds variables. Only if added here they are shared across processes.
    g_pool = Global_Container()

    # Create and initialize IPC
    g_pool.pupil_queue = Queue()
    g_pool.quit = Value(c_bool,0)
    g_pool.timebase = Value(c_double,0)
    g_pool.eye_tx = []
    # make some constants available
    g_pool.user_dir = user_dir
    g_pool.version = get_version(version_file)
    g_pool.app = 'capture'
    g_pool.binocular = binocular


    p_eye = []
    for eye_id in range(1+1*binocular):
        rx,tx = Pipe(False)
        p_eye += [Process(target=eye, args=(g_pool,eye_src[eye_id],eye_size,rx,eye_id))]
        g_pool.eye_tx += [tx]
        p_eye[-1].start()

    p_world = Process(target=world,args=(g_pool,world_src,world_size))
    # world(g_pool,world_src,world_size)
    p_world.start()
    p_world.join()
    # Exit / clean-up
    for p in p_eye:
        p.join()
Example #30
 def __init__(self, fun, args, postprocess, job):
     """ Build multiprocessing queues and start worker. """
     super(LongCalculation, self).__init__(job, "Cancel", 0, 0)
     self.setModal(True)
     self.input = Queue()
     self.output = Queue()
     self.input.put((fun, args, postprocess))
     self.proc = Process(target=worker, args=(self.input, self.output))
     self.proc.start()
     self.timer = QTimer()
     self.timer.timeout.connect(self.update)
     self.timer.start(10)
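The worker target isn't shown; given that the dialog feeds (fun, args, postprocess) tuples through self.input and polls self.output, a minimal compatible sketch would be:

def worker(input_queue, output_queue):
    # hypothetical worker: take one queued job, run it, and push back the
    # postprocessed result for the dialog's timer to pick up
    fun, args, postprocess = input_queue.get()
    output_queue.put(postprocess(fun(*args)))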
Example #31
    def run_player(self, graphdef):
        if 'mestate' in globals():
            self.mestate.console.write("Running Player...")
        else:
            self.mestate.child_pipe_send_console.send("Running Player...")
        self.player = player.MavPlay()
        self.player.add_mav(self.mlog)
        if platform.system() == 'Darwin':
            forking_enable(False)

        child = Process(target=self.player.run)
        child.start()
Example #32
def start_launch_servide_process(ld):
    """Starts a Launch Service process. To be called from subclasses.

    Args:
         ld : LaunchDescription obj.
    """
    # Create the LaunchService and feed the LaunchDescription obj. to it.
    ls = LaunchService()
    ls.include_launch_description(ld)
    p = Process(target=ls.run)
    # The daemon process is terminated automatically before the main program
    # exits, to avoid leaving orphaned processes running.
    p.daemon = True
    p.start()
Example #33
class CrawlerScript():

    def __init__(self):

        self.process = None
        self.items = []
        self._count = 0
        self.queue = None
        self._init_signals()


    def _init_signals(self):
        dispatcher.connect(self._so, signals.spider_opened)
        dispatcher.connect(self._item_scraped, signals.item_scraped)
        dispatcher.connect(self._sc, signals.spider_closed)


    def _so(self):
        # NOTE: the original also logged 'x': self.x here, but no `x` attribute is ever set
        write_in_a_file('spider_opened 1', {'open': 'open!', 'process': self.process, 'process-pid': self.process and self.process.pid}, "t.txt")


    def _sc(self):
        write_in_a_file('spider_closed', {'scraped items': len(self.items)}, "t.txt")

    def _item_scraped(self, item, **kwargs):
        self._count = self._count + 1
        write_in_a_file('item scraped', {'count':self._count, 'item': item, 'kwargs':kwargs, 'process': self.process, 'process-pid': self.process and self.process.pid}, "t.txt")
        self.items.append(item)
        self.queue.put_nowait(item)

    def _crawl(self, queue, spider):
        crawler = CrawlerProcess(get_project_settings())
        crawler.crawl(spider)
        write_in_a_file('signals', {'signals': dir(signals)}, 'task.txt')
        write_in_a_file('._crawl start', {'process': self.process, 'process-pid': self.process and self.process.pid, 'db': dir(db), 'db.connection': dir(db.connection)}, "t.txt")
        print(dir(db.connection))
        db.connection.close()
        crawler.start()
        crawler.stop()
        write_in_a_file('._crawl ended 1', {'qsize': self.queue.qsize() }, "t.txt")
        queue.put_nowait(self.items)
        write_in_a_file('._crawlended after q 2', {'qsize': queue.qsize()}, "t.txt")

    def crawl(self, spider):
        queue = Queue()
        self.queue = Queue()
        self.process = Process(target=self._crawl, args=(queue, spider))
        self.process.start()
        write_in_a_file('.crawl 1', {'process': self.process, 'process-pid': self.process and self.process.pid, 'queue': self.queue.qsize()}, "t.txt")
        self.process.join()
        write_in_a_file('.crawl 2', {'process': self.process,
                                     'process-pid': self.process and self.process.pid,
                                     'queue': self.queue.qsize()}, "t.txt")
Example #34
def run_crawler_process(params: dict) -> Process:
    """
    Start scrapy spider from a separate process
    :param dict params: scrapy spider parameters
    :return: process instance
    """
    process = Process(
        target=run_crawler,
        args=(params, ),
    )
    process.start()

    return process
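The run_crawler target comes from the host project and isn't reproduced with this snippet. A typical body, assuming it drives Scrapy directly and that params carries the spider class under hypothetical keys, might be:

def run_crawler(params: dict) -> None:
    # hypothetical target: run one blocking Scrapy crawl inside the child process
    from scrapy.crawler import CrawlerProcess
    from scrapy.utils.project import get_project_settings

    crawler = CrawlerProcess(get_project_settings())
    crawler.crawl(params["spider_cls"], **params.get("spider_kwargs", {}))
    crawler.start()  # blocks until the crawl finishes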
Example #35
def run_crawler(params):
    """Start the crawler in a separate process.

    :param params: crawler parameters, passed through to the child process
    :return: process instance
    """
    process = Process(
        target=crawler_process,
        args=(params, ),
    )
    process.start()

    return process
Example #36
    def __init__(self, current_dt, server, topic):

        Process.__init__(self)

        self.current_dt = current_dt
        self.server = server
        self.topic = topic

        self.crawler = Crawler(VIXSpiderSpider,
                               settings={'USER_AGENT': user_agent})

        self.crawler.signals.connect(reactor.stop,
                                     signal=scrapy_signals.spider_closed)
Example #37
    def start_process(self, command=None, billiard=False, *args, **kwargs):
        from .models import ExportTask
        from ..tasks.export_tasks import TaskStates

        if billiard:
            proc = Process(daemon=False, *args, **kwargs)
            proc.start()
            self.store_pid(pid=proc.pid)
            proc.join()
            self.exitcode = proc.exitcode
        else:
            proc = subprocess.Popen(command, **kwargs)
            (self.stdout, self.stderr) = proc.communicate()
            self.store_pid(pid=proc.pid)
            self.exitcode = proc.wait()

        # We need to close the existing connection because the logger could be
        # using a forked process, which will be invalid and throw an error.
        connection.close()
        export_task = ExportTask.objects.get(uid=self.task_uid)
        if export_task.status == TaskStates.CANCELED.value:
            from ..tasks.exceptions import CancelException
            raise CancelException(
                task_name=export_task.export_provider_task.name,
                user_name=export_task.cancel_user.username)
Example #38
def main():
    # To assign camera by name: put string(s) in list
    eye_src = ["Microsoft", "6000","Integrated Camera"]
    world_src = ["Logitech Camera","(046d:081d)","C510","B525", "C525","C615","C920","C930e"]

    # to assign cameras directly, using integers as demonstrated below
    # eye_src = 1
    # world_src = 0

    # to use a pre-recorded video.
    # Use a string to specify the path to your video file as demonstrated below
    # eye_src = '/Users/mkassner/Pupil/datasets/p1-left/frames/test.avi'
    # world_src = "/Users/mkassner/Desktop/2014_01_21/000/world.avi"

    # Camera video size in pixels (width,height)
    eye_size = (640,360)
    world_size = (1280,720)


    # on MacOS we will not use os.fork, elsewhere this does nothing.
    forking_enable(0)

    # Create and initialize IPC
    g_pool = Temp()
    g_pool.pupil_queue = Queue()
    g_pool.eye_rx, g_pool.eye_tx = Pipe(False)
    g_pool.quit = RawValue(c_bool,0)
    # this value will be subtracted from the capture timestamp
    g_pool.timebase = RawValue(c_double,0)
    # make some constants available
    g_pool.user_dir = user_dir
    g_pool.rec_dir = rec_dir
    g_pool.version = version
    g_pool.app = 'capture'
    # set up subprocesses
    p_eye = Process(target=eye, args=(g_pool,eye_src,eye_size))

    # Spawn subprocess:
    p_eye.start()
    if platform.system() == 'Linux':
        # We need to give the camera driver some time before requesting another camera.
        sleep(0.5)

    world(g_pool,world_src,world_size)

    # Exit / clean-up
    p_eye.join()
Example #39
def cmd_map(args):
    '''map command'''
    from mavflightview import mavflightview_mav, mavflightview_options
    #mestate.mlog.reduce_by_flightmodes(mestate.flightmode_selections)
    #setup and process the map
    options = mavflightview_options()
    options.condition = mestate.settings.condition
    options._flightmodes = mestate.mlog._flightmodes
    options.show_flightmode_legend = mestate.settings.show_flightmode
    if len(args) > 0:
        options.types = ','.join(args)
    [path, wp, fen, used_flightmodes, mav_type] = mavflightview_mav(mestate.mlog, options, mestate.flightmode_selections)
    child = Process(target=map_process, args=[path, wp, fen, used_flightmodes, mav_type, options])
    child.start()
    mestate.mlog.rewind()
Example #40
    def run(self, jobs):
        '''Start the Scrapy engine, and execute all jobs.

        Parms:
          jobs ([Job]) - one or more Job objects to be processed.

        '''
        if not isinstance(jobs, collections.abc.Iterable):  # collections.Iterable was removed in Python 3.10
            jobs = [jobs]
        self.validate(jobs)

        p = Process(target=self._crawl, args=[jobs])
        p.start()

        return p
Example #41
def cmd_map(args):
    '''map command'''
    from mavflightview import mavflightview_mav, mavflightview_options
    #mestate.mlog.reduce_by_flightmodes(mestate.flightmode_selections)
    #setup and process the map
    options = mavflightview_options()
    options.condition = mestate.settings.condition
    options._flightmodes = mestate.mlog._flightmodes
    options.show_flightmode_legend = mestate.settings.show_flightmode
    if len(args) > 0:
        options.types = ','.join(args)
    [path, wp, fen, used_flightmodes, mav_type] = mavflightview_mav(mestate.mlog, options, mestate.flightmode_selections)
    child = Process(target=map_process, args=[path, wp, fen, used_flightmodes, mav_type, options])
    child.start()
    mestate.mlog.rewind()
Example #42
def startLaunchServiceProcess(launchDesc):
    """Starts a Launch Service process. To be called from subclasses.

    Args:
         launchDesc : LaunchDescription obj.
    """
    # Create the LaunchService and feed the LaunchDescription obj. to it.
    launchService = LaunchService()
    launchService.include_launch_description(launchDesc)
    process = Process(target=launchService.run)
    # The daemon process is terminated automatically before the main program exits,
    # to avoid leaving orphaned processes running
    process.daemon = True
    process.start()

    return process
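Since the helper returns the process, a caller can keep a handle and wait for the launch service when it matters; a hedged usage sketch (makeLaunchDescription is a placeholder):

process = startLaunchServiceProcess(makeLaunchDescription())
# ... do other work while the launch service runs ...
process.join()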
Example #43
def main():

    # To assign camera by name: put string(s) in list
    eye_cam_names = [
        "USB 2.0 Camera", "Microsoft", "6000", "Integrated Camera",
        "HD USB Camera"
    ]
    world_src = [
        "Logitech Camera", "(046d:081d)", "C510", "B525", "C525", "C615",
        "C920", "C930e"
    ]
    eye_src = (eye_cam_names, 0), (eye_cam_names, 1)  # first match for eye0 and second match for eye1

    # to assign cameras directly, using integers as demonstrated below
    # eye_src =  4 , 5 #second arg will be ignored for monocular eye trackers
    # world_src = 1

    # to use a pre-recorded video.
    # Use a string to specify the path to your video file as demonstrated below
    # eye_src = '/Users/mkassner/Downloads/000/eye0.mkv' , '/Users/mkassner/Downloads/eye.avi'
    # world_src = "/Users/mkassner/Downloads/000/world.mkv"

    # Camera video size in pixels (width,height)
    eye_size = (640, 480)
    world_size = (1280, 720)

    # on MacOS we will not use os.fork, elsewhere this does nothing.
    forking_enable(0)

    #g_pool holds variables. Only if added here they are shared across processes.
    g_pool = Global_Container()

    # Create and initialize IPC
    g_pool.pupil_queue = Queue()
    g_pool.quit = Value(c_bool, 0)
    g_pool.timebase = Value(c_double, 0)
    g_pool.eye_tx = []
    # make some constants available
    g_pool.user_dir = user_dir
    g_pool.version = get_version(version_file)
    g_pool.app = 'capture'
    g_pool.binocular = binocular

    p_eye = []
    for eye_id in range(1 + 1 * binocular):
        rx, tx = Pipe(False)
        p_eye += [
            Process(target=eye,
                    args=(g_pool, eye_src[eye_id], eye_size, rx, eye_id))
        ]
        g_pool.eye_tx += [tx]
        p_eye[-1].start()

    world(g_pool, world_src, world_size)

    # Exit / clean-up
    for p in p_eye:
        p.join()
Example #44
 def __init__(self, title='MAVProxy: console'):
     if platform.system() == 'Darwin':
         forking_enable(False)
     textconsole.SimpleConsole.__init__(self)
     self.title = title
     self.menu_callback = None
     self.parent_pipe_recv, self.child_pipe_send = Pipe(duplex=False)
     self.child_pipe_recv, self.parent_pipe_send = Pipe(duplex=False)
     self.close_event = Event()
     self.close_event.clear()
     self.child = Process(target=self.child_task)
     self.child.start()
     self.child_pipe_send.close()
     self.child_pipe_recv.close()
     t = threading.Thread(target=self.watch_thread)
     t.daemon = True
     t.start()
Example #45
    def __init__(self, countries, importance, event_list, current_dt, server,
                 topic):

        Process.__init__(self)

        self.countries = countries
        self.importance = importance
        self.event_list = event_list
        self.current_dt = current_dt
        self.server = server
        self.topic = topic

        self.crawler = Crawler(EconomicIndicatorsSpiderSpider,
                               settings={'USER_AGENT': user_agent})

        self.crawler.signals.connect(reactor.stop,
                                     signal=scrapy_signals.spider_closed)
Example #46
class CrawlerScript():

    def __init__(self):
        self.crawler = None
        self.process = None
        self.items = []



    def _so(self):
        write_in_a_file('spider_opened 1', {}, "t.txt")

    def _sc(self):
        write_in_a_file('spider_closed', {'scraped items': len(self.items)}, "t.txt")

    def _so2(self):
        write_in_a_file('spider_opened 2', {}, "t.txt")

    def _item_scraped(self, item):
        write_in_a_file('item scraped', {'item': item}, "t.txt")
        self.items.append(item)

    def _crawl(self, queue, spider):
        self.crawler = CrawlerProcess(get_project_settings())
        self.crawler.crawl(spider)
        dispatcher.connect(self._item_scraped, signals.item_scraped)
        dispatcher.connect(self._so, signals.spider_opened)
        dispatcher.connect(self._so2, signals.spider_opened)
        dispatcher.connect(self._sc, signals.spider_closed)
        write_in_a_file('crawler start', {'db': dir(db), 'db.connection': dir(db.connection)}, "t.txt")
        print(dir(db.connection))
        db.connection.close()
        self.crawler.start()
        self.crawler.stop()
        write_in_a_file('crawler ended', {'qsize': queue.qsize() }, "t.txt")
        queue.put(self.items)

    def crawl(self, spider):
        queue = Queue()
        self.process = Process(target=self._crawl, args=(queue, spider,))
        self.process.start()
        write_in_a_file('crawler started', {'crawler': dir(self.crawler)}, "t.txt")
        return self.process, queue  # p.join()
Example #47
def main():
    # To assign camera by name: put string(s) in list
    eye_src = ["Microsoft", "6000","Integrated Camera"]
    world_src = ["Logitech Camera","B525", "C525","C615","C920","C930e"]

    # to assign cameras directly, using integers as demonstrated below
    # eye_src = 1
    # world_src = 0

    # to use a pre-recorded video.
    # Use a string to specify the path to your video file as demonstrated below
    # eye_src = "/Users/mkassner/Pupil/datasets/eye2_fieldtest/eye 10.avi"
    # world_src = "/Users/mkassner/Downloads/2013_10_22_M25/000/world.avi"

    # Camera video size in pixels (width,height)
    eye_size = (640,360)
    world_size = (1280,720)


    # on MacOS we will not use os.fork, elsewhere this does nothing.
    forking_enable(0)

    # Create and initialize IPC
    g_pool = Temp()
    g_pool.pupil_queue = Queue()
    g_pool.eye_rx, g_pool.eye_tx = Pipe(False)
    g_pool.quit = RawValue(c_bool,0)
    # make some constants available
    g_pool.user_dir = user_dir
    g_pool.rec_dir = rec_dir
    g_pool.version = version
    # set up subprocesses
    p_eye = Process(target=eye, args=(g_pool,eye_src,eye_size))

    # Spawn subprocess:
    p_eye.start()
    # On Linux, we need to give the camera driver some time before requesting another camera.
    sleep(0.5)
    # On MacOS cameras using MJPG compression (world camera) need to run in the main process.
    world(g_pool,world_src,world_size)

    # Exit / clean-up
    p_eye.join()
Example #48
def main():
    ProcessList = []
    for i in range(0, 100):
        ProcessList.append(Process(target=f, args=(i, )))
    for p in ProcessList:
        p.start()
    sleep(5)
    for p in ProcessList:
        p.join()
    print("main finished")
Example #49
 def init_marker_cacher(self):
     forking_enable(0)  # disable os.fork; needed on MacOS only, a no-op elsewhere
     from marker_detector_cacher import fill_cache
     visited_list = [False if x == False else True for x in self.cache]
     video_file_path =  self.g_pool.capture.src
     self.cache_queue = Queue()
     self.cacher_seek_idx = Value('i',0)
     self.cacher_run = Value(c_bool,True)
     self.cacher = Process(target=fill_cache,
                           args=(visited_list, video_file_path, self.cache_queue,
                                 self.cacher_seek_idx, self.cacher_run,
                                 self.min_marker_perimeter))
     self.cacher.start()
Example #50
 def crawl_endpoint_to_file(
         self,
         ip_address=None,
         port=None,
         hostname=None,
         use_ssl=False,
         use_sni=False,
         start_urls=[],
         in_separate_process=True,
 ):
     """
     Start crawling the given endpoint using the given list of URLs and write the results to
     a local file.
     :param ip_address: The IP address to crawl.
     :param port: The port where the application resides.
     :param hostname: The hostname to submit alongside all requests to the remote endpoint.
     :param use_ssl: Whether or not to use SSL to connect to the remote web service.
     :param use_sni: Whether or not to use SNI to connect to the remote web service.
     :param start_urls: A list of URLs to start crawling from.
     :param in_separate_process: Whether or not to spawn off a separate process for the crawl. This
     enables us to call this method multiple times in the same process, as a Twisted reactor can only
     be started and stopped once per process.
     :return: A tuple containing (1) the string containing the local file path where crawling
     results are stored and (2) a ScrapyResultWrapper configured to process the contents of the file.
     """
     temp_file_path = FilesystemHelper.get_temporary_file_path()
     local_file_path = "%s-%s-%s:%s" % (temp_file_path, self.bot_name, ip_address, port)
     spider_kwargs = {
         "input_ip_address": ip_address,
         "input_start_urls": start_urls,
         "input_file_path": local_file_path,
         "input_hostname": hostname,
         "input_use_ssl": use_ssl,
         "input_use_sni": use_sni,
         "input_port": port,
     }
     pipeline_settings = self.__get_local_storage_item_pipeline()
     requested_hostname = hostname if hostname is not None else ip_address
     settings = self.get_scrapy_settings(item_pipeline=pipeline_settings, hostname=requested_hostname)
     crawling_config = {
         "spider_kwargs": spider_kwargs,
         "settings": settings,
     }
     if in_separate_process:
         process = Process(target=self.__crawl, kwargs=crawling_config)
         process.start()
         process.join()
         process.terminate()
     else:
         self.__crawl(**crawling_config)
     return local_file_path, ScrapyResultWrapper.from_file(local_file_path)
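The private __crawl target receives spider_kwargs and settings via the kwargs above; a minimal sketch consistent with that interface (self.spider_class is a placeholder for however the bot selects its spider) would be:

 def __crawl(self, spider_kwargs=None, settings=None):
     # hypothetical body: run one blocking crawl inside this child process
     from scrapy.crawler import CrawlerProcess
     runner = CrawlerProcess(settings)
     runner.crawl(self.spider_class, **(spider_kwargs or {}))
     runner.start()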
Example #51
 def screenshot_endpoint(
     self,
     ip_address=None,
     port=None,
     hostname=None,
     use_ssl=False,
     use_sni=False,
     path="/",
     in_separate_process=False,
 ):
     """
     Take a screenshot of the given endpoint, save it to a local temporary file, and return the local
     file path.
     :param ip_address: The IP address where the web service resides.
     :param port: The port where the web service resides.
     :param hostname: The hostname to request.
     :param use_ssl: Whether or not to use SSL to request the endpoint.
     :param use_sni: Whether or not the endpoint uses SNI.
     :param path: The path of the resource to screenshot.
     :param in_separate_process: Whether or not to take the screenshot in a separate process. This is to
     address the incredibly long time that the Selenium webdriver can take when it hangs.
     :return: A tuple containing (1) the local file path where the screenshot was saved and (2) whether or not
     the screenshot was taken successfully.
     """
     logger.debug(
         "Now attempting to take a screenshot of the web service at %s:%s (%s). Hostname is %s, SNI support is %s."
         % (ip_address, port, "using SSL" if use_ssl else "plain HTTP",
            hostname, use_sni))
     self.__set_endpoint(
         ip_address=ip_address,
         port=port,
         hostname=hostname,
         use_ssl=use_ssl,
         use_sni=use_sni,
         path=path,
     )
     self._output_file_path = self.get_temporary_file_path()
     if in_separate_process:
         process = Process(target=self.__take_screenshot)
         try:
             process.start()
             process.join(config.selenium_screenshot_delay +
                          config.inspection_screenshot_join_timeout)
         except IOError as e:
             if e.errno == errno.EINTR:
                 logger.warning("Interrupted system call error received.")
             else:
                 raise e
         finally:
             if process.is_alive():
                 print("PROCESS IS ALIVE - PID IS %s" % (process.pid, ))
                 os.kill(process.pid, signal.SIGTERM)
     else:
         self.__take_screenshot()
     return self.output_file_path, FilesystemHelper.does_file_exist(
         self.output_file_path)
Example #52
    def add_export(self):
        # on MacOS we will not use os.fork, elsewhere this does nothing.
        forking_enable(0)

        logger.debug("Adding new export.")
        should_terminate = RawValue(c_bool, False)
        frames_to_export = RawValue(c_int, 0)
        current_frame = RawValue(c_int, 0)

        data_dir = self.data_dir
        start_frame = self.start_frame.value
        end_frame = self.end_frame.value
        plugins = []

        # Here we make clones of every plugin that supports it.
        # So it runs in the current config when we launch the exporter.
        for p in self.g_pool.plugins:
            try:
                p_initializer = p.get_class_name(), p.get_init_dict()
                plugins.append(p_initializer)
            except AttributeError:
                pass

        out_file_path = verify_out_file_path(self.rec_name.value,
                                             self.data_dir)
        process = Process(target=export,
                          args=(should_terminate, frames_to_export,
                                current_frame, data_dir, start_frame,
                                end_frame, plugins, out_file_path))
        process.should_terminate = should_terminate
        process.frames_to_export = frames_to_export
        process.current_frame = current_frame
        process.out_file_path = out_file_path
        self.new_export = process
Example #53
    def add_exports(self):
        # on MacOS we will not use os.fork, elsewhere this does nothing.
        forking_enable(0)

        outfiles = set()
        for d in self.new_exports:
            logger.debug("Adding new export.")
            should_terminate = RawValue(c_bool,False)
            frames_to_export  = RawValue(c_int,0)
            current_frame = RawValue(c_int,0)
            start_frame = None
            end_frame = None
            data_dir = d
            plugins = []

            # Here we make clones of every plugin that supports it.
            # So it runs in the current config when we launch the exporter.
            for p in self.g_pool.plugins:
                try:
                    p_initializer = p.get_class_name(),p.get_init_dict()
                    plugins.append(p_initializer)
                except AttributeError:
                    pass

            #make a unique name created from rec_session and dir name
            rec_session, rec_dir = data_dir.rsplit(os.path.sep,2)[1:]
            out_name = rec_session+"_"+rec_dir+".avi"
            out_file_path = os.path.join(self.destination_dir.value,out_name)
            if out_file_path in outfiles:
                logger.error("This export setting would try to save %s at least twice please rename dirs to prevent this. Skipping File"%out_file_path)
            else:
                outfiles.add(out_file_path)
                logger.info("Exporting to: %s"%out_file_path)

                process = Process(target=export,
                                  args=(should_terminate, frames_to_export,
                                        current_frame, data_dir, start_frame,
                                        end_frame, plugins, out_file_path))
                process.should_terminate = should_terminate
                process.frames_to_export = frames_to_export
                process.current_frame = current_frame
                process.out_file_path = out_file_path
                self.exports.append(process)
Example #54
def main():

    # IPC
    pupil_queue = Queue()
    timebase = Value(c_double, 0)

    cmd_world_end, cmd_launcher_end = Pipe()
    com0 = Pipe(True)
    eyes_are_alive = Value(c_bool, 0), Value(c_bool, 0)
    com1 = Pipe(True)
    com_world_ends = com0[0], com1[0]
    com_eye_ends = com0[1], com1[1]

    p_world = Process(
        target=world,
        args=(
            pupil_queue,
            timebase,
            cmd_world_end,
            com_world_ends,
            eyes_are_alive,
            user_dir,
            app_version,
            video_sources["world"],
        ),
    )
    p_world.start()

    while True:
        # block and listen for commands from world process.
        cmd = cmd_launcher_end.recv()
        if cmd == "Exit":
            break
        else:
            eye_id = cmd
            p_eye = Process(
                target=eye,
                args=(
                    pupil_queue,
                    timebase,
                    com_eye_ends[eye_id],
                    eyes_are_alive[eye_id],
                    user_dir,
                    app_version,
                    eye_id,
                    video_sources["eye%s" % eye_id],
                ),
            )
            p_eye.start()

    for p in active_children():
        p.join()
    logger.debug("Laucher exit")
Example #55
    def run(self, jobs):
        '''Start the Scrapy engine, and execute all jobs.  Return consolidated results
        in a single list.

        Parms:
          jobs ([Job]) - one or more Job objects to be processed.

        Returns:
          List of objects yielded by the spiders after all jobs have run.
        '''
        if not isinstance(jobs, collections.abc.Iterable):  # collections.Iterable was removed in Python 3.10
            jobs = [jobs]
        self.validate(jobs)

        p = Process(target=self._crawl, args=[jobs])
        p.start()
        p.join()
        p.terminate()

        return self.results.get()
Example #56
    def test_value(self, raw=False):
        if raw:
            values = [RawValue(code, value)
                      for code, value, _ in self.codes_values]
        else:
            values = [Value(code, value)
                      for code, value, _ in self.codes_values]

        for sv, cv in zip(values, self.codes_values):
            assert sv.value == cv[1]

        proc = Process(target=self._test, args=(values,))
        proc.daemon = True
        proc.start()
        proc.join()

        for sv, cv in zip(values, self.codes_values):
            assert sv.value == cv[2]
Example #57
    def start_process(self, command=None, billiard=False, *args, **kwargs):
        from eventkit_cloud.tasks.models import ExportTaskRecord
        from eventkit_cloud.tasks import TaskStates

        if billiard:
            proc = Process(daemon=False, *args, **kwargs)
            proc.start()
            self.store_pid(pid=proc.pid)
            proc.join()
            self.exitcode = proc.exitcode
        else:
            proc = subprocess.Popen(command, **kwargs)
            (self.stdout, self.stderr) = proc.communicate()
            self.store_pid(pid=proc.pid)
            self.exitcode = proc.wait()

        # We need to close the existing connection because the logger could be
        # using a forked process, which will be invalid and throw an error.
        connection.close()
        export_task = ExportTaskRecord.objects.filter(uid=self.task_uid).first()
        if export_task and export_task.status == TaskStates.CANCELED.value:
            from eventkit_cloud.tasks.exceptions import CancelException
            raise CancelException(task_name=export_task.export_provider_task.name,
                                  user_name=export_task.cancel_user.username)
Example #58
File: boot.py Project: Varato/qfbot
 def __init__(self, spider):
     Process.__init__(self)
     self.spider = spider