def __init__(self,arp_policy):
     """Initialize the SDX controller: parse the global/auto config files,
     start the dynamic policy-update and route-server threads, then queue
     an initial policy-compilation event."""
     # ARP handling policy supplied by the caller.
     self.arp_policy = arp_policy
     sdxPolicy = SDXPolicy()
     self.policy=sdxPolicy
     cwd = os.getcwd()
     sdx_autoconf=cwd+'/pyretic/sdx/sdx_auto.cfg'
     # NOTE(review): `auto` is not defined in this block -- presumably a
     # module-level flag; confirm it is in scope when this runs.
     self.sdx = sdx_parse_config(cwd+'/pyretic/sdx/sdx_global.cfg',sdx_autoconf,auto)
     print "config parser completed "
     
     ''' Event handling for dynamic policy compilation '''  
     event_queue = Queue()
     ready_queue = Queue()
     
     ''' Dynamic update policy thread '''
     # NOTE(review): "hadler" looks like a typo for "handler"; the name must
     # match the function defined elsewhere in this module.
     dynamic_update_policy_thread = Thread(target=dynamic_update_policy_event_hadler, args=(event_queue,ready_queue, self.update_policy))
     dynamic_update_policy_thread.daemon = True
     dynamic_update_policy_thread.start()   
     
     ''' Router Server interface thread '''
     # TODO: confirm if we need RIBs per participant or per peer!
     rs = route_server(event_queue, ready_queue, self.sdx)        
     rs_thread = Thread(target=rs.start)
     rs_thread.daemon = True
     rs_thread.start()
     
     ''' Update policies'''
     # Kick off the first policy compilation.
     event_queue.put("init")
Exemple #2
0
	def Privmsg(self, msg):
		'''Parse a PRIVMSG for commands and dispatch them.

		Extracts nick/host/action/location/text/command from the raw
		message, logs it, and -- when the text starts with the command
		prefix and names a registered command -- checks the caller's
		access level (and, for protected commands, hostname) before
		running the handler on a daemon thread.
		'''
		try:
			Nick = self.Nick(msg)
			Host = self.Host(msg)
			Action = self.Action(msg)
			Location = self.Loc(msg)
			Text = self.Text(msg)
			Cmd = self.Cmd(msg)
			Msg = [Nick, Host, Action, Location, Text, Cmd]
			print("* [Privmsg] [{0}] <{1}> {2}".format(Location, Nick, Text))

			cmdVar = '$'

			def run_command():
				# BUGFIX: the handler used to be *called* inline and its
				# return value passed as Thread(target=...); the callable
				# itself must be the target, with Msg as its argument.
				handler = self.command.cmds[Cmd[1:]][0]
				t = Thread(target=handler, args=(Msg,))
				t.daemon = True
				t.start()

			'''If a command is called, check the hostname and access level of the person who called it, and if they have access, execute the command.'''
			if Cmd.startswith(cmdVar) and Cmd[1:] in self._commands:
				check = self.allowed.levelCheck(Nick)[1]
				if check[1] <= self.command.cmds[Cmd[1:]][1]:
					if self.command.cmds[Cmd[1:]][2]:
						# Host-locked command: recorded hostname must match.
						if Host == check[0]:
							run_command()
						else:
							self.SendNotice(Nick, "You do not have the required authority to use this command.")
					else:
						run_command()

		# FIX: `except Exception, e` is Python-2-only syntax; `as` works on
		# both Python 2.6+ and 3.
		except Exception as e:
			print("* [Privmsg] Error")
			print(str(e))
Exemple #3
0
    def __init__(self, addresses, type=peers.ANALYSIS):
        """Set up the analysis master: sample buffer, classifier, and the
        two background worker threads (classification and learning)."""
        super(AnalysisMaster, self).__init__(addresses=addresses, type=type)
        # Subclass-specific parameter initialisation.
        self.init_params()

        # One buffer channel per configured channel name.
        n_channels = len(self.config.get_param('channel_names').split(';'))
        self.buffer = self.create_buffer(n_channels, self._buffer_ret_func)

        # Optional pre-trained classifier ("wisdom") loaded from disk.
        if self.config.has_param('wisdom_path'):
            self.wisdom_path = self.config.get_param('wisdom_path')
        else:
            self.wisdom_path = None
        self._classifier = self._get_classifier()

        # Work queues feeding the two worker threads below.
        self.queue_classification = Queue()
        self.queue_learning = Queue()

        # Daemon workers: classification first, then learning.
        for worker_fn in (self._run_classification, self._run_learning):
            worker = Thread(target=worker_fn)
            worker.daemon = True
            worker.start()

        self.ready()
Exemple #4
0
def start_watching(tasks, syncmap, stores, cpool, period=60):
    """Spawn watcher threads: one per remote IMAP mailbox plus a single
    thread tracking all local maildir changes every *period* seconds."""
    for store_name, mailbox, local_path in syncmap:
        store = stores[store_name]
        if 'imapstore' not in store:
            continue

        def make_connection(con, store=store):  # bind store per iteration
            # Reconnect an existing connection, otherwise open a new one.
            if not con:
                return cpool.get_or_create_connection(
                    store['host'], store['user'], store['pass'],
                    store['port'], store['ssltype'])
            logger.debug('trying to reconnect')
            return cpool.reconnect(con, store['pass'], store['ssltype'])

        cb = get_watch_callback(tasks, store_name, mailbox, local_path)
        remote_worker = Thread(target=watch_errors,
                               args=(make_connection, local_path, cb, tasks))
        remote_worker.daemon = True
        remote_worker.start()

    # A single thread is enough for every local maildir.
    local_worker = Thread(target=watch_local, args=(tasks, period))
    local_worker.daemon = True
    local_worker.start()
Exemple #5
0
 def start(self, shell_command):
     """Start a command running in the sandbox.

     Raises SandboxError when a command is already in progress or the
     process cannot be spawned; otherwise attaches daemon threads that
     drain stdout/stderr into queues and a writer feeding stdin.
     """
     if self.is_alive:
         raise SandboxError("Tried to run command with one in progress.")
     self.child_queue = Queue()
     # Normalise path separators before splitting into an argv list.
     argv = shlex.split(shell_command.replace('\\', '/'))
     try:
         self.command_process = subprocess.Popen(
             argv,
             stdin=subprocess.PIPE,
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
             cwd=self.working_directory)
     except OSError:
         raise SandboxError('Failed to start {0}'.format(argv))
     self._is_alive = True
     # One monitor thread per output stream.
     for stream, sink in ((self.command_process.stdout, self.stdout_queue),
                          (self.command_process.stderr, self.stderr_queue)):
         monitor = Thread(target=_monitor_file, args=(stream, sink))
         monitor.daemon = True
         monitor.start()
     # Writer thread pumps queued input into the child's stdin.
     Thread(target=self._child_writer).start()
Exemple #6
0
def run():
    """Resolve the cluster name, persist the Elasticsearch config, ensure
    the service is running, then serve the web app and backup worker as
    daemon threads while the main thread sleeps forever."""
    if 'CLOUD_CLUSTER' in os.environ:
        name = os.environ['CLOUD_CLUSTER']
    elif len(sys.argv) > 1:
        name = sys.argv[1]
    else:
        raise Exception("No name provided, please provide it as 1st argument")

    cluster = Cluster(name)

    try:
        config = ElasticSearchConfig(cluster)
    except IOError:
        # Fall back to an explicit config path from the command line.
        if len(sys.argv) <= 2:
            raise Exception("No config path provided, please provide it as 2nd argument")
        config = ElasticSearchConfig(cluster, sys.argv[2])
    config.save()

    service = ElasticSearchService()
    if not service.running():
        service.start()

    # Web server first, then the backup worker.
    for worker_fn in (app.run, backup_thread):
        worker = Thread(target=worker_fn)
        worker.daemon = True
        worker.start()

    while True:  # Keep main thread alive until cancelled
        time.sleep(600)
Exemple #7
0
        def test_record_lock(self):
            """Exercise RecordLock: it must refuse to lock while the backing
            SHLock is held, allow re-entrant locking of one record, block a
            second thread on the same record, and not block across records."""
            shlock = SHLock()
            lock = RecordLock(shlock)

            # Locking a record while the shared lock is held must fail.
            shlock.acquire()
            self.assertRaises(LockingError, lock.lock, 1)
            shlock.release()
            # Re-entrant: the same thread may nest locks on one record.
            with lock.lock(1):
                with lock.lock(1):
                    pass

            def dolock():
                # Hold record 1 briefly on a worker thread.
                with lock.lock(1):
                    time.sleep(0.1)

            # Contending thread stays blocked while we hold record 1, then
            # completes shortly after we release it.
            t = Thread(target=dolock)
            t.daemon = True
            with lock.lock(1):
                t.start()
                t.join(0.2)
                self.assertTrue(t.is_alive())
            t.join(0.11)
            self.assertFalse(t.is_alive())

            # Holding a *different* record (2) must not block record 1.
            t = Thread(target=dolock)
            t.daemon = True
            with lock.lock(2):
                t.start()
                t.join(0.11)
                self.assertFalse(t.is_alive())
def mqttStart(port=MQTT_BROKER_PORT):
  """Start the MQTT broker plus the subscriber/WebSocket helper threads.

  Spawns the broker process, waits for its startup banner (or an error),
  runs the subscriber client and the WebSocket bridge on daemon threads,
  then blocks until the broker exits (closing it on interruption).

  :param port: TCP port the broker is expected to listen on.
  """
  # Wait for POX intro message to disappear -G
  time.sleep(1)

  # Run MQTT broker and wait for it to start or give an error -G
  mqttBrokerProcess = spawn(MQTT_BROKER_COMMAND)
  # BUGFIX: the `port` parameter was ignored (the module constant was used
  # instead); also use raw strings so the regex escapes are not mangled.
  successMessage = r"\d+\: Opening ipv6 listen socket on port " + str(port)
  failureMessage = r"\d+\: Error:.*\n"
  returnCode = mqttBrokerProcess.expect([successMessage, failureMessage])
  if returnCode == 0:
    log.info("MQTT broker created")
  if returnCode == 1:
    log.warning("Failed to create MQTT broker:\n " + mqttBrokerProcess.after)

  # Run MQTT subscriber client in separate thread -G
  mqttSubscriberThread = Thread(target=mqttSubscribe)
  mqttSubscriberThread.daemon = True
  mqttSubscriberThread.start()

  # Run MQTT WebSocket server in separate thread -G
  mqttWebServeThread = Thread(target=mqttWebServe)
  mqttWebServeThread.daemon = True
  mqttWebServeThread.start()

  # Keep broker running until thread exits, then close it -G
  try:
    mqttBrokerProcess.wait()
  except:  # deliberately broad: forward any interruption as Ctrl-C to broker
    mqttBrokerProcess.sendcontrol('c') # send control-c keyboard command
    mqttBrokerProcess.close()
Exemple #9
0
 def convert(self, config):
     """Convert every queued file to 8-bit x264 and remux it.

     For each input an '[8bit]'-prefixed output is produced via a
     temporary '[TMP]' file; the temp file is always removed and the
     per-file counter advances even on failure.  Honours cancel_event.
     """
     self.reset()

     def run_step(cmd):
         # Run one external tool, streaming its stdout into output_queue.
         self.process = Popen(cmd, stdout=PIPE, stderr=STDOUT, bufsize=1,
                              close_fds=ON_POSIX, universal_newlines=True)
         pump = Thread(target=enqueue_output,
                       args=(self.process.stdout, self.output_queue))
         pump.daemon = True
         pump.start()
         self.process.wait()

     for src in self.file_paths:
         if self.cancel_event.is_set():
             break
         folder = os.path.dirname(src)
         base = os.path.basename(src)
         out_path = os.path.join(folder, '[8bit]' + base)
         tmp_path = os.path.join(folder, '[TMP]' + base)
         try:
             # Pass 1: encode to the temp file; pass 2: merge everything
             # except the original video track into the final output.
             run_step([config['x264'], '--preset', config['preset'],
                       '--tune', config['tune'], '--crf', config['crf'],
                       '--quiet', src, '--output', tmp_path])
             run_step([config['mkvmerge'], tmp_path,
                       '-D', src, '-o', out_path])
         finally:
             if os.path.exists(tmp_path):
                 os.remove(tmp_path)
             self.file_done += 1
     self.all_done = True
Exemple #10
0
    def __init__(self):
        """Create one export thread per exportable location plus a single
        export-job cleaning thread, keeping their events in self.events."""
        self.events = {}
        rest_trig.manager = self

        def launch(key, name, target, extra=()):
            # Register a control event under *key*, then start a daemon
            # thread running *target* with the event as first argument.
            ev = Event()
            self.events[key] = ev
            worker = Thread(target=target, name=name, args=(ev,) + extra)
            worker.daemon = True
            worker.start()

        for location in get_locations_by_type(*EXPORTABLE).entries:
            logging.debug("Setting up export thread [Exporter%i].", location.id)
            launch(location.id, "Exporter%i" % (location.id),
                   exporting_loop, (location,))

        logging.debug("Setting up export job cleaning thread [ExportCleaner].")
        launch('clean', "ExportCleaner", cleaning_loop)
Exemple #11
0
    def run(self):
        """
        Run the Component and Client threads: start listening and/or polling
        according to the configuration, then loop forever dispatching
        messages received from the Client thread.
        """
        def spawn(target):
            worker = Thread(target=target)
            worker.daemon = True
            worker.start()

        if self.config is None:
            # Default: both Client and Component are listeners.
            spawn(self.listen_in_background)
        else:
            # Listen if either side is configured as a Listener.
            client_listens = ("Client" in self.config
                              and "Listener" in self.config["Client"])
            component_listens = ("Component" in self.config
                                 and "Listener" in self.config["Component"])
            if client_listens or component_listens:
                spawn(self.listen_in_background)
            # Poll if the Client initiates (InitiatorComponent is handled
            # in handle_message()).
            if "Initiator" in self.config["Client"]:
                spawn(self.poll_in_background)

        # Dispatch messages arriving from the Client thread.
        while True:
            if not self.from_cli.empty():
                msg, identity = self.from_cli.get()
                self.handle_message(msg, identity)
            sleep(0.1)
Exemple #12
0
    def __init__(self, arp_policy):
        ''' list of IP to MAC mapping '''
        self.arp_policy = arp_policy

        print "Initialize SDX"
        super(sdx_policy, self).__init__()

        print "SDX:", self.__dict__

        self.sdx = sdx_parse_config(cwd + '/pyretic/sdx/sdx_global.cfg')

        ''' Event handling for dynamic policy compilation '''
        event_queue = Queue()
        ready_queue = Queue()

        ''' Dynamic update policy thread '''
        dynamic_update_policy_thread = Thread(
            target=dynamic_update_policy_event_hadler,
            args=(
                event_queue,
                ready_queue,
                self.update_policy))
        dynamic_update_policy_thread.daemon = True
        dynamic_update_policy_thread.start()

        ''' Router Server interface thread '''
        # TODO: confirm if we need RIBs per participant or per peer!
        rs = route_server(event_queue, ready_queue, self.sdx)

        rs_thread = Thread(target=rs.start)
        rs_thread.daemon = True
        rs_thread.start()

        ''' Update policies'''
        event_queue.put("init")
Exemple #13
0
  def _setup_vsim_process(self):
    """Launch a batch-mode vsim, attach queue-fed reader threads to its
    stdout/stderr, and block until our startup commands are processed."""
    print('\n' + color.success('*** Starting Modelsim ***'))
    env = { 'MGC_WD': os.getcwd(), 'PATH': os.environ['PATH'] }
    self.p = subp.Popen(['vsim', '-c', '-l', self.log_file], env=env,
                        stdin=subp.PIPE, stderr=subp.PIPE, stdout=subp.PIPE)

    self.outq = queue.Queue()
    self.errq = queue.Queue()

    # One daemon reader per pipe so vsim can never block on a full pipe.
    for pipe, sink in ((self.p.stdout, self.outq), (self.p.stderr, self.errq)):
      reader = Thread(target=enqueue_pipe, args=(pipe, sink))
      reader.daemon = True
      reader.start()

    # Keep the process from dying on an elaboration error
    self.p.stdin.write('onElabError resume\n')

    # Define a dummy sentinel proc; seeing it echoed back means Modelsim
    # has started and processed our commands.
    self.p.stdin.write('proc sentinel {} {}\n')
    self.p.stdin.flush()

    while 'sentinel' not in get_output(self.outq):
      pass
Exemple #14
0
 def connect(self, password):
     """
         Connect lastfm
         @param password as str/None
     """
     def connect_async(login, pwd):
         # Run the blocking connect off the main (UI) thread.
         worker = Thread(target=self._connect, args=(login, pwd, True))
         worker.daemon = True
         worker.start()

     if self._goa:
         # GNOME Online Accounts supplies credentials itself.
         connect_async('', '')
     # Get username/password from GSettings/Secret
     elif Secret is not None and\
             Gio.NetworkMonitor.get_default().get_network_available():
         self._username = Lp().settings.get_value(
                                                'lastfm-login').get_string()
         if password is None:
             # Look the password up asynchronously in the keyring.
             schema = Secret.Schema.new("org.gnome.Lollypop",
                                        Secret.SchemaFlags.NONE,
                                        SecretSchema)
             Secret.password_lookup(schema, SecretAttributes, None,
                                    self._on_password_lookup)
         else:
             connect_async(self._username, password)
Exemple #15
0
    def start(self):
        """Spawn the child cmd.exe console process.

        Uses self.env as the (overridden) environment and
        self.physical_path as the working directory, then starts two
        daemon threads: one reading process output and one watching for
        idle sessions (the JavaScript front end polls periodically for
        new output; when polling stops, it is time to shut down).
        """
        self.process = subprocess.Popen(
            'cmd.exe',
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=self.env,
            bufsize=1,
            cwd=self.physical_path
        )

        # Non-blocking readers over the child's output pipes.
        self.stdout_reader = OutputReader(self.process.stdout)
        self.stderr_reader = OutputReader(self.process.stderr)

        for worker_fn in (self.read_output, self.check_idle_thread):
            worker = Thread(target=worker_fn)
            worker.daemon = True
            worker.start()
Exemple #16
0
def background_spawn(args, redirect_stderr, output_fn):
    """Run *args* as a subprocess, streaming its output to *output_fn*.

    Each output line (stripped of '\\r'/'\\n') is passed to *output_fn*
    from a daemon thread.  With redirect_stderr=True stderr gets its own
    pipe and pump thread; otherwise it is merged into stdout.

    :param args: argv list for subprocess.Popen.
    :param redirect_stderr: give stderr a dedicated pipe and thread.
    :param output_fn: callable invoked with each decoded output line.
    """
    def pump(stream, sink):
        # BUGFIX: the pipes yield *bytes*, but the old code called
        # str.replace on them (TypeError on Python 3) -- decode first.
        # Also merges the two previously duplicated identical helpers.
        for raw in iter(stream.readline, b''):
            sink(raw.decode('utf-8', 'replace')
                    .replace('\r', '').replace('\n', ''))
        stream.close()

    if redirect_stderr:
        used_stderr = subprocess.PIPE
    else:
        used_stderr = subprocess.STDOUT
    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=used_stderr,
                         bufsize=1, close_fds='posix' in sys.builtin_module_names)
    to = Thread(target=pump, args=(p.stdout, output_fn))
    to.daemon = True
    to.start()
    if redirect_stderr:
        te = Thread(target=pump, args=(p.stderr, output_fn))
        te.daemon = True
        te.start()
Exemple #17
0
   def __init__(self, socket_map, mav_iface, send_interval, dispatcher):
      """Set up the MAVLink bridge: receive/send threads, mode flags and
      sensor bitmasks, load/power readers; blocks until the first power
      sample arrives."""
      # BUGFIX: Bridge.__init__ was invoked twice with identical arguments
      # (once here and once again mid-method); a single call suffices.
      Bridge.__init__(self, socket_map, mav_iface, send_interval)
      self.dead = False
      recv_thread = Thread(target = self._receive)
      recv_thread.daemon = True
      send_thread = Thread(target = self._send)
      send_thread.daemon = True
      self.dispatcher = dispatcher
      # Flags advertised while the vehicle runs autonomously.
      self.auto_mode_flags = MAV_MODE_FLAG_SAFETY_ARMED \
         | MAV_MODE_FLAG_MANUAL_INPUT_ENABLED \
         | MAV_MODE_FLAG_STABILIZE_ENABLED \
         | MAV_MODE_FLAG_GUIDED_ENABLED \
         | MAV_MODE_FLAG_AUTO_ENABLED

      self.csb = ControlSensorBits()
      # Every sensor present is also reported enabled and healthy.
      self.sensors_present = self.csb.bits(['GYRO_3D', 'ACC_3D', 'MAG_3D',
         'PRESSURE_ABS', 'GPS', 'ANGLE_RATE_CONTROL', 'ATTITUDE_CTRL',
         'YAW_CTRL', 'ALTITUDE_CTRL', 'XY_CTRL', 'MOTOR_CTRL'])
      self.sensors_enabled = self.sensors_present
      self.sensors_health = self.sensors_present
      self._load_reader = LoadReader()
      self._power_reader = PowerReader(socket_map['power_mon'])
      recv_thread.start()
      send_thread.start()
      self._load_reader.start()
      self._power_reader.start()
      # Wait for the first power reading before declaring the bridge ready.
      self._power_reader.wait_data()
Exemple #18
0
def main(name):
   """Start the CPU/power/GPS reader threads, then broadcast a heartbeat
   message with telemetry over the aircomm socket once per second, forever.

   :param name: node name used to build the socket map.
   """
   global socket_map, font, caution_written
   socket_map = generate_map(name)

   # Background readers update the module-level telemetry variables.
   for reader in (cpu_reader, pm_reader, gps_reader):
      worker = Thread(target = reader)
      worker.daemon = True
      worker.start()

   socket = generate_map('aircomm_app')['aircomm_in']
   while True:
      try:
         data = [BCAST_NOFW, HEARTBEAT, int(voltage * 10), int(current * 10), int(load), mem_used(), critical]
         try:
            # Position is optional; skip it until a GPS fix exists.
            data += [gps[LAT], gps[LON]]
         except:
            pass
         socket.send(dumps(data))
      # FIX: `except Exception, e` / `print e` are Python-2-only syntax;
      # these forms are valid on both Python 2.6+ and 3.
      except Exception as e:
         print(e)
      sleep(1.0)
Exemple #19
0
 def _on_activate_link(self, link, item):
     """
         Update header with new link
         @param link as Gtk.LinkButton
         @param item as TuneIn Item
     """
     if item.TYPE == "link":
         # Navigation: remember where we came from, then load the page.
         self._scrolled.get_vadjustment().set_value(0.0)
         if self._current_url is not None:
             self._previous_urls.append(self._current_url)
         self.populate(item.URL)
     elif item.TYPE == "audio":
         # Only toolbar will get this one, so only create small in cache
         if Gio.NetworkMonitor.get_default().get_network_available():
             # Cache the logo at toolbar size first, then at big size.
             for size in (Lp().window.toolbar.artsize, ArtSize.BIG):
                 worker = Thread(target=Lp().art.copy_uri_to_cache,
                                 args=(item.LOGO, item.TEXT, size))
                 worker.daemon = True
                 worker.start()
         Lp().player.load_external(item.URL, item.TEXT)
         Lp().player.play_this_external(item.URL)
     return True
Exemple #20
0
 def _on_row_activated(self, widget, row):
     """
         Play searched item when selected
         @param widget as Gtk.ListBox
         @param row as SearchRow
     """
     def play_async():
         # Resolve and play the result off the main thread.
         worker = Thread(target=self._play_search,
                         args=(row.get_id(), row.is_track()))
         worker.daemon = True
         worker.start()

     if not (Lp().player.is_party() or Lp().player.locked or
             Lp().player.queued):
         play_async()
     elif row.is_track():
         if Lp().player.locked or Lp().player.queued:
             # Toggle queue membership for the selected track.
             if row.get_id() in Lp().player.get_queue():
                 Lp().player.del_from_queue(row.get_id())
             else:
                 Lp().player.append_to_queue(row.get_id())
             row.destroy()
         else:
             Lp().player.load(Track(row.get_id()))
     elif Gtk.get_minor_version() > 16:
         # Albums get a popover on recent GTK versions.
         popover = AlbumPopover(row.get_id(), [], [])
         popover.set_relative_to(row)
         popover.show()
     else:
         play_async()
Exemple #21
0
def run(*args):
    """Gets the functions directly via arguments, or indirectly
    via command decorator and builds command line utility
    executing the functions as command line parameters.
    """
    global functions
    functions = functions + list(args)

    parser = argparse.ArgumentParser(description=_info())
    auto.generate(parser, *functions)

    parsed = parser.parse_args()
    selected = parsed.fn

    Shipper.startup()

    # Run the selected function on a worker thread.
    worker = Thread(target=selected, args=(parsed,))
    worker.daemon = True
    worker.start()

    def shutdown_when_done(th):
        # When the worker finishes, tear down and stop the reactor.
        th.join()
        Shipper.shutdown()
        reactor.callFromThread(reactor.stop)

    watcher = Thread(target=shutdown_when_done, args=(worker,))
    watcher.daemon = True
    watcher.start()

    reactor.run()
 def client(self):
     """ the client task is to inform (heartbeating) the manager server
     and issue sending something to a random other client"""

     def spawn(target, args=()):
         worker = Thread(target=target, args=args)
         worker.daemon = True
         worker.start()
         return worker

     # Heart-beat and receiver threads toward the manager server.
     spawn(self.heart_beat_thread)
     spawn(self.receiving_thread)

     # Flow generation toward servers ...
     t = spawn(self.flow_gen_thread, ('server',))

     if self.args.type != 'server':
         # ... and toward the other clients.
         t = spawn(self.flow_gen_thread, ('client',))

     # Block on the most recently started flow generator so the client
     # keeps running while it works (all workers are daemon threads).
     t.join()
Exemple #23
0
def successAction(request):
    """Handle a success message: on the client side open the requested
    tunnel front end (ssh/web/vnc) or disconnect; on the tunneler side
    publish the response back to the client."""
    target = request['target']
    if target == 'client':
        if request['disconnect'] == "true":
            terminateThreads()
            return

        def launch(fn, arg):
            # Start a tracked daemon thread for the chosen front end.
            tunnels_on_select = Thread(target=fn, args=[arg])
            tunnels_on_select.daemon = True
            tunnels_on_select.start()
            addThreadsRunning(tunnels_on_select)

        connection = request['connection_type']
        if connection == "ssh":
            print("opening terminal!")
            launch(startSSH, "ssh -v " + getpass.getuser() +
                   "@localhost -p " + request['local_port'])
        elif connection == "web":
            print("opening browser!")
            launch(startWEB, "http://localhost:3000")
        elif connection == "vnc":
            print("opening vnc!")
            launch(startVNC, request['local_port'])
    elif target == 'tunneler':
        successResponse = request['messageToClient']
        pubSubClient.publish(routing_key=routingKey,
                             body=bytes(json.dumps(successResponse), 'utf-8'))
Exemple #24
0
	def run(self,job, override_requirements=False, run_local = False):
		"""Start *job* either on a local daemon thread or as a worker process.

		:param job: job object; must satisfy job.can_run() unless
		    override_requirements is set.
		:param override_requirements: skip the dependency check.
		:param run_local: run in-process on a thread instead of spawning.
		:raises Exception: when no project/session is selected, too many
		    jobs are running, or dependencies are not satisfied.
		"""
		# FIX: isinstance() instead of `type(x) == bool`; a bool here is the
		# "nothing selected" sentinel used elsewhere -- confirm upstream.
		if isinstance(self.project, bool):
			raise Exception("No Project selected")
		if isinstance(self.project.session, bool):
			raise Exception("No Session selected")
		if len(self.processes) > self.max_threads:
			raise Exception("Too many jobs running.")
		# Guard clause replaces the old trailing else branch.
		if not (job.can_run() or override_requirements):
			raise Exception("Dependencies not satisfied")
		if run_local:
			t = Thread(target = job.run)
			t.daemon = True # thread dies with the program
			t.start()
			self.job_threads.append(t)
			return
		# Spawn a worker process and track its output through a queue.
		start_line = copy(self.run_prefixes)
		start_line.extend([self.project.folder, self.project.session.path, str(job.job_number)])
		self.processes.append(subprocess.Popen(start_line, stdout=subprocess.PIPE, stderr=subprocess.PIPE))
		start_line = " ".join(start_line)
		self.process_jobs.append(job)
		job.process = "pid:" + str(self.processes[-1].pid)
		job.set_status("starting...")
		q = Queue()
		t = Thread(target=enqueue_output, args=(self.processes[-1].stdout, q))
		self.process_queue.append(q)
		self.process_thread.append(t)
		self.process_out.append(start_line+"\n")
		t.daemon = True # thread dies with the program
		t.start()
Exemple #25
0
   def __init__(self, server_address, server_key, client_cert_prefix):
      """Connect an authenticated DEALER socket to the server and start
      the message-delegating and request-handling worker threads."""
      self.context = zmq.Context.instance()

      # Load client private & public keys
      pub, sec = common.get_keys(client_cert_prefix)

      # Set up socket and connect to server
      self.server_socket = common.create_socket(self.context,
         zmq.DEALER,
         sec,
         pub,
         server_key)
      self.server_socket.connect(server_address)

      # Incoming requests are pushed here for the handler thread.
      request_queue = Queue()

      # Listeners interested in server replies.
      self.reply_listeners = []

      def start_worker(fn, args):
         worker = Thread(target=fn, args=args)
         worker.daemon = True
         worker.start()

      # Delegate incoming messages to the queue and reply listeners.
      start_worker(self.delegate_messages,
                   (self.server_socket, request_queue, self.reply_listeners))
      # Serve queued requests back over the socket.
      start_worker(self.reply_to_requests,
                   (request_queue, self.server_socket))

      self.state = STATE_READY
Exemple #26
0
	def start(self, shell_command):
		"""Start a command running in the sandbox.

		Wraps *shell_command* in a `docker run ... sh -c '...'` invocation,
		spawns it, and attaches daemon threads that monitor stdout/stderr
		and feed stdin from the child queue.

		Raises SandboxError if a command is already in progress or the
		process cannot be started.
		"""
		# NOTE(review): shell_command is spliced into a single-quoted
		# `sh -c '...'` string; a single quote inside it breaks out of the
		# quoting (potential command injection) -- confirm inputs are
		# trusted or escape them (e.g. shlex.quote).
		shell_command = "docker run -v /var/www/nycsl/problems/workers/workingPath:/var/www/nycsl/problems/workers/workingPath --privileged=true virtual_machine sh -c \'" + shell_command + "\'"
		print("Shell command")
		print(shell_command)
		if self.is_alive:
			raise SandboxError("Tried to run command with one in progress.")
		working_directory = self.working_directory
		self.child_queue = Queue()
		# Normalise path separators, then split into an argv list.
		shell_command = shlex.split(shell_command.replace('\\','/'))
		try:
			self.command_process = subprocess.Popen(shell_command,
													stdin=subprocess.PIPE,
													stdout=subprocess.PIPE,
													stderr=subprocess.PIPE,
													universal_newlines=True,
													cwd=working_directory)
		except OSError:
			print("There was an error")
			raise SandboxError('Failed to start {0}'.format(shell_command))
		self._is_alive = True
		# Daemon threads drain the child's output pipes into the queues.
		stdout_monitor = Thread(target=_monitor_file,
								args=(self.command_process.stdout, self.stdout_queue))
		stdout_monitor.daemon = True
		stdout_monitor.start()
		stderr_monitor = Thread(target=_monitor_file,
								args=(self.command_process.stderr, self.stderr_queue))
		stderr_monitor.daemon = True
		stderr_monitor.start()
		# Writer thread feeds queued input to the child's stdin.
		Thread(target=self._child_writer).start()
Exemple #27
0
def main():
	"""Multithreaded cat: copy stdin to stdout using separate reader and
	writer threads connected through a Pipe buffer."""
	parser = ArgumentParser(description='Multithreaded cat. Reads stdin and writes to stdout in parallel.')
	args = parser.parse_args()  # ignored at the moment.

	sin_fd = sys.stdin.fileno()
	# Unbuffered binary stdout; closefd=False keeps the real fd open.
	sout = open(sys.stdout.fileno(), "wb", buffering=0, closefd=False)
	read_max_size = 1000000

	def read():
		# This has the behavior which we actually want:
		#  - If there are <= read_max_size bytes available, it will return those immediately,
		#    i.e. it will not block to wait until read_max_size bytes are available.
		#  - If there are 0 bytes available, it will block and wait until some bytes are available.
		v = os.read(sin_fd, read_max_size)
		# Returning None signals EOF to the pipe loops.
		return v or None
	def write(v):
		sout.write(v)
		sout.flush()

	# Wire our read/write callbacks into the Pipe's reader/writer loops.
	pipe = Pipe()
	pipe.read = read
	pipe.write = write
	reader = Thread(name="reader", target=pipe.reader_loop)
	writer = Thread(name="writer", target=pipe.writer_loop)
	reader.daemon = True
	writer.daemon = True
	reader.start()
	writer.start()
	try:
		reader.join()
		writer.join()
	except KeyboardInterrupt:
		# Ctrl-C just ends the copy; daemon threads die with the process.
		pass
Exemple #28
0
def runProcess(commandline):
  """
  Run a process, echoing its stdout/stderr line by line until it exits.

  :param commandline: command line (argv list; shell=False)
  :return: the process return code
  """
  global finished
  debug ("Executing : %s" % commandline)
  exe = subprocess.Popen(commandline,
                         stdin=None,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         shell=False,
                         bufsize=1, 
                         close_fds=ON_POSIX)

  # Echo stdout/stderr from daemon threads so neither pipe can fill up and
  # block the child process.
  t = Thread(target=print_output, args=("stdout", exe.stdout, False))
  t.daemon = True 
  t.start()
  t2 = Thread(target=print_output, args=("stderr", exe.stderr, True))
  t2.daemon = True 
  t2.start()

  debug("Waiting for completion")
  # FIX: wait() replaces the former poll()-and-sleep(1) busy loop; it
  # blocks until the child exits without up to a second of extra latency.
  exe.wait()
  debug("completed with exit code : %d" % exe.returncode)
  finished = True
  # Let the echo threads drain any remaining output before returning.
  t.join()
  t2.join()
  return exe.returncode
    def __init__(self):
        """Start an avconv child decoding the incoming stream to raw rgb24
        frames, plus the frame-reader and message-emitter threads."""
        # Low-priority (nice 15) avconv reading from stdin and writing raw
        # rgb24 video to stdout; stderr is logged to a file.
        # NOTE(review): the stderr log path is hard-coded to /home/supun --
        # confirm it exists in the deployment environment.
        self.p = Popen(["nice", "-n", "15", "avconv", "-i", "-",
                        "-probesize", "2048", "-flags", "low_delay", "-f",
                        "rawvideo", "-pix_fmt", 'rgb24', "-"],
                       stdin=PIPE, stdout=PIPE, stderr=open('/home/supun/error.txt', 'w'),
                       bufsize=0, preexec_fn=set_death_signal_int)
        # Reader thread pulls (360, 640) frames off avconv's stdout.
        t = Thread(target=self.enqueue_output, args=(self.p.stdout, (360, 640)))
        t.daemon = True
        t.start()

        self.frame_queue = Queue.Queue()

        self.time_queue = Queue.Queue()
        self.local_time_queue = Queue.Queue()

        # counting the tuples emitted
        self.emit_count = 0
        # frames received
        self.tuple_count = 0
        # whether we have removed the times from the time queue
        self.time_removed = 0
        # used to keep the difference between frames received and messages emitted
        self.diff = []

        # the control modules
        self.tracking = Tracking.Tracking()
        self.planing = Planning.Planning()

        # Emitter thread publishes processed results downstream.
        send_thread = Thread(target=self.emit_message)
        send_thread.daemon = True
        send_thread.start()
Exemple #30
0
    def xreader():
        """Generator: pipe samples from the enclosing *reader* through
        *mapper* using one reader thread and *process_num* handler threads,
        yielding mapped samples (in input order when *order* is set)."""
        in_queue = Queue(buffer_size)
        out_queue = Queue(buffer_size)
        # Shared cursor the ordered workers use to emit in sequence.
        out_order = [0]
        # start a read worker in a thread
        target = order_read_worker if order else read_worker
        t = Thread(target=target, args=(reader, in_queue))
        t.daemon = True
        t.start()
        # start several handle_workers
        target = order_handle_worker if order else handle_worker
        args = (in_queue, out_queue, mapper, out_order) if order else (
            in_queue, out_queue, mapper)
        workers = []
        for i in xrange(process_num):
            worker = Thread(target=target, args=args)
            worker.daemon = True
            workers.append(worker)
        for w in workers:
            w.start()

        # Yield until every worker has finished: each worker pushes one
        # XmapEndSignal onto out_queue when its input is exhausted.
        sample = out_queue.get()
        while not isinstance(sample, XmapEndSignal):
            yield sample
            sample = out_queue.get()
        finish = 1
        # Drain remaining samples interleaved with end signals from the
        # other workers.
        while finish < process_num:
            sample = out_queue.get()
            if isinstance(sample, XmapEndSignal):
                finish += 1
            else:
                yield sample
Exemple #31
0
                variacaoY.append(y_primeiro - valoresY[k])

                tamanhoLista = len(variacaoX)
                calcularVariacao()

                print(variacaoX, variacaoY)

        sleep(0.1)


#meu_serial = None
# open the hardware serial link (Windows COM port, 9600 baud, short timeout)
meu_serial = Serial("COM35", baudrate=9600, timeout=0.1)
print("Serial: ok")

# background reader for the serial port; `serial` is a function defined
# earlier in this file (its body is the loop that feeds variacaoX/variacaoY)
thread = Thread(target=serial)
thread.daemon = True
thread.start()

# latest camera frame; filled in elsewhere — presumably by a capture loop
imagem = None

# one-shot timer: send coordinates/dimensions after 1 second
timerCoordDim = Timer(1.0, enviarCoordenadasDimensoes)
timerCoordDim.start()

# connect to the real drone (test_mode=False); the commented line is the
# alternate test drone
drone = Tello("TELLO-C7AC08", test_mode=False)
#drone = Tello("TELLO-D023AE", test_mode=True)
drone.inicia_cmds()

# running maxima for the tracked bounding box (x, y, width, height)
xMaior = 0
yMaior = 0
comprimentoMaior = 0
alturaMaior = 0
Exemple #32
0
        def scan_wifi():
            """Sniff 802.11 beacon frames and build a live table of nearby
            access points, finally dumping the table to ``Wifi.html``.

            Puts the first ``wl*`` interface into monitor mode, then runs two
            daemon threads (a console printer and a channel hopper) while
            scapy's ``sniff`` collects beacons on the main thread.
            Requires root privileges for iwconfig/ifconfig/sniffing.
            """

            # one row per SSID, indexed by network name
            networks = pandas.DataFrame(columns=[
                "Name_SSID", "BSSID", "dBm_Signal", "Channel", "Crypto"
            ])  # Data Frames
            networks.set_index(
                "Name_SSID",
                inplace=True)  # index SSID Name Wifi exambles : Zain..
            try:
                # find the first wireless interface (name starting with "wl")
                chack_Wifi = sub.getoutput(["iwconfig"])
                global find_card
                find_card = re.findall("wl\w+", str(chack_Wifi))[0]
                print(find_card)
                try:
                    print("[[#]] Start Mode:Monitor")
                    sub.call(["ifconfig", find_card, "down"])
                    sub.call(["iwconfig", find_card, "mode", "Monitor"])
                    sub.call(["ifconfig", find_card, "up"])
                except KeyboardInterrupt:
                    # Ctrl-C during mode switch: put the card back to managed
                    # NOTE(review): "Manged" is presumably a typo for
                    # "Managed" — iwconfig may reject it; confirm
                    print("[[#]] Start Mode:Manged")
                    sub.call(["ifconfig", find_card, "down"])
                    sub.call(["iwconfig", find_card, "mode", "Manged"])
                    sub.call(["ifconfig", find_card, "up"])
            except:
                # NOTE(review): bare except hides real errors; falls back to
                # asking the user for the interface name
                find_card = str(input("WifiCard #>  "))

            def callback(packet):
                # per-packet handler: record every beacon's AP details
                if packet.haslayer(Dot11Beacon):
                    # extract the MAC address of the network
                    ssid = packet[Dot11Elt].info.decode()

                    bssid = packet[Dot11].addr2
                    # get the name of it
                    try:
                        dbm_signal = packet.dBm_AntSignal
                    except:
                        dbm_signal = "N/A"
                    # extract network stats
                    stats = packet[Dot11Beacon].network_stats()
                    # get the channel of the AP
                    channel = stats.get("channel")
                    # get the crypto
                    crypto = stats.get("crypto")
                    networks.loc[ssid] = (bssid, dbm_signal, channel, crypto)

            def print_all():
                # console refresher: redraw the table twice per second
                while True:
                    os.system("clear")
                    print(networks)
                    time.sleep(0.5)

            def change_channel():
                ch = 1
                while True:
                    os.system(f"iwconfig {find_card} channel {ch}")
                    # switch channel from 1 to 14 each 0.5s
                    ch = ch % 14 + 1
                    time.sleep(0.5)

            printer = Thread(target=print_all)
            printer.daemon = True
            printer.start()
            channel_changer = Thread(target=change_channel)
            channel_changer.daemon = True
            channel_changer.start()

            # start sniffing Scan Wifi Card

            # blocks here until sniffing stops, then export what was seen
            snif = sniff(prn=callback, iface=find_card)
            networks.to_html("Wifi.html")
Exemple #33
0
                if len(D) > 0:
                    _queue.put(D)
                else:
                    desco = True
                    break
    _queue.put(_eof_q)
    print('socket done')


def _thread_wproc():
    """Consumer thread: pipe byte chunks from the module-level ``_queue``
    into a spawned ``vlc`` process until the ``_eof_q`` sentinel arrives.

    Every ``get()`` is now paired with ``task_done()`` (the original only
    acknowledged the sentinel, so ``_queue.join()`` could never complete),
    and vlc's stdin is closed at EOF so the player sees end-of-stream.
    """
    print('vlc')
    # vlc reads the stream from stdin ("-")
    p = subprocess.Popen(["vlc", '-'], stdin=subprocess.PIPE)
    while True:
        b = _queue.get()
        if b is _eof_q:
            _queue.task_done()
            break
        p.stdin.write(b)
        # acknowledge each consumed chunk, not just the sentinel
        _queue.task_done()
    p.stdin.close()  # signal EOF so vlc can finish and exit
    p.wait()
    print('vlc done')


# reader (socket -> _queue) and writer (_queue -> vlc) threads
t1 = Thread(target=_thread_rfile)
t1.daemon = True
t2 = Thread(target=_thread_wproc)
t2.daemon = True
# start the consumer first so the queue is drained as soon as data arrives
t2.start()
print('t2 ok')
t1.start()
print('t1 ok')
# block until the reader finishes; t2 is a daemon and dies with the program
t1.join()
Exemple #34
0
def listen_message_broker(json):
    """Spawn a daemon thread running the message-broker ``consumer``.

    The ``json`` argument is accepted for the caller's signature but is not
    used here.  The real Flask app object (not the proxy) is handed to the
    consumer so it can push an application context from its own thread.
    """
    app_obj = current_app._get_current_object()
    consumer_thread = Thread(target=consumer, args=(app_obj, ))
    consumer_thread.daemon = True
    consumer_thread.start()
Exemple #35
0
    return json.dumps(mav.quick())


@app.route("/mav/commands")
def commands_get():
    return jsonify(mav.getCommands())


@app.route("/mav/commands/<command>/<lat>/<lon>/<alt>")
def command_append(command, lat, lon, alt):
    mav.setCommand(command, lat, lon, alt)
    return "Success"


@app.route("/mav/commands/<command>/<lat>/<lon>/<alt>/<ind>")
def command_insert(command, lat, lon, alt, ind):
    mav.setCommand(command, lat, lon, alt, ind)
    return "Success"


if __name__ == "__main__":
    mav.connect()

    interop.login()
    interop_telem_thread = Thread(target=interop.submit_telemetry,
                                  args=(mav, ))
    interop_telem_thread.daemon = True
    interop_telem_thread.start()

    app.run(port=5000, debug=False)
Exemple #36
0
# Pick an account source: a CSV file takes precedence over a PGPool endpoint.
if cfg_get('accounts_file'):
    account_provider = CSVAccProvider()
elif cfg_get('pgpool_url') and cfg_get('pgpool_num_accounts') > 0:
    account_provider = PGPoolAccProvider()
else:
    log.error(
        "No idea which accounts you want to check. Use either --accounts-file or --pgpool-url with --pgpool-num-accounts.")
    sys.exit()

init_account_info_file()

# fan the work out over the configured number of checker threads
num_threads = cfg_get('threads')
log.info("Checking {} accounts with {} threads.".format(account_provider.get_num_accounts(), num_threads))
for i in range(0, num_threads):
    t = Thread(target=check_thread, args=(account_provider,))
    t.daemon = True
    t.start()
    threads.append(t)

# Wait for threads to end
for t in threads:
    t.join()

# summary: per-category result counts collected by the worker threads
log.info("All {} accounts processed.".format(account_provider.num_provided))
log_results('good')
log_results('blind')
log_results('captcha')
log_results('banned')
log_results('error')

if acc_stats['good'] == 0 and acc_stats['blind'] > 0:
Exemple #37
0
# initialize joystick, if using joystick
if CONSTANTS.USING_JOYSTICK:
	LOGGER.Debug("Initializing joystick...")
	pygame.init()
	pygame.joystick.init()
	joystick1 = pygame.joystick.Joystick(0)
	joystick1.init()
	jReader = JoystickReader(joystick1)

# make sure nothing is moving before the comm threads come up
ceaseAllMotorFunctions()

if CONSTANTS.USING_MOTOR_BOARD:
	LOGGER.Debug("Initializing motor board thread...")
	#Sets up an isr essentially using the motorCommunicationThread()
	motorCommThread = Thread(target=motorCommunicationThread)
	motorCommThread.daemon = True
	motorCommThread.start()

if CONSTANTS.USING_SENSOR_BOARD:
	LOGGER.Debug("Initializing sensor board thread...")
	#sets up an isr essentially using the sensorCommunicationThread
	sensorCommThread = Thread(target=sensorCommunicationThread)
	sensorCommThread.daemon = True
	sensorCommThread.start()


# final line before entering main loop
robotEnabled = True


# audible signal that startup finished
BEEPCODES.happy1()
Exemple #38
0
def start_importing(es, cliargs, logger):
    """Start importing s3 inventory file function.

    Spawns four daemon reader threads that consume ``(file, cliargs)`` items
    from the module-level ``s3queue``, seeds the Elasticsearch index with a
    fake ``/s3`` disk-space doc and root directory doc, queues every
    inventory file, shows a progress bar, then blocks until the queue has
    been fully processed.
    """
    # worker pool: csv_file_reader pulls (file, cliargs) tuples off s3queue
    for _ in range(4):
        thread = Thread(target=csv_file_reader, args=(s3queue, ))
        thread.daemon = True
        thread.start()

    # start importing S3 inventory file(s)
    inventory_files = cliargs['s3']
    logger.info('Importing %s S3 inventory file(s)...' % len(inventory_files))

    # add fake disk space to index with path set to /s3
    data = {
        "path": '/s3',
        "total": 0,
        "used": 0,
        "free": 0,
        "available": 0,
        "indexing_date": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%f")
    }
    es.index(index=cliargs['index'], doc_type='diskspace', body=data)

    # create fake root directory doc so imported paths have a parent
    time_utc_now = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
    time_utc_epoch_start = "1970-01-01T00:00:00"
    root_dict = {
        'filename': "s3",
        'path_parent': "/",
        "filesize": 0,
        "items": 1,  # 1 for itself
        "items_files": 0,
        "items_subdirs": 0,
        "last_modified": time_utc_epoch_start,
        "tag": "",
        "tag_custom": "",
        "indexing_date": time_utc_now,
        "worker_name": "main",
        "change_percent_filesize": "",
        "change_percent_items": "",
        "change_percent_items_files": "",
        "change_percent_items_subdirs": "",
    }
    es.index(index=cliargs['index'], doc_type='directory', body=root_dict)
    diskover.add_crawl_stats(es, cliargs['index'], '/s3', 0)

    # add all s3 inventory files to queue
    for file in inventory_files:
        s3queue.put((file, cliargs))

    # set up progress bar
    bar = diskover.progress_bar('Importing')
    bar.start()

    # poll the queue to animate the bar unless output is suppressed
    if not cliargs['quiet'] and not cliargs['debug'] and not cliargs['verbose']:
        while s3queue.qsize() > 0:
            try:
                percent = int("{0:.0f}".format(
                    100 * ((len(inventory_files) - s3queue.qsize()) /
                           float(len(inventory_files)))))
                bar.update(percent)
            except (ZeroDivisionError, ValueError):
                # no inventory files / malformed percentage -> just show 0
                bar.update(0)
            time.sleep(.5)
        bar.finish()

    # wait for queue to be empty
    s3queue.join()
Exemple #39
0
# keep searching until there are enough to link to
# (Python 2 script: print statements)
while True:
    print "Awaiting " + str(num_links - len(link_addresses)) + " links"
    fresh_addresses = list_available_addresses()
    link_addresses.extend(fresh_addresses)
    if len(link_addresses) < num_links:
        sleep(1)
    else:
        break

# connect to the links
links = [connect_link(address) for address in link_addresses[:num_links]]

print "Connected to all required sensor units, now starting music"

# spawn threads to handle sensor updates
for (index, (reader, writer)) in enumerate(links):
    thread = Thread(  # spawn new sensor thread
        target=reader_loop, args=(reader, index))
    thread.daemon = True  # stop if main thread stops
    thread.start()

# trigger sounds on a loop: every synth plays forever but muted; the sensor
# threads presumably raise volumes in response to readings — TODO confirm
for synth in synths:
    synth.sound.play(-1)
    synth.sound.set_volume(0)

# wait while sensor readings trigger synths on and off
while True:
    sleep(0.05)
Exemple #40
0
                        str(work_minutes) + '/' + str(work_seconds)))
                del entered_people[saved_person]
            data = conn.recv(4096)
            data = data.decode('utf-8')
            if data == 'unlocked':
                print('UNLOCKED')
                choose_machine = '0'
                good_rfid = 0
                good_pin = 0
                access_or_not = 0
            time.sleep(1)


try:
    f1 = Thread(target=network)
    f1.daemon = True
    f1.start()
    f2 = Thread(target=get_rfid)
    f2.daemon = True
    f2.start()
    f3 = Thread(target=get_image)
    f3.daemon = True
    f3.start()
    f4 = Thread(target=get_pin)
    f4.daemon = True
    f4.start()
    f5 = Thread(target=app)
    f5.daemon = True
    f5.start()
    f6 = Thread(target=send_door)
    f6.daemon = True
Exemple #41
0
	def start_thread(self):
		"""Launch ``self.foo(10)`` on a background daemon thread."""
		worker = Thread(target=self.foo, args=(10,))
		worker.daemon = True
		worker.start()
Exemple #42
0
    from queue import Queue, Empty  # python 3.x

ON_POSIX = 'posix' in sys.builtin_module_names


def enqueue_output(out, queue):
    """Copy every line from binary stream *out* into *queue*, then close it.

    Iterates until ``readline`` returns ``b''`` (EOF).  Fixes a crash in the
    original: ``"mah line " + line`` concatenated str with bytes, raising
    TypeError on the very first line — the bytes are now decoded for the
    debug print (queue items remain raw bytes, unchanged).
    """
    for line in iter(out.readline, b''):
        queue.put(line)
        print("mah line " + line.decode("utf-8", "replace"))
    out.close()


# run iperf in server mode (port 5010, CSV output) with stdout piped to us
p = Popen(['iperf', '-s', '-i', '1', '-p', '5010', '-y', 'c'],
          stdout=PIPE,
          bufsize=1,
          close_fds=ON_POSIX)
q = Queue()
# background reader keeps the pipe drained so iperf never blocks on write
t = Thread(target=enqueue_output, args=(p.stdout, q))
t.daemon = True  # thread dies with the program
t.start()

# ... do other things here

# read line without blocking
# NOTE(review): `line` is never used after the get and this loop never
# exits — looks like demo scaffolding; confirm before reusing
while True:
    try:
        line = q.get_nowait()  # or q.get(timeout=.1)
    except Empty:
        print('no output yet')
    time.sleep(1)
Exemple #43
0
	def update_check(self):
		"""Run ``thread_get_updates`` on a daemon thread so the caller
		is not blocked while checking for updates."""
		checker = Thread(target=self.thread_get_updates)
		checker.daemon = True
		checker.start()
Exemple #44
0
def main(args, pacu_main):
    """Enumerate candidate S3 buckets built from a target domain plus affix
    wordlists, scan them across regions with a pool of ``bucket_worker``
    daemon threads, and optionally grep listable buckets for keywords.

    Returns a summary dict with 'buckets' (existing) and 'listable' counts.
    """
    ###### Don't modify these. They can be removed if you are not using the function.
    args = parser.parse_args(args)
    print = pacu_main.print
    get_regions = pacu_main.get_regions
    install_dependencies = pacu_main.install_dependencies
    ######

    # Make sure that this only includes regions that are available for the service you are working with. Some services don't require a region at all
    regions = get_regions('s3')

    # Attempt to install the required external dependencies, exit this module if that fails
    if not install_dependencies(module_info['external_dependencies']):
        return {'buckets': 0, 'listable': 0}

    # List of affixes to append to domain.com and domain in the form of affix.domain.com and affix-domain
    affixes = []

    # Read default keyword list if bruteforcing
    if args.brute:
        with open('./dependencies/Buckets.txt', 'r') as f:
            affixes += [x.strip() for x in f.readlines()]
    # Read filename of user-provided keywords
    elif args.file:
        with open(args.file, 'r') as f:
            affixes += [x.strip() for x in f.readlines()]
    else:
        affixes = []

    # if args.sublister:
    #     from Sublist3r import sublist3r
    #     subdomains = sublist3r.main(args.domain, 30, None, None, False, verbose=True, enable_bruteforce=args.subbrute, engines=None)

    print('Generating bucket permutations list...')
    buckets = create_bucket_list(args.domain, affixes=affixes)

    # for subdomain in subdomains:
    #     subucks = create_bucket_list(subdomain, affixes=affixes)
    #     buckets = buckets.union(subucks)

    # every (region, bucket) pair becomes one unit of work
    for region in regions:
        for bucket in buckets:
            bucket_q.put((region, bucket))

    print('Generated {} bucket permutations. Beginning search across {} regions.'.format(len(buckets), len(regions)))

    global bucket_q_size
    bucket_q_size = bucket_q.qsize()

    for i in range(args.threads):
        t = Thread(target=bucket_worker, args=())
        t.daemon = True
        t.start()

    # block until every queued (region, bucket) has been processed
    bucket_q.join()

    print('')
    print('[+] Results:')
    print('    {}Number of Buckets that Exist: {}{}'.format(Y, len(bucketlist['exists']), W))
    print('    {}Number of Buckets that are Listable: {}{}'.format(G, len(bucketlist['listable']), W))

    summary_data = {
        'buckets': len(bucketlist['exists']),
        'listable': len(bucketlist['listable'])
    }

    if args.grep and bucketlist['listable']:
        print('[.] Grepping for keywords in listable buckets from {}'.format(args.grep))

        with open(args.grep, 'r') as file:
            keywords = [x.strip().lower() for x in file.readlines() if x.strip()]

        for domain, region in bucketlist['listable']:
            command = 'aws s3 ls s3://{}/ --region {} --recursive'.format(domain, region)
            command = command.split(' ')
            # Run the CLI without a shell (argv list) and actually capture
            # stdout.  The original passed shell=True with a list (so only
            # "aws" ran with no arguments) and then called .lower() on the
            # CompletedProcess object, which would raise AttributeError.
            result = subprocess.run(command, stdout=subprocess.PIPE,
                                    stderr=subprocess.DEVNULL)
            output = result.stdout.decode('utf-8', 'replace').lower()
            if any(x in output for x in keywords):
                print('[!] Found sensitive file on bucket {} in region {}'.format(domain, region))

    print('{} completed.\n'.format(module_info['name']))
    return summary_data
Exemple #45
0
 def start(self):
     """Spawn the daemon thread that reads frames from the video stream.

     Returns ``self`` so the call can be chained fluently.
     """
     reader = Thread(target=self.update, args=())
     reader.daemon = True
     reader.start()
     return self
Exemple #46
0
	def download_updates(self):
		"""Download updates on a background daemon thread so the UI
		stays responsive."""
		downloader = Thread(target=self.thread_download_updates)
		downloader.daemon = True
		downloader.start()
Exemple #47
0
def check_temp(data):
    """ROS image callback: classify the thermometer colour in one frame.

    Crops the incoming image, thresholds it in HSV for red/yellow/green,
    records the dominant colour in ``temperature[n]``, marks the centroid of
    the detected spot on the frame, republishes the annotated frame, and for
    yellow/red (elevated temperature) fires the ``lenta`` alert thread.
    Unsubscribes at the end so only a single capture is processed.

    The triplicated centroid-drawing code from the original is factored into
    one nested helper; behaviour is otherwise unchanged.
    """
    global cap  # var for waiting the capture
    frame = bridge.imgmsg_to_cv2(data, 'bgr8')[80:160, 100:220]  # get frame
    hsv = cv.cvtColor(frame, cv.COLOR_BGR2HSV)

    # get binarized images in each color
    red = cv.inRange(hsv, (165, 70, 158), (255, 209, 255))
    yellow = cv.inRange(hsv, (10, 80, 88), (49, 220, 225))
    green = cv.inRange(hsv, (26, 28, 60), (135, 162, 225))

    # count non-zero pixels
    color = {
        'r': cv.countNonZero(red),
        'y': cv.countNonZero(yellow),
        'g': cv.countNonZero(green)
    }

    temperature[n] = max(color, key=color.get)  # get max key
    print(n, color, '     ', temperature[n])

    def _mark_centroid(mask, bgr):
        # centroid of the binary mask via image moments; raises
        # ZeroDivisionError for an empty mask (caught by the caller)
        moments = cv.moments(mask, 1)
        x = int(moments['m10'] / moments['m00'])
        y = int(moments['m01'] / moments['m00'])
        cv.circle(frame, (x, y), 5, bgr, -1)

    # draw circle in center of colored spot (only need winning color)
    try:
        winner = temperature[n]
        if winner == 'r':
            _mark_centroid(red, (0, 0, 255))
        elif winner == 'y':
            _mark_centroid(yellow, (0, 255, 255))
        elif winner == 'g':
            _mark_centroid(green, (0, 255, 0))
    except ZeroDivisionError:
        print('zero')

    color_debug.publish(bridge.cv2_to_imgmsg(
        frame, 'bgr8'))  # publish to topic (for web-video-server)

    # led and print if covid
    if max(color, key=color.get) == 'y' or max(color, key=color.get) == 'r':
        t = Thread(target=lenta)
        t.daemon = True
        t.start()
        print('sbrosheno')

    # unsubscribe from topic (get only one capture)
    image_sub.unregister()
    cap = True
Exemple #48
0
 def start(self):
     """Create the GUI log, then run ``explode`` on a daemon thread.

     The log is constructed before the thread starts so ``explode`` can
     presumably write to it immediately — TODO confirm against explode().
     """
     worker = Thread(target=self.explode)
     worker.daemon = True
     self.log = GUILog()
     worker.start()
0
                status = 0
                anafi1(1)
                anafi2(1)
                anafi3(1)
                print "ALL DRONES TAKEOFF"
            elif x0 == 2:
                status = 0
                anafi1(2)
                anafi2(2)
                anafi3(2)
                print "ALL DRONES HOVER"
            elif x0 == 3:
                print "Starting Mission Thread"
                status = 1
                mission = Thread(target=timedMission)
                mission.daemon = True
                mission.start()
            elif x0 == 4:
                print "STOP"
            elif x0 == 5:
                print "LAND"
            elif x0 == 6:
                print "exit"

            else:
                print('\x1bc')
                print(
                    colored(
                        ('Invalid action selected, please select again! \n'),
                        "red"))
                x0, c = Print_Drone_Actns(acn, acn_N)
Exemple #50
0
 def start(self):
     """Clear the stop flag and launch the background update thread."""
     self.stopped = False
     updater = Thread(target=self.update)
     updater.daemon = True  # run in background
     updater.start()
0
from threading import Thread

def ftt():
    """Demo worker: announce that the thread body ran."""
    print('thread')

if __name__ == '__main__':
    # NOTE(review): with daemon=True and no join(), the main thread may exit
    # before ftt ever runs — the commented-out busy loop below would have
    # kept the process alive
    get_level_thread = Thread(target = ftt)
    get_level_thread.daemon = True
    get_level_thread.start() 
    #while True:
       # print('main')
Exemple #52
0
    data_manager = utils.data_manager.DataManager(db_wrapper, instance_id)
    db_wrapper, db_pool_manager = DbFactory.get_wrapper(args)
    version = MADVersion(args, data_manager)
    version.get_version()

    MappingManagerManager.register('MappingManager', MappingManager)
    mapping_manager_manager = MappingManagerManager()
    mapping_manager_manager.start()
    mapping_manager_stop_event = mapping_manager_manager.Event()
    mapping_manager: MappingManager = mapping_manager_manager.MappingManager(
        db_wrapper, args, data_manager, True)

    ws_server = WebsocketServer(args, None, db_wrapper, mapping_manager, None,
                                True)
    t_ws = Thread(name='scanner', target=ws_server.start_server)
    t_ws.daemon = False
    t_ws.start()

    jobstatus: dict = {}

    device_Updater = deviceUpdater(ws_server, args, jobstatus)

    logger.success(
        'Starting MADmin on port {} - Open a browser, visit MADmin and go to "Settings"',
        int(args.madmin_port))
    t_flask = Thread(name='madmin',
                     target=start_madmin,
                     args=(args, db_wrapper, ws_server, mapping_manager,
                           data_manager, device_Updater, jobstatus))
    t_flask.daemon = False
    t_flask.start()
        sys.exit('[' + R + '-' + W + '] Please run as root')
    clients_APs = []
    APs = []
    DN = open(os.devnull, 'w')
    lock = Lock()
    args = parse_args()
    args.skip = list(map(str.lower, args.skip))
    # lowercase bssids while leaving essids intact
    args.accesspoint = set(_.lower() if ':' in _ else _
                           for _ in args.accesspoint)
    monitor_on = None
    mon_iface = get_mon_iface(args)
    conf.iface = mon_iface
    mon_MAC = mon_mac(mon_iface)
    first_pass = 1

    # Start channel hopping
    hop = Thread(target=channel_hop, args=(mon_iface, args))
    hop.daemon = True
    hop.start()

    signal(SIGINT, stop)

    try:
        sniff(iface=mon_iface, store=0, prn=cb)
    except Exception as msg:
        remove_mon_iface(mon_iface)
        os.system('service network-manager restart')
        print '\n[' + R + '!' + W + '] Closing'
        sys.exit(0)
def create_daemon(target, args=(), kwargs=None):
    """Create, start, and return a daemon thread running *target*.

    Convenience wrapper: the daemon flag is set via the ``Thread``
    constructor before the thread is started.
    """
    worker = Thread(target=target, args=args, kwargs=kwargs, daemon=True)
    worker.start()
    return worker
Exemple #55
0
 def _start_crawler_thread(self):
     """Run the crawler process on a background daemon thread, keeping it
     alive after the initial crawl (``stop_after_crawl=False``)."""
     crawler = Thread(target=self.crawler_process.start,
                      kwargs={'stop_after_crawl': False})
     crawler.daemon = True
     crawler.start()
Exemple #56
0
 def __init__(self, servidor):
     """Store the server reference and start this updater's daemon loop,
     registering the thread in the class-level ``ThreadUpdate.threads``
     list."""
     self.servidor = servidor
     updater = Thread(target= self.run, args=())
     updater.daemon = True
     updater.start()
     ThreadUpdate.threads.append(updater)
Exemple #57
0
def worker_send(s):
    """Forever drain the module-level ``q_sender`` queue: serialize each
    request's headers as UTF-8 and send them over socket *s*, acknowledging
    each queue item once sent."""
    while True:
        outgoing = q_sender.get()
        payload = outgoing.parse_headers().encode('utf-8')
        s.sendall(payload)
        q_sender.task_done()


if __name__ == '__main__':
    # fixed window geometry for the Kivy app
    Window.size = (800, 600)
    Window.minimum_width, Window.minimum_height = Window.size

    # mutual-TLS setup: trust the server cert, present our client cert
    context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
                                         cafile='certs/server.crt')
    context.load_cert_chain('certs/client.crt', 'certs/client.key')

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((HOST, PORT))

    ssl_socket = context.wrap_socket(sock, server_hostname=HOST)

    # receive loop on one daemon thread...
    worker_thread = Thread(target=worker, args=(ssl_socket, ))
    worker_thread.daemon = True
    worker_thread.start()

    # ...send loop on another; both die with the app
    sender_thread = Thread(target=worker_send, args=(ssl_socket, ))
    sender_thread.daemon = True
    sender_thread.start()

    # blocking Kivy main loop
    SribbleioApp().run()
Exemple #58
0
 def start_polling(self):
     """If polling is enabled, run ``poll_action`` on a daemon thread."""
     if not self.poll:
         return
     poller = Thread(target=self.poll_action)
     poller.daemon = True
     poller.start()
0
    async def __register(self, websocket):
        """Authenticate a connecting scanner device and start its worker.

        Identifies the device by its ``Origin`` header, optionally checks
        Basic auth, rejects duplicates, picks day/night route managers from
        the device mapping, instantiates the matching worker type for the
        active mode, and launches it on a named non-daemon thread.

        Returns True on success, False on any rejection.
        """
        # await websocket.recv()
        log.info("Client registering....")
        try:
            # the Origin header doubles as the device id
            id = str(websocket.request_headers.get_all("Origin")[0])
        except IndexError:
            # TODO: list IP or whatever...
            log.warning(
                "Client from %s tried to connect without Origin header" %
                str(websocket))
            return False
        if self.auths:
            try:
                authBase64 = str(
                    websocket.request_headers.get_all("Authorization")[0])
            except IndexError:
                log.warning(
                    "Client from %s tried to connect without auth header" %
                    str(websocket))
                return False
        # one worker per device id at a time
        if self.__current_users.get(id, None) is not None:
            log.warning("Worker for %s is already running" % str(id))
            return False
        elif self.auths and authBase64 and not check_auth(
                authBase64, self.args, self.auths):
            return False

        # resolve the device's route managers; nighttime is optional
        lastKnownState = {}
        client_mapping = self.device_mappings[id]
        daytime_routemanager = self.routemanagers[
            client_mapping["daytime_area"]].get("routemanager")
        if client_mapping.get("nighttime_area", None) is not None:
            nightime_routemanager = self.routemanagers[
                client_mapping["nighttime_area"]].get("routemanager", None)
        else:
            nightime_routemanager = None
        devicesettings = client_mapping["settings"]

        started = False
        if MadGlobals.sleep is True:
            # start the appropriate nighttime manager if set
            if nightime_routemanager is None:
                pass
            elif nightime_routemanager.mode in [
                    "raids_mitm", "mon_mitm", "iv_mitm"
            ]:
                Worker = WorkerMITM(self.args,
                                    id,
                                    lastKnownState,
                                    self,
                                    daytime_routemanager,
                                    nightime_routemanager,
                                    self._mitm_mapper,
                                    devicesettings,
                                    db_wrapper=self.db_wrapper)
                started = True
            elif nightime_routemanager.mode in ["raids_ocr"]:
                from worker.WorkerOcr import WorkerOcr
                Worker = WorkerOcr(self.args,
                                   id,
                                   lastKnownState,
                                   self,
                                   daytime_routemanager,
                                   nightime_routemanager,
                                   devicesettings,
                                   db_wrapper=self.db_wrapper)
                started = True
            elif nightime_routemanager.mode in ["pokestops"]:
                Worker = WorkerQuests(self.args,
                                      id,
                                      lastKnownState,
                                      self,
                                      daytime_routemanager,
                                      nightime_routemanager,
                                      self._mitm_mapper,
                                      devicesettings,
                                      db_wrapper=self.db_wrapper)
                started = True
            else:
                log.fatal("Mode not implemented")
                sys.exit(1)
        if not MadGlobals.sleep or not started:
            # we either gotta run daytime mode OR nighttime routemanager not set
            if daytime_routemanager.mode in [
                    "raids_mitm", "mon_mitm", "iv_mitm"
            ]:
                Worker = WorkerMITM(self.args,
                                    id,
                                    lastKnownState,
                                    self,
                                    daytime_routemanager,
                                    nightime_routemanager,
                                    self._mitm_mapper,
                                    devicesettings,
                                    db_wrapper=self.db_wrapper)
            elif daytime_routemanager.mode in ["raids_ocr"]:
                from worker.WorkerOcr import WorkerOcr
                Worker = WorkerOcr(self.args,
                                   id,
                                   lastKnownState,
                                   self,
                                   daytime_routemanager,
                                   nightime_routemanager,
                                   devicesettings,
                                   db_wrapper=self.db_wrapper)
            elif daytime_routemanager.mode in ["pokestops"]:
                Worker = WorkerQuests(self.args,
                                      id,
                                      lastKnownState,
                                      self,
                                      daytime_routemanager,
                                      nightime_routemanager,
                                      self._mitm_mapper,
                                      devicesettings,
                                      db_wrapper=self.db_wrapper)
            else:
                log.fatal("Mode not implemented")
                sys.exit(1)

        # non-daemon: the worker thread must outlive this handler
        newWorkerThread = Thread(name='worker_%s' % id,
                                 target=Worker.start_worker)
        self.__current_users[id] = [newWorkerThread, Worker, websocket]
        newWorkerThread.daemon = False
        newWorkerThread.start()

        return True
Exemple #60
0
def run_parallel(target, tasks, n_workers=None, use_processes=False):
    """
    Run tasks in parallel and yield a (result, error) tuple per task.

    `target` is a function.
    `tasks` is a list of argument tuples passed to the function. If `target`
    only takes one argument, each task may be the bare argument instead of a
    1-tuple.

    A generator is returned, yielding (result, error) for each task so that
    errors can be handled by the caller: on success error is None, on failure
    result is None and error is the raised exception. The generator must be
    fully consumed to ensure all tasks are processed. Results are yielded in
    completion order, which is generally not the submission order.

    Example::

        def do_hard_work(a, b):
            ...

        tasks = [(1, 2), (5, 2), (3, 4), ...]

        for result, error in run_parallel(do_hard_work, tasks):
            print(result)

    A pool of worker threads (or processes if `use_processes=True`) processes
    the tasks. Threads may not always achieve parallelism due to the Python
    GIL. If using processes, be careful not to use shared global resources
    such as database connection pools in the target function. The number of
    workers defaults to `multiprocessing.cpu_count()` but can be set with
    `n_workers`.
    """
    if n_workers is None:
        n_workers = multiprocessing.cpu_count()

    # Multiprocessing has issues on Windows, so silently fall back to threads.
    if platform.system() == 'Windows':
        use_processes = False

    # Processes need an IPC-capable queue; threads can share a plain Queue.
    queue_cls = multiprocessing.Queue if use_processes else Queue

    work_q = queue_cls()
    result_q = queue_cls()

    def _get_next(q):
        """Blocking get with a 1s timeout so workers are never stuck forever
        in an uninterruptible wait. (Renamed from `next` to avoid shadowing
        the builtin.)"""
        while True:
            try:
                return q.get(True, 1)
            except Empty:
                pass  # Timed out; loop and retry.
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit propagate without being logged as errors first.
                log.exception("Exception getting item from queue")
                raise

    def _worker(work_q, result_q):
        """Consume tasks until the None sentinel, pushing (result, error)."""
        while True:
            task = _get_next(work_q)
            if task is None:  # Sentinel: no more work for this worker.
                break
            try:
                # Strict type check on purpose: tuple subclasses (e.g.
                # namedtuples) are treated as a single bare argument.
                if type(task) is not tuple:
                    task = (task, )
                result_q.put((target(*task), None))
            except Exception as e:
                log.exception("Exception in worker")
                result_q.put((None, e))

    for _ in range(n_workers):
        if use_processes:
            p = multiprocessing.Process(target=_worker,
                                        args=(work_q, result_q))
            p.start()
        else:
            t = Thread(target=_worker, args=(work_q, result_q))
            t.daemon = True  # Don't block interpreter exit on idle workers.
            t.start()

    # Feed in tasks, and start collecting results once every worker has
    # something to do, so feeding and draining overlap without unbounded
    # queue growth. `pending` counts tasks submitted but not yet yielded.
    pending = 0
    for task in tasks:
        work_q.put(task)
        pending += 1
        if pending > n_workers:
            yield _get_next(result_q)
            pending -= 1

    # Signal every worker to stop.
    for _ in range(n_workers):
        work_q.put(None)

    # Finish collecting the remaining results.
    while pending > 0:
        yield _get_next(result_q)
        pending -= 1