Example #1
 def fork_handler():
     if detach:
         # Set umask to a sane value
         # (group and other have no write permission by default)
         os.umask(022)
         # Detach from controlling terminal
         try:
             tty_fd = os.open("/dev/tty", os.O_RDWR)
             fcntl.ioctl(tty_fd, termios.TIOCNOTTY)
             os.close(tty_fd)
         except OSError:
             pass
         # Close IO channels
         devnull_fd = os.open("/dev/null", os.O_RDWR)
         os.dup2(devnull_fd, 0)
         os.dup2(devnull_fd, 1)
         os.dup2(devnull_fd, 2)
         # Detach from process group
         os.setsid()
     if nice is not None:
         os.nice(nice)
     if makepid and pidfile:
         file(pidfile, "w").write("%d\n" % os.getpid())
     if chuid:
         _changeUID(chuid)
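A note on the calls above: os.nice() takes a relative increment and returns the process's new niceness, so the current value can be read with os.nice(0). A minimal sketch of a renice-to-an-absolute-value helper built on that behaviour (the helper name is hypothetical):

import os

def renice_to(target):
    """Best-effort renice of the current process to an absolute niceness."""
    try:
        current = os.nice(0)        # read the current niceness without changing it
        os.nice(target - current)   # apply the relative delta
    except OSError:
        pass                        # lowering niceness usually requires privileges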
Example #2
File: forkedfunc.py Project: 6br/servo
 def _child(self, nice_level, child_on_start, child_on_exit):
     # right now we need to call a function, but first we need to
     # map all IO that might happen
     sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
     sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
     retvalf = self.RETVAL.open("wb")
     EXITSTATUS = 0
     try:
         if nice_level:
             os.nice(nice_level)
         try:
             if child_on_start is not None:
                 child_on_start()
             retval = self.fun(*self.args, **self.kwargs)
             retvalf.write(marshal.dumps(retval))
             if child_on_exit is not None:
                 child_on_exit()
         except:
             excinfo = py.code.ExceptionInfo()
             stderr.write(str(excinfo._getreprcrash()))
             EXITSTATUS = self.EXITSTATUS_EXCEPTION
     finally:
         stdout.close()
         stderr.close()
         retvalf.close()
     os.close(1)
     os.close(2)
     os._exit(EXITSTATUS)
Example #3
File: box.py Project: TheDunn/flex-pypy
 def children(self, nice_level):
     # right now we need to call a function, but first we need to
     # map all IO that might happen
     # make sure sys.stdout points to file descriptor one
     sys.stdout = stdout = self.PYTESTSTDOUT.open('w')
     sys.stdout.flush()
     fdstdout = stdout.fileno()
     if fdstdout != 1:
         os.dup2(fdstdout, 1)
     sys.stderr = stderr = self.PYTESTSTDERR.open('w')
     fdstderr = stderr.fileno()
     if fdstderr != 2:
         os.dup2(fdstderr, 2)
     retvalf = self.PYTESTRETVAL.open("w")
     try:
         if nice_level:
             os.nice(nice_level)
         # with fork() we have duplicated py.test's basetemp
         # directory so we want to set it manually here. 
         # this may be expensive for some test setups, 
         # but that is what you get with boxing. 
         # XXX but we are called in more than strict boxing
         # mode ("AsyncExecutor") so we can't do the following without
         # inflicting on --dist speed, hum: 
         # pytestconfig.basetemp = self.tempdir.join("childbasetemp")
         retval = self.fun(*self.args, **self.kwargs)
         retvalf.write(marshal.dumps(retval))
     finally:
         stdout.close()
         stderr.close()
         retvalf.close()
     os._exit(0)
Example #4
 def _child(self, nice_level):
     # right now we need to call a function, but first we need to
     # map all IO that might happen
     # make sure sys.stdout points to file descriptor one
     sys.stdout = stdout = self.STDOUT.open('w')
     sys.stdout.flush()
     fdstdout = stdout.fileno()
     if fdstdout != 1:
         os.dup2(fdstdout, 1)
     sys.stderr = stderr = self.STDERR.open('w')
     fdstderr = stderr.fileno()
     if fdstderr != 2:
         os.dup2(fdstderr, 2)
     retvalf = self.RETVAL.open("wb")
     EXITSTATUS = 0
     try:
         if nice_level:
             os.nice(nice_level)
         try:
             retval = self.fun(*self.args, **self.kwargs)
             retvalf.write(marshal.dumps(retval))
         except:
             excinfo = py.code.ExceptionInfo()
             stderr.write(excinfo.exconly())
             EXITSTATUS = self.EXITSTATUS_EXCEPTION
     finally:
         stdout.close()
         stderr.close()
         retvalf.close()
     os.close(1)
     os.close(2)
     os._exit(EXITSTATUS)
Example #5
def set_idle_priority():
    """Attempt to set the process priority to idle"""
    try:
        os.nice(20)
    except AttributeError:
        pass  # No os.nice on Windows
    if sys.platform.startswith("linux"):
        import ctypes.util
        import platform
        # Try using a syscall to set io priority...
        __NR_ioprio_set = {  # see Linux sources, Documentation/block/ioprio.txt
            "i386": 289,
            "x86_64": 251,
        }.get(platform.machine())
        if __NR_ioprio_set is not None:
            libc = ctypes.cdll.LoadLibrary(ctypes.util.find_library("c"))
            IOPRIO_WHO_PROCESS = 1
            IOPRIO_CLASS_IDLE = 3
            IOPRIO_CLASS_SHIFT = 13
            libc.syscall(__NR_ioprio_set, IOPRIO_WHO_PROCESS, 0,
                         IOPRIO_CLASS_IDLE << IOPRIO_CLASS_SHIFT)
    elif sys.platform.startswith("win"):
        import ctypes
        from ctypes import wintypes
        SetPriorityClass = ctypes.windll.kernel32.SetPriorityClass
        SetPriorityClass.argtypes = [wintypes.HANDLE, wintypes.DWORD]
        SetPriorityClass.restype = wintypes.BOOL
        HANDLE_CURRENT_PROCESS = -1
        IDLE_PRIORITY_CLASS = 0x00000040
        SetPriorityClass(HANDLE_CURRENT_PROCESS, IDLE_PRIORITY_CLASS)
        # On Vista+, this sets the I/O priority to very low
        PROCESS_MODE_BACKGROUND_BEGIN = 0x00100000
        SetPriorityClass(HANDLE_CURRENT_PROCESS, PROCESS_MODE_BACKGROUND_BEGIN)
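The shift used in the Linux branch above follows the kernel's ioprio encoding: the scheduling class lives in the top bits and the per-class level (0-7, lower is higher priority) in the low bits. A small sketch of the equivalent of the kernel's IOPRIO_PRIO_VALUE macro, with constants as documented in Documentation/block/ioprio.txt:

IOPRIO_CLASS_SHIFT = 13

def ioprio_prio_value(ioprio_class, data=0):
    # Pack the class and the per-class priority data into the value passed to ioprio_set().
    return (ioprio_class << IOPRIO_CLASS_SHIFT) | data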
Example #6
def main(queue):
	import Queue
	# set process niceness value to lower its priority
	os.nice(1)
	try:
		pass
	except:
		pass
	else:
		print("Mocking Climaduino reads and writes")
		# We are going to create a loop that looks for a line on Serial. If there is a line,
		# send it to the co-routine that interprets and logs it.
		#
		# If there is a message to send on Serial, it picks it up, and sends it.
		last_serial_read = None
		#broadcast_to_display_database = broadcast([display_data(), update_database()])
		while 1:
			if last_serial_read == None or (time.time() - last_serial_read > 10): # only check the Serial port at most every 10 seconds
				line = {'status':{'lastStateChange':'Never', 'systemRunning':'N', 'lastStateChange':0, 'millis':100000},
					    'parameters':{'mode':0, 'temp':77, 'humidity':55},
					    'readings':{'temp':78, 'humidity':55}}
				last_serial_read = time.time()
				if line:
					#broadcast_to_display_database.send(line)
					pass
			try:
				parameter = queue.get(False) #non-blocking read. If empty, we handle the exception below
			except Queue.Empty:
				pass
			else:
				print(parameter)
			time.sleep(.25)
	finally:
		pass
Example #7
File: install.py Project: edgarcosta/smc
def nice():
    try:
        import psutil  # not available by default (e.g., when building with docker)
        os.nice(10)
        psutil.Process(os.getpid()).ionice(ioclass=psutil.IOPRIO_CLASS_IDLE)
    except:
        print "WARNING: psutil not available so not re-nicing build of webapp"
Example #8
	def main_update(self):
		"""
		Main function called by the updater thread.
		Direct call is unnecessary.
		"""
	
		# Renice updater thread to limit overload
		os.nice(1)
		time.sleep(self.refresh)

		try:
			while True:
				# Take a timestamp so we can account for the time spent in update()
				timestamp=time.time()

				# Update data with user's defined function
				self.update()

				# We use this trick because we cannot use signals in background threads
				# and alarm() messes with readline() in the main thread.
				delay=(timestamp+self.refresh)-time.time()
				if delay > 0:
					if delay > self.refresh:
						time.sleep(self.refresh)
					else:
						time.sleep(delay)

				# Commit changes exactly every 'refresh' seconds, however long update() takes.
				# Committed values are a bit old, but for RRD, punctual values
				# are better than fresh but non-time-constant values.
				self.commit()

		except Exception,e:
			self.error=e
			raise
Example #9
def _run_chassis(fabricconfig, mgmtbusconfig, fts):
    try:
        # lower priority to make master and web
        # more "responsive"
        os.nice(5)

        c = minemeld.chassis.Chassis(
            fabricconfig['class'],
            fabricconfig['config'],
            mgmtbusconfig
        )
        c.configure(fts)

        gevent.signal(signal.SIGUSR1, c.stop)

        while not c.fts_init():
            if c.poweroff.wait(timeout=0.1) is not None:
                break

            gevent.sleep(1)

        LOG.info('Nodes initialized')

        try:
            c.poweroff.wait()
            LOG.info('power off')

        except KeyboardInterrupt:
            LOG.error("We should not be here !")
            c.stop()

    except:
        LOG.exception('Exception in chassis main procedure')
        raise
Example #10
def main():
    """Main Loop."""
    APPNAME = str(__package__ or __doc__)[:99].lower().strip().replace(" ", "")
    if not sys.platform.startswith("win") and sys.stderr.isatty():
        def add_color_emit_ansi(fn):
            """Add methods we need to the class."""
            def new(*args):
                """Method overload."""
                if len(args) == 2:
                    new_args = (args[0], copy(args[1]))
                else:
                    new_args = (args[0], copy(args[1]), args[2:])
                if hasattr(args[0], 'baseFilename'):
                    return fn(*args)
                levelno = new_args[1].levelno
                if levelno >= 50:
                    color = '\x1b[31;5;7m\n '  # blinking red with black
                elif levelno >= 40:
                    color = '\x1b[31m'  # red
                elif levelno >= 30:
                    color = '\x1b[33m'  # yellow
                elif levelno >= 20:
                    color = '\x1b[32m'  # green
                elif levelno >= 10:
                    color = '\x1b[35m'  # pink
                else:
                    color = '\x1b[0m'  # normal
                try:
                    new_args[1].msg = color + str(new_args[1].msg) + ' \x1b[0m'
                except Exception as reason:
                    print(reason)  # Do not use log here.
                return fn(*new_args)
            return new
        # all non-Windows platforms support ANSI Colors so we use them
        log.StreamHandler.emit = add_color_emit_ansi(log.StreamHandler.emit)
    log.basicConfig(level=-1, format="%(levelname)s:%(asctime)s %(message)s")
    log.getLogger().addHandler(log.StreamHandler(sys.stderr))
    log.info(__doc__)
    try:
        os.nice(19)  # smooth cpu priority
        libc = cdll.LoadLibrary('libc.so.6')  # set process name
        buff = create_string_buffer(len(APPNAME) + 1)
        buff.value = bytes(APPNAME.encode("utf-8"))
        libc.prctl(15, byref(buff), 0, 0, 0)
    except Exception as reason:
        log.warning(reason)
    signal.signal(signal.SIGINT, signal.SIG_DFL)  # CTRL+C work to quit app
    app = QApplication(sys.argv)
    app.setApplicationName(APPNAME)
    app.setOrganizationName(APPNAME)
    app.setOrganizationDomain(APPNAME)
    app.instance().setQuitOnLastWindowClosed(False)  # no quit on dialog close
    icon = QIcon(app.style().standardPixmap(QStyle.SP_FileIcon))
    app.setWindowIcon(icon)
    win = MainWindow(icon)
    win.show()
    log.info('Total Maximum RAM Memory used: ~{} MegaBytes.'.format(int(
        resource.getrusage(resource.RUSAGE_SELF).ru_maxrss *
        resource.getpagesize() / 1024 / 1024 if resource else 0)))
    sys.exit(app.exec_())
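The prctl(15, ...) call above sets the process name: 15 is PR_SET_NAME from <sys/prctl.h>, and the kernel truncates the name to 15 bytes plus a terminating NUL. A named-constant sketch of the same call:

import ctypes
import ctypes.util

PR_SET_NAME = 15  # from <sys/prctl.h>

def set_process_name(name):
    libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
    buf = ctypes.create_string_buffer(name.encode("utf-8")[:15])  # NUL terminator added automatically
    libc.prctl(PR_SET_NAME, buf, 0, 0, 0)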
Example #11
    def run(self):
        print("STARTING IMPROVEMENT JOB {}".format(self.q.name))
        os.makedirs(log_dir.value, exist_ok=True)
        with open(os.path.join(log_dir.value, "{}.log".format(self.q.name)), "w", buffering=LINE_BUFFER_MODE) as f:
            sys.stdout = f
            print("STARTING IMPROVEMENT JOB {}".format(self.q.name))
            print(pprint(self.q))

            if nice_children.value:
                os.nice(20)

            cost_model = CostModel(
                    funcs=self.context.funcs(),
                    assumptions=EAll(self.assumptions),
                    freebies=self.freebies,
                    ops=self.ops)

            try:
                for expr in itertools.chain((self.q.ret,), core.improve(
                        target=self.q.ret,
                        assumptions=EAll(self.assumptions),
                        context=self.context,
                        hints=self.hints,
                        stop_callback=lambda: self.stop_requested,
                        cost_model=cost_model,
                        ops=self.ops,
                        improve_count=self.improve_count)):

                    new_rep, new_ret = unpack_representation(expr)
                    self.k(new_rep, new_ret)
                print("PROVED OPTIMALITY FOR {}".format(self.q.name))
            except core.StopException:
                print("stopping synthesis of {}".format(self.q.name))
                return
Example #12
	def run(self, high_priority = True):
		""" Create clock with 64th-beat granularity.
		By default, attempts to run as a high-priority thread
		(though requires being run as root to re-nice the process) """
		try:
			import os
			os.nice(-20)
			print "Timeline: Running as high-priority thread"
		except:
			pass

		try:
			#------------------------------------------------------------------------
			# Start the clock. This might internal (eg a Clock object, running on
			# an independent thread), or external (eg a MIDI clock).
			#------------------------------------------------------------------------
			if self.clockmode == self.CLOCK_INTERNAL:
				self.clock.run(self)
			else:
				self.clocksource.run()

		except StopIteration:
			#------------------------------------------------------------------------
			# This will be hit if every Pattern in a timeline is exhausted.
			#------------------------------------------------------------------------
			print "Timeline finished"

		except Exception, e:
			print " *** Exception in background Timeline thread: %s" % e
			traceback.print_exc(file = sys.stdout)
Example #13
File: io.py Project: PaulBatchelor/pippi
    def _armGenerator(self, generator_name, gen, ctl, trigger, respawn):
        try:
            os.nice(-2)
        except OSError:
            os.nice(0)

        voice_id = os.getpid()
        out = self.openAudioDevice()
        ctl['count'] = 1

        if getattr(self.ns, 'reload', False) == True:
            reload(gen)

        note_info = trigger.wait()
        ctl['note'] = note_info

        respawn.set()

        snd = gen.play(ctl)

        if self.ns.grid:
            self.tick.wait()

        out.write(snd)

        del out
Example #14
File: io.py Project: PaulBatchelor/pippi
    def gridHandler(self, tick, bpm, midiout=4):
        # TODO: send MIDI clock again, assign device via 
        # config file and/or console cmd. Also allow mapping 
        # MIDI control to grid tempo value without resetting
        os.nice(0)

        cdiv = 24

        beat = dsp.bpm2frames(bpm) / cdiv

        count = 0

        ts = time.time()

        while getattr(self.ns, 'grid', True):
            if count % cdiv == 0:
                tick.set()
                tick.clear()

            ts = time.time()

            dsp.delay(beat)
            count += 1

        ts = time.time()
Example #15
def _lowpriority():
    """ Set the priority of the process to below-normal."""

    import sys
    try:
        sys.getwindowsversion()
    except AttributeError:
        isWindows = False
    else:
        isWindows = True

    if isWindows:
        # Based on:
        #   "Recipe 496767: Set Process Priority In Windows" on ActiveState
        #   http://code.activestate.com/recipes/496767/
        import win32api
        import win32process
        import win32con

        pid = win32api.GetCurrentProcessId()
        handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, True, pid)
        win32process.SetPriorityClass(
            handle, win32process.BELOW_NORMAL_PRIORITY_CLASS,
        )
    else:
        import os

        os.nice(1)
Example #16
def main():
    parser = argparse.ArgumentParser(description='Search a space for a string.')
    parser.add_argument('needle', help='string to search for')
    parser.add_argument('--walker', '-w', default='pyrandom', choices=sorted(walkers.keys()),
                        help='spacewalker to search with')
    parser.add_argument('--num-procs', '-n', type=int, default=None,
                        help='number of processors to run on')
    parser.add_argument('--chunk-size', '-c', type=int, default=100000)
    parser.add_argument('--verbose', '-v', action='count')
    args = parser.parse_args()

    if args.verbose > 1:
        loglevel = logging.DEBUG
    elif args.verbose == 1:
        loglevel = logging.INFO
    else:
        loglevel = logging.WARNING
    logging.basicConfig(level=loglevel)

    search = walkers[args.walker].search
    validate = walkers[args.walker].validate

    os.nice(20)
    params = explore(args.needle, search, args.num_procs, args.chunk_size)
    print('Matching params: {}'.format(params))
    print('Validation: {}'.format(
        ''.join(char for char, _ in zip(
            validate(**params),  # pylint: disable=W0142
            range(len(args.needle))))))
Example #17
def lowpriority():
    """ Set the priority of the process to below-normal.
        Copied from: http://stackoverflow.com/questions/1023038/change-process-priority-in-python-cross-platform"""

    try:
        sys.getwindowsversion()
    except:
        isWindows = False
    else:
        isWindows = True

    try:
        if isWindows:
            # Based on:
            #   "Recipe 496767: Set Process Priority In Windows" on ActiveState
            #   http://code.activestate.com/recipes/496767/
            import win32api, win32process, win32con
            pid = os.getpid()
            handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, True, pid)
            win32process.SetPriorityClass(handle, win32process.BELOW_NORMAL_PRIORITY_CLASS)
            win32api.CloseHandle(handle)
        else:
            # Unix and Mac should have a nice function
            os.nice(1)
    except:
        logger = logging.getLogger(__name__ + '.lowpriority')
        if not logger is None:
            logger.warn("Could not lower process priority")
            if isWindows:
                logger.warn("Are you missing Win32 extensions for python? http://sourceforge.net/projects/pywin32/")
        pass
Example #18
def main():
    global task
    parser = OptionParser(
        "%prog --daemon_status_id <id> --queue_name <queue_name> \
[--nice <0>] [--stdout <file_name|DEFAULT>] [--stderr <file_name>|STDOUT>] [--debug]"
    )
    parser.add_option(
        "--daemon_status_id", action="store", type="int", help="The id of the daemon status that launched this Task"
    )
    parser.add_option("--queue_name", action="store", type="string", help="The name of the queue from which to read")
    parser.add_option("--nice", action="store", type="int", default=0, help="nice this process. defaults to 5.")
    parser.add_option(
        "--stdout",
        action="store",
        type="string",
        help="Send stdout to this file, or special value 'DEFAULT' \
sends it to the stream unique to this Task request",
    )
    parser.add_option(
        "--stderr",
        action="store",
        type="string",
        help="Send stderr to this file, or special value 'STDOUT' sends it to stdout",
    )
    parser.add_option("--debug", action="store_true", help="more messages")
    (options, args) = parser.parse_args()

    # option parsing
    if not options.daemon_status_id or not options.queue_name:
        sys.exit(parser.get_usage())
    log.set_logging_debug(options.debug)

    if not options.nice == 0:
        os.nice(options.nice)

    console_stderr = None
    try:
        c = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        q = c.get_queue(options.queue_name)
        boto_message = q.read()
        task = __get_task__(boto_message, options.queue_name)
        if task == None:
            log.debug("No task in queue '%s' pid:%s" % (options.queue_name, os.getpid()))
            sys.exit(133)
        else:
            log.debug("Starting SQS Queue '%s' Task:%s pid:%s" % (options.queue_name, task.get_id(), os.getpid()))
            q.delete_message(boto_message)
            console_stderr = __redirect_outputs__(task, options.stdout, options.stderr)
            daemon_status = __get_daemon_status__(options.daemon_status_id)
            __run_task__(task, daemon_status)
            ending_status = task.get_current_run_status()
            if ending_status == None:
                sys.exit(134)
            if not ending_status.was_successful():
                sys.exit(1)
    except SystemExit, se:
        # in python 2.4, SystemExit extends Exception, this is changed in 2.5 to
        # extend BaseException, specifically so this check isn't necessary. But
        # we're using 2.4; upon upgrade, this check will be unecessary but ignorable.
        sys.exit(se.code)
Example #19
  def read_input(self, callback):
    # Event handling code from http://stackoverflow.com/questions/5060710
    
    
    # Lower this process's priority a bit
    #import os
    os.nice(15)

    # long int, long int, unsigned short, unsigned short, unsigned int
    FORMAT = 'llHHl'
    EVENT_SIZE = struct.calcsize(FORMAT)

    in_file = open(self.input_device.get('DEVNAME'), "rb")

    event = in_file.read(EVENT_SIZE)

    while event:
      (tv_sec, tv_usec, type, code, value) = struct.unpack(FORMAT, event)

      if type == 2 and ((code == 8 and not self.is_powermate) or (code == 7 and self.is_powermate)):
        if value == 1:
          tornado.ioloop.IOLoop.instance().add_callback(functools.partial(callback, 'increase'))
        else:
          tornado.ioloop.IOLoop.instance().add_callback(functools.partial(callback, 'decrease'))
      elif type == 1:
        if value == 1:
          tornado.ioloop.IOLoop.instance().add_callback(functools.partial(callback, 'press'))
        else:
          tornado.ioloop.IOLoop.instance().add_callback(functools.partial(callback, 'release'))

      event = in_file.read(EVENT_SIZE)

    in_file.close()
Example #20
def set_high_priority(logger):
  """ Change process scheduler and priority. """
  # use "real time" scheduler
  done = False
  sched = os.SCHED_RR
  if os.sched_getscheduler(0) == sched:
    # already running with RR scheduler, likely set from init system, don't touch priority
    done = True
  else:
    prio = (os.sched_get_priority_max(sched) - os.sched_get_priority_min(sched)) // 2
    param = os.sched_param(prio)
    try:
      os.sched_setscheduler(0, sched, param)
    except OSError:
      logger.warning("Failed to set real time process scheduler to %u, priority %u" % (sched, prio))
    else:
      done = True
      logger.info("Process real time scheduler set to %u, priority %u" % (sched, prio))

  if not done:
    # renice to highest priority
    target_niceness = -19
    previous_niceness = os.nice(0)
    delta_niceness = target_niceness - previous_niceness
    try:
      new_niceness = os.nice(delta_niceness)
    except OSError:
      new_niceness = previous_niceness
    if new_niceness != target_niceness:
      logger.warning("Unable to renice process to %d, current niceness is %d" % (target_niceness, new_niceness))
    else:
      logger.info("Process reniced from %d to %d" % (previous_niceness, new_niceness))
Example #21
File: net.py Project: golmschenk/go_net
    def __init__(self, message_queue=None, settings=None):
        super().__init__()
        if settings:
            self.settings = settings
        else:
            self.settings = Settings()

        # Common variables.
        self.data = Data(settings=settings)
        self.training_dropout_keep_probability = 0.5

        # Logging.
        self.step_summary_name = "Loss per pixel"
        self.image_summary_on = True

        # Internal setup.
        self.stop_signal = False
        self.global_step = tf.Variable(0, name='global_step', trainable=False)
        self.saver = None
        self.session = None
        self.dataset_selector_tensor = tf.placeholder(dtype=tf.string)
        self.dropout_keep_probability_tensor = tf.placeholder(tf.float32)
        self.learning_rate_tensor = tf.train.exponential_decay(self.settings.initial_learning_rate,
                                                               self.global_step,
                                                               self.settings.learning_rate_decay_steps,
                                                               self.settings.learning_rate_decay_rate)
        self.queue = message_queue
        self.predicted_test_labels = None
        self.test_step = 0

        os.nice(10)
Example #22
def set_high_priority(logger):
  """ Change process priority to the highest possible. """
  # use "real time" scheduler
  done = False
  sched = os.SCHED_RR
  prio = os.sched_get_priority_max(sched)
  param = os.sched_param(prio)
  try:
    os.sched_setscheduler(0, sched, param)
  except OSError:
    logger.warning("Failed to set real time process scheduler to %u, priority %u" % (sched, prio))
  else:
    done = True
    logger.info("Process real time scheduler set to %u, priority %u" % (sched, prio))

  if not done:
    # renice to highest priority
    target_niceness = -19
    previous_niceness = os.nice(0)
    delta_niceness = target_niceness - previous_niceness
    try:
      new_niceness = os.nice(delta_niceness)
    except OSError:
      new_niceness = previous_niceness
    if new_niceness != target_niceness:
      logger.warning("Unable to renice process to %d, current niceness is %d" % (target_niceness, new_niceness))
    else:
      logger.info("Process reniced from %d to %d" % (previous_niceness, new_niceness))
Example #23
 def runProgram(self, runCommandList):
     # Raise the process niceness to lower its priority
     os.nice(19)
     
     # Redirect stdout (where the program's output goes) to a file
     redirectionSTDOUT = os.open(FileNameNPathResources.const.OutputResultFileName,
                                 os.O_RDWR|os.O_CREAT)
     os.dup2(redirectionSTDOUT,1)
     
     # Limit the resources the process may use
     soft, hard = resource.getrlimit(resource.RLIMIT_CPU)
     rlimTime = int(self.limitTime / 1000) + 1
     
     resource.setrlimit(resource.RLIMIT_CPU, (rlimTime,hard))
     
     # Redirect Python runtime error messages
     if self.usingLang == ListResources.const.Lang_PYTHON:
         redirectionSTDERROR = os.open('run.err', os.O_RDWR|os.O_CREAT)
         os.dup2(redirectionSTDERROR, 2)
     
     if self.caseCount != 0:
         redirectionSTDIN = os.open('input.txt', os.O_RDONLY)
         os.dup2(redirectionSTDIN, 0)
     
     # Trace and execute the target process
     ptrace.traceme()
         
     os.execl(runCommandList[0], runCommandList[1], runCommandList[2])
Example #24
def main(unused_args):
    """procnettcp main loop"""
    drop_privileges()
    try:           # On some Linux kernel versions, with lots of connections
      os.nice(19)  # this collector can be very CPU intensive.  So be nicer.
    except OSError, e:
      print >>sys.stderr, "warning: failed to self-renice:", e
Example #25
    def run(self):
        try:
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            # sometimes scipy spills warnings to stderr
            # redirect stdout,stderr to /dev/null
            nullfd = os.open(os.devnull,os.O_RDWR)
            os.dup2(nullfd,sys.stdout.fileno())
            os.dup2(nullfd,sys.stderr.fileno())

            os.nice(15)
            gc.collect()
            log.logger = None
            repredict_threshold = 20
            while True:
                cov,rec = self.pipe.recv()
                self._coverage_data.append((cov,rec))
                for i in xrange(repredict_threshold):
                    if not self.pipe.poll():
                        break;
                    cov,rec = self.pipe.recv()
                    self._coverage_data.append((cov,rec))
                size = self._predict_zone_size()
                self.pipe.send(int(size))
        except EOFError:
            sys.exit(0)
        except KeyboardInterrupt:
            sys.exit(3)
Example #26
File: pytis.py Project: Methimpact/btcdm
	def Run(self):
		i=0
		os.nice(self.niceness)
		while self.keep_going:
			if not self.callbacks:
				self._run()
			else:	
				if self.frequency or ( not self.frequency and not i):
					for v in self.callbacks:
						callback = v.getCallback()
						args = v.getArgs()
						kwargs = v.getKwArgs()
						try:
							callback(*args,**kwargs)
						except (KeyboardInterrupt, QuitNow), e:
							print "\nbye!"
							self.keep_going = False
							self._stop()
							self.running=False
							return
						except Exception, e:
							self.log.error("Some error occured.")
							type_,value_,traceback_ = sys.exc_info()
							self.log.debug("type: %s" % type_)
							self.log.debug("type2: %s" %type(e))
							self.log.debug("value: %s" % value_)
							for tb_line in traceback.format_tb(traceback_):
								self.log.debug(tb_line)
							self.log.error(str(e))
Example #27
    def __init__(self, config, logger, readq):
        super(Procnettcp, self).__init__(config, logger, readq)
        try:           # On some Linux kernel versions, with lots of connections
            os.nice(19)  # this collector can be very CPU intensive.  So be nicer.
        except OSError:
            self.log_exception("warning: failed to self-renice:")

        # resolve the list of users to match on into UIDs
        self.uids = {}
        for user in USERS:
            try:
                self.uids[str(pwd.getpwnam(user)[2])] = user
            except KeyError:
                continue

        try:
            self.tcp = open("/proc/net/tcp")
            # if IPv6 is enabled, even IPv4 connections will also
            # appear in tcp6. It has the same format, apart from the
            # address size
            try:
                self.tcp6 = open("/proc/net/tcp6")
            except IOError, (errno, msg):
                if errno == 2:  # No such file => IPv6 is disabled.
                    self.tcp6 = None
                else:
                    raise
        except IOError:
            self.log_exception("Failed to open proc/net/tcp file")
            self.cleanup()
            raise
Example #28
def makePlotArray(q,w,plotName,plotVar,cpus,mixState,massCat,cut,dataArr,varMax=-1,asym=False,numBins=False):
  os.nice(config['niceness'])
  plot = [plotName,ueberPlot(w,plotName,plotVar,cpus,mixState,massCat,cut,dataArr,varMax,asym,numBins)]
  histNameSuffix = "" if asym == False else "_Asym["+asym.GetName()+"]"
  #plot[1].Print("v")
  q.put([plot,
         [plotName+"_Pull",plot[1].pullHist("h_"+dataArr[-1].GetName()+histNameSuffix)]])
Example #29
File: io.py Project: bensteinberg/pippi
    def _armGenerator(self, generator_name, gen, ctl, trigger, respawn, grid_ctl):
        try:
            os.nice(-2)
        except OSError:
            os.nice(0)

        voice_id = os.getpid()
        out = self.openAudioDevice()
        ctl['count'] = 1

        if getattr(self.ns, 'reload', False) == True:
            reload(gen)

        note_info = trigger.wait()
        ctl['note'] = note_info

        respawn.set()

        snd = gen.play(ctl)

        if self.ns.grid:
            div = grid_ctl.geti(self.divcc, low=0, high=4, default=0)
            div = self.divs[div]
            self.ticks[div].wait()

        out.write(snd)

        del out
Example #30
        def __init__(self):
                os.nice(20)
                try:
                        self.application_dir = os.environ["UPDATE_MANAGER_NOTIFIER_ROOT"]
                except KeyError:
                        self.application_dir = "/"
                misc.setlocale(locale.LC_ALL, "")
                gettext.bindtextdomain("pkg", os.path.join(
                    self.application_dir,
                    "usr/share/locale"))
                gettext.textdomain("pkg")
                self.pr = None
                self.last_check_filename = None
                self.time_until_next_check = 0
                self.status_icon = None
                self.n_updates = 0
                self.n_installs = 0
                self.n_removes = 0
                self.notify = None
                self.host = None
                self.last_check_time = 0
                self.refresh_period = 0
                self.timeout_id = 0
                self.terminate_after_activate = False

                self.client = gconf.client_get_default()
                self.start_delay  =  self.get_start_delay()
                # Allow gtk.main loop to start as quickly as possible
                gobject.timeout_add(self.start_delay * 1000, self.check_and_start)
Example #31
    def run(self, nice_level=-10):

        print("Setting nice level...", end='')
        try:
            os.nice(nice_level
                    )  # this process (main_node) has the highest priority
        except PermissionError:
            raise Exception("You must be root to use nice smaller than 0")
        print("done!")

        print("Creating Pipes, Locks, Barriers...", end='')
        ext_control_pipe_write, ext_control_pipe_read = Pipe()
        control_optflow_pipe_write, control_optflow_pipe_read = Pipe()
        control_tof_pipe_write, control_tof_pipe_read = Pipe()
        control_imu_pipe_write, control_imu_pipe_read = Pipe()
        CMDS_lock = Lock()
        barrier_init_values = Barrier(
            3)  # print_values, read_voltage_from_fc and send_cmds_to_fc
        barrier_sensor_values = Barrier(3)  # tof, optflow and send_cmds_to_fc
        print("done!")

        threads = [
            Thread(target=self.joystick_interface,
                   args=(self.gamepad, ext_control_pipe_read, CMDS_lock)),
            Thread(target=self.read_voltage_from_fc,
                   args=(self.gamepad, barrier_init_values)),
            Thread(target=self.print_values,
                   args=(barrier_init_values, CMDS_lock, self.print_freq)),
            Thread(target=self.send_cmds_to_fc,
                   args=(control_imu_pipe_write, control_imu_pipe_read,
                         CMDS_lock, barrier_init_values,
                         barrier_sensor_values))  #,
            # Thread(target=self.tof,
            #        args=(control_tof_pipe_write, control_tof_pipe_read,
            #              barrier_sensor_values,
            #              self.sensors_init['beta_tof'], 40, 50)),
            # Thread(target=self.optflow,
            #        args=(control_optflow_pipe_write, control_optflow_pipe_read,
            #              barrier_sensor_values,
            #              self.sensors_init['beta_optflow'], 50)),
            # Thread(target=self.control,
            #        args=(control_optflow_pipe_read, control_tof_pipe_read,
            #              control_imu_pipe_read,
            #              ext_control_pipe_write, ext_control_pipe_read,
            #              0.50, self.controller_init, self.debug))
        ]

        thread_names = [
            'joystick_interface', 'read_voltage_from_fc', 'print_values',
            'send_cmds_to_fc'
        ]
        #, 'tof', 'optflow',
        #'control']

        try:
            print("Launching threads...")
            for ti in threads:
                ti.start()
            threads[0].join(
            )  # the idea is to kill all threads if this one (joystick_interface) stops
        finally:
            self.shutdown = True
            while any([ti.is_alive() for ti in threads]):
                print("Waiting for threads to gracefully die...", [
                    thread_names[i]
                    for i, ti in enumerate(threads) if ti.is_alive()
                ])
                time.sleep(0.1)  # just to avoid letting the loop going wild...

            # Saving to the sdcard is slow, so it must be the last thing to avoid
            # locking something important.
            if self.save_data:
                saved_cmds_filename = "CMDS-" + time.ctime().replace(
                    " ", "_").replace(":", ".") + ".npy"
                self.DATA.appendleft(self.controller_init)
                self.DATA.appendleft(self.sensors_init)
                self.DATA.appendleft({'nice_level': nice_level})
                print("Saving CMDS to file: ", saved_cmds_filename, end='')
                try:
                    np.save(
                        saved_cmds_filename,
                        self.DATA)  # TODO: maybe I should change to pickle...
                    print(' ...done!')
                except Exception as e:
                    print(" ...failed to save data!")
                    print(e)
Example #32
    def start(self, cores, memory, cpu_shares, base_url):
        self.remove_smc_path(
        )  # start can be prevented by massive logs in ~/.smc; if project not stopped via stop, then they will still be there.
        self.ensure_bashrc()
        self.remove_forever_path()  # probably not needed anymore
        self.remove_snapshots_path()
        self.create_user()
        self.create_smc_path()
        self.chown(
            self.project_path, False
        )  # Sometimes /projects/[project_id] doesn't have group/owner equal to that of the project.

        os.environ['SMC_BASE_URL'] = base_url

        if self._dev:
            self.dev_env()
            os.chdir(self.project_path)
            self.cmd("smc-local-hub start")

            def started():
                return os.path.exists("%s/local_hub/local_hub.port" %
                                      self.smc_path)

            i = 0
            while not started():
                time.sleep(0.1)
                i += 1
                sys.stdout.flush()
                if i >= 100:
                    return
            return

        pid = os.fork()
        if pid == 0:
            try:
                os.nice(-os.nice(0))  # Reset nice-ness to 0
                os.setgroups([])  # Drops other groups, like root or sudoers
                os.setsid()  # Make it a session leader
                os.setgid(self.uid)
                os.setuid(self.uid)

                try:
                    # Fork a second child and exit immediately to prevent zombies.  This
                    # causes the second child process to be orphaned, making the init
                    # process responsible for its cleanup.
                    pid = os.fork()
                except OSError, e:
                    raise Exception, "%s [%d]" % (e.strerror, e.errno)

                if pid == 0:
                    os.environ['HOME'] = self.project_path
                    os.environ['SMC'] = self.smc_path
                    os.environ['USER'] = os.environ['USERNAME'] = os.environ[
                        'LOGNAME'] = self.username
                    os.environ['MAIL'] = '/var/mail/%s' % self.username
                    if self._single:
                        # In single-machine mode, everything is on localhost.
                        os.environ['SMC_HOST'] = 'localhost'
                    del os.environ['SUDO_COMMAND']
                    del os.environ['SUDO_UID']
                    del os.environ['SUDO_GID']
                    del os.environ['SUDO_USER']
                    os.chdir(self.project_path)
                    self.cmd("smc-start")
                else:
                    os._exit(0)
            finally:
Example #33
 def start(self):
     os.nice(10)     # mommy always told me to be nice with others...
     Worker.start(self)
     if self.multi.socket:
         self.multi.socket.close()
Example #34
def main():

    channel_type = sys.argv[1]

    # the default niceness is inherited from the parent neovim process.  Increment it
    # so that heavy calculation will not block the UI.
    try:
        os.nice(5)
    except:
        pass

    # psutil ionice
    try:
        import psutil
        p = psutil.Process(os.getpid())
        p.ionice(psutil.IOPRIO_CLASS_IDLE)
    except:
        pass

    if channel_type == 'core':
        source_name = 'cm_core'
        modulename = 'cm_core'
        serveraddr = sys.argv[2]
    else:
        source_name = sys.argv[2]
        modulename = sys.argv[3]
        serveraddr = sys.argv[4]

    setup_logging(modulename)

    logger.info("start_channel for %s", modulename)

    # change process title
    try:
        import setproctitle
        setproctitle.setproctitle('%s nvim-completion-manager' % modulename)
    except:
        pass

    # Stop Popen from opening a console window on Windows systems
    if platform.system() == 'Windows':
        try:
            import subprocess
            cls = subprocess.Popen

            class NewPopen(cls):
                def __init__(self, *args, **keys):
                    if 'startupinfo' not in keys:
                        si = subprocess.STARTUPINFO()
                        si.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                        keys['startupinfo'] = si
                    cls.__init__(self, *args, **keys)

            subprocess.Popen = NewPopen
        except Exception as ex:
            logger.exception(
                'Failed hacking subprocess.Popen for windows platform: %s', ex)

    try:
        start_and_run_channel(channel_type, serveraddr, source_name,
                              modulename)
    except Exception as ex:
        logger.exception('Exception when running %s: %s', modulename, ex)
        exit(1)
    finally:
        # terminate here
        exit(0)
Example #35
    def run(cls,
            request,
            process_type,
            bat_file,
            cancer_rates="UK",
            cwd="/tmp",
            niceness=0,
            name="",
            model=settings.BC_MODEL):
        """
        Run a process.
        @param request: HTTP request
        @param process_type: either pedigree.MUTATION_PROBS or pedigree.CANCER_RISKS.
        @param bat_file: batch file path
        @keyword cancer_rates: cancer incidence rates used in risk calculation
        @keyword cwd: working directory
        @keyword niceness: niceness value
        @keyword name: log name for calculation, e.g. REMAINING LIFETIME
        """
        if process_type == pedigree.MUTATION_PROBS:
            prog = os.path.join(model['HOME'], model['PROBS_EXE'])
            out = "can_probs"
        else:
            prog = os.path.join(model['HOME'], model['RISKS_EXE'])
            out = "can_risks"

        start = time.time()
        try:
            try:
                os.remove(os.path.join(
                    cwd, out + ".out"))  # ensure output file doesn't exist
            except OSError:
                pass

            # logger.debug(prog + ' -r ' + out+".out -v " + bat_file + " " +
            #              os.path.join(model['HOME'], "Data/incidence_rates_" + cancer_rates + ".nml"))
            process = Popen(
                [
                    prog,
                    '-r',
                    out + ".out",  # results file
                    '-v',  # include model version
                    bat_file,
                    os.path.join(
                        model['HOME'],
                        "Data/incidence_rates_" + cancer_rates + ".nml")
                ],
                cwd=cwd,
                stdout=PIPE,
                stderr=PIPE,
                env=settings.FORTRAN_ENV,
                preexec_fn=lambda: os.nice(niceness) and resource.setrlimit(
                    resource.RLIMIT_STACK,
                    (resource.RLIM_INFINITY, resource.RLIM_INFINITY)))

            (outs, errs) = process.communicate(
                timeout=settings.FORTRAN_TIMEOUT)  # timeout in seconds
            exit_code = process.wait()

            if exit_code == 0:
                with open(os.path.join(cwd, out + ".out"), 'r') as result_file:
                    data = result_file.read()
                logger.info(
                    model.get('NAME', "") + " " +
                    ("MUTATION PROBABILITY" if process_type ==
                     pedigree.MUTATION_PROBS else "RISK ") + name +
                    " CALCULATION: user="******"; elapsed time=" + str(time.time() - start))
                return data
            else:
                logger.error("EXIT CODE (" + out.replace('can_', '') + "): " +
                             str(exit_code))
                logger.error(outs)
                errs = errs.decode("utf-8").replace('\n', '')
                logger.error(errs)
                raise ModelError(errs)
        except TimeoutExpired as to:
            process.terminate()
            logger.error(model.get('NAME', "") + " PROCESS TIMED OUT.")
            logger.error(to)
            raise TimeOutException()
        except Exception as e:
            logger.error(model.get('NAME', "") + ' PROCESS EXCEPTION: ' + cwd)
            logger.error(e)
            raise
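One subtlety in Example #35: chaining the two calls in preexec_fn with `and` means resource.setrlimit() is skipped whenever os.nice(niceness) returns 0 (for instance when niceness is 0 and the process is not already reniced). A sketch of the same idea with the two calls sequenced explicitly; the helper name is illustrative:

import os
import resource

def _child_setup(niceness):
    # Runs in the child between fork() and exec(), before the external program starts.
    os.nice(niceness)
    resource.setrlimit(resource.RLIMIT_STACK,
                       (resource.RLIM_INFINITY, resource.RLIM_INFINITY))

# Popen([...], preexec_fn=lambda: _child_setup(niceness), ...)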
Example #36
import numpy as np
import argparse
import cv2 as cv
import picamera
import time
import RPi.GPIO as GPIO
import sys
import Adafruit_DHT
import I2C_LCD_driver
import datetime
import os
from enum import Enum
import threading
os.nice(20)

mylcd=I2C_LCD_driver.lcd()
pic_num = 0
currentTemp = 0
busy = False
incubation_period = 0



def up_btn(channel):            #button interupt
    print("up was pushed!")
    if Current_state == States.SET_TEMP:
        global Temp
        Temp +=1

    elif Current_state == States.SET_TIME:
        global Time
Example #37
if __name__ == "__main__":
    from SCRIBES.Utils import fork_process
    fork_process()
    from os import nice
    nice(19)
    # from sys import argv, path
    # python_path = argv[1]
    # path.insert(0, python_path)
    from gobject import MainLoop, threads_init
    threads_init()
    from signal import signal, SIGINT, SIG_IGN
    signal(SIGINT, SIG_IGN)
    from IndexerManager import Manager
    Manager()
    MainLoop().run()
Example #38
def run_centrifuge(flowcell_job_id, streamed_reads=None):
    """
    Run the metagenomics subprocess command, returning the data from it as a DataFrame.
    Parameters
    ----------
    flowcell_job_id: int
        The primary key of the flowcell ID
    streamed_reads: list of dict
        A list of dictionaries containing read information

    Returns
    -------
    pd.core.frame.DataFrame, int, int, int, pandas.core.frame.DataFrame, int, int
        Dataframe of metagenomics results, total output lines from metagenomics, last read primary key,
        total count of reads analysed, dataframe of any reads that identified as targets,
         number of reads with classifications, number of reads without classifications

    """
    # The JobMaster object
    task = JobMaster.objects.get(pk=flowcell_job_id)
    # The flowcell the reads are from
    flowcell = task.flowcell
    avg_read_length = int(flowcell.average_read_length)
    if avg_read_length == 0:
        logger.error(
            f"Average read length is zero Defaulting to 450, but this is an error."
        )
        avg_read_length = 1000
    if not streamed_reads and not isinstance(streamed_reads, list):
        read_count, last_read, fasta_df_barcode = get_fastq_df(
            flowcell_pk=int(flowcell.id),
            desired_yield=50,
            avg_read_len=avg_read_length,
            task=task,
        )
    else:
        last_read = task.last_read
        fasta_df_barcode = pd.DataFrame(streamed_reads)
        if not fasta_df_barcode.empty:
            fasta_df_barcode = fasta_df_barcode.rename(columns={
                "type": "read_type_id",
                "barcode": "barcode_id"
            })
            fasta_df_barcode["type__name"] = fasta_df_barcode["read_type_id"]
        read_count = fasta_df_barcode.shape[0]
    if fasta_df_barcode.empty:
        return pd.DataFrame(), None, None, None, None, 0, 0
    logger.debug("Flowcell id: {} - number of reads found {}".format(
        flowcell.id, read_count))
    # Create a fastq string to pass to Centrifuge
    fasta_df_barcode["fasta"] = (">read_id=" + fasta_df_barcode["read_id"] +
                                 ",barcode=" +
                                 fasta_df_barcode["barcode_name"] + "\n" +
                                 fasta_df_barcode["sequence"])
    fastqs_data = "\n".join(list(fasta_df_barcode["fasta"]))
    logger.info("Flowcell id: {} - Loading index and Centrifuging".format(
        flowcell.id))
    # Write the generated fastq file to stdin, passing it to the command
    # Use Popen to run the metagenomics command
    # The path to the metagenomics executable
    centrifuge_path = get_env_variable("MT_CENTRIFUGE")
    # The path to the Centrifuge Index
    index_path = get_env_variable("MT_CENTRIFUGE_INDEX")
    # The command to run metagenomics
    cmd = "perl " + centrifuge_path + " -f --mm -k 3 -x " + index_path + " -"
    try:
        out, err = subprocess.Popen(
            cmd.split(),
            preexec_fn=lambda: os.nice(-10),
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            stderr=subprocess.PIPE,
        ).communicate(input=str.encode(fastqs_data))
    except subprocess.SubprocessError as e:
        logger.warning(f"{e}, running with standard niceness index.")
        out, err = subprocess.Popen(
            cmd.split(),
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            stderr=subprocess.PIPE,
        ).communicate(input=str.encode(fastqs_data))
    # The standard error
    # out is a bytestring so it needs decoding
    if not out:
        logger.info(
            "Flowcell id: {} - No reads found or no metagenomics output."
            " Check above for error".format(flowcell.id))
        task.running = False
        task.save()
        return None
    centrifuge_output = out.decode()
    # total number of lines of metagenomics output dealt with
    total_centrifuge_output = centrifuge_output.count("\n") - 1
    logger.info(
        "Flowcell id: {} - number of metagenomics output lines is {}".format(
            flowcell.id, total_centrifuge_output))
    # output fields is the column headers for the pandas data frame
    output_fields = ["readID", "seqID", "taxID", "numMatches"]
    # create the DataFrame from the output
    df = pd.read_csv(StringIO(centrifuge_output),
                     sep="\t",
                     usecols=output_fields)
    # split out the barcode_name from the readID string
    df = split_read_id_and_barcodes(df)
    individual_reads_classified = np.unique(df["readID"].values).size
    targets_df = separate_target_cent_output(df, task, fasta_df_barcode)
    # The number of reads we have any form of classification for
    reads_classified = np.unique(df[df["tax_id"].ne(0)]["read_id"].values).size
    # The number of reads we have completely failed to classify
    reads_unclassified = np.unique(
        df[df["tax_id"].eq(0)]["read_id"].values).size  # save the values
    # Get the metadata object. Contains the start time, end time and runtime of the task
    metadata, created = Metadata.objects.get_or_create(task=task)
    return (
        df,
        individual_reads_classified,
        read_count,
        last_read,
        targets_df,
        reads_classified,
        reads_unclassified,
    )
Example #39
#!/usr/bin/env python
"""Some comaprison plots"""

from __future__ import print_function, division

import os

os.nice(10)
import sys
from array import array
import numpy as np
import math
from itertools import product, chain
from copy import copy, deepcopy

import ROOT
from MyStyle import My_Style
from comparator import Contribution, Plot

My_Style.cd()

# my packages
import common_utils as cu
import qg_common as qgc
import qg_general_plots as qgp

# Use rootpy to throw exceptions on ROOT errors, but need DANGER enabled
import rootpy
import rootpy.logger.magic as M

M.DANGER.enabled = True
Example #40
def internal_astrometry(catfile,hpxfile,nside=128,band='r',plot=False):
    """ Calculate internal relative astrometry.

    Parameters
    ----------
    catfile : merged catalog file
    hpxfile : single epoch catalog file(s)
    nside   : nside for calculation
    band    : band to use
    plot    : plot output

    Returns
    -------
    stats   : output statistics
    """
    nice = os.nice(0)
    os.nice(10-nice)

    if band=='all': band = 'r'

    #print catfile,hpxfile,nside

    #catfile = glob.glob('cat/*_%05d.fits'%pix)[0]
    if not os.path.exists(catfile): 
        msg = "Couldn't find %s"%catfile
        raise Exception(msg)

    columns = [OBJECT_ID,'RA','DEC']

    spread,mag,nepochs = bfields(['WAVG_SPREAD_MODEL','MAG_PSF','NEPOCHS'],band)
    columns += [spread,mag,nepochs]

    cat = load_infiles([catfile],columns)
    # Select stars with 17 < mag < 21
    sel = (np.fabs(cat[spread])<0.002) & \
        (cat[mag]>17) & \
        (cat[mag]<21) & \
        (cat[nepochs] > 1)
    cat = cat[sel]

    if len(cat) == 0:
        print("WARNING: No objects passing selection in: %s"%catfile)
        return np.array([],dtype=int), np.array([])

    #hpxfiles = glob.glob('hpx/%s/*_%05d.fits'%(band,pix))
    hpx = load_infiles(hpxfile, [OBJECT_ID, 'RA', 'DEC'])
    hpx = hpx[np.in1d(hpx[OBJECT_ID],cat[OBJECT_ID])]

    if len(hpx) == 0:
        print("WARNING: No matched objects in: %s"%hpxfile)
        return np.array([],dtype=int), np.array([])
        
    #keyfile = 'key/key_hpx_%05d.fits'%pix
    #key = load_infiles([keyfile],[OBJECT_ID,'FILENAME','OBJECT_NUMBER'])
    #key = key[np.in1d(key[OBJECT_ID],cat[OBJECT_ID])]
    # 
    #key_id = np.char.add(key['FILENAME'],key['OBJECT_NUMBER'].astype(str))
    #hpx_id = np.char.add(hpx['FILENAME'],hpx['OBJECT_NUMBER'].astype(str))
    # 
    #hpx = hpx[np.in1d(hpx_id,key_id)]

    uid,inv,cts = np.unique(hpx[OBJECT_ID],False,True,True)

    # Make sure that the order matches between coadd and single epoch.
    if not np.all(uid == cat[OBJECT_ID]):
        cat = cat[np.in1d(cat[OBJECT_ID],hpx[OBJECT_ID])]
    if not np.all(uid == cat[OBJECT_ID]):
        cat = cat[np.argsort(cat[OBJECT_ID])]
    assert np.all(uid == cat[OBJECT_ID])
    
    ra,dec = cat['RA'][inv],cat['DEC'][inv]

    sepdeg = angsep(ra,dec,hpx['RA'],hpx['DEC'])
    sepsec = sepdeg * 3600.
    sepmas = sepsec * 1000.
    sel = [sepsec > 1e-5]
    sep = sepmas[sel]

    pix = ang2pix(nside,ra[sel],dec[sel])
    upix = np.unique(pix)
    peak = nd.median(sep,labels=pix,index=upix)

    if plot:
        plt.figure()
        draw_angsep(sep)
        if isinstance(plot,basestring):
            outfile = plot
            plt.savefig(outfile,bbox_inches='tight')

    return upix,peak
Example #41
    parser.add_argument('-mo', '--merge_only', default=False)
    parser.add_argument('-dr', '--dryrun', action='store_true', default=False)
    parser.add_argument('-fg', '--nolog', action='store_true', default=False)
    parser.add_argument('--syst', default='nominal')
    parser.add_argument('--merge', action='store_true', default=False)
    parser.add_argument('--nice', default=10)
    parser.add_argument('--test', action='store_true', default=False)

    args = parser.parse_args()

    os.system('mkdir -p output')
    outdir = 'output/' + args.outdir
    dryrun = args.dryrun
    verbose = args.verbose
    poolsize = args.ncore
    os.nice(args.nice)

    ## Define samples to run over

    with open('sample_lists.yml', 'r') as flst:
        samplists = yaml.safe_load(flst)

    with open('sample_filelists.yml', 'r') as flst:
        filelists = yaml.safe_load(flst)

    if args.merge_only != False:
        lst_samp = args.merge_only.split(',') if len(
            args.merge_only) > 0 else None
        if len(lst_samp) == 1 and lst_samp[0] == 'all': lst_samp = None
        mergeOutputHists(outdir, samplists['merge_map'], lst_samp=lst_samp)
        exit(0)
Example #42
                out = ''

            d[pdbIn] = out

        print "Done."
        return d


##############
## empty test
##############
import Biskit.test as BT


class Test(BT.BiskitTest):
    """Mock test, the Slave is tested in L{Biskit.StructureMaster}"""
    pass


if __name__ == '__main__':

    import os, sys

    if len(sys.argv) == 2:

        niceness = int(sys.argv[1])
        os.nice(niceness)

    slave = StructureSlave()
    slave.start()
Example #43
def main():
    os.nice(5)  # Handle mailing lists at non-interactive priority.
    # delete this if you wish

    try:
        MailmanOwner = mm_cfg.DEB_LISTMASTER
    except AttributeError:
        MailmanOwner = 'postmaster@localhost'

    try:
        domain, full = [a.lower() for a in sys.argv[1:]]
        local = full.split("@")[0]
    except:
        # This might happen if we're not using Postfix or
        # /etc/postfix/master.cf is badly misconfigured
        sys.stderr.write('Illegal invocation: %r\n' % ' '.join(sys.argv))
        if len(sys.argv) > 3:
            sys.stderr.write('Did you forget to set '
                             'mailman_destination_recipient_limit=1 '
                             'in main.cf?')
        sys.exit(EX_USAGE)

    # Redirect required addresses to the Mailman owner
    if local in ('postmaster', 'abuse', 'mailer-daemon'):
        os.execv("/usr/sbin/sendmail", ("/usr/sbin/sendmail", MailmanOwner))
        sys.exit(0)

    # Assume normal posting to a mailing list
    mlist, func = local, 'post'

    # Let Mailman decide if a list exists.
    from Mailman.Utils import list_exists

    if list_exists(mlist):
        mm_pgm = os.path.join(paths.prefix, 'mail', 'mailman')
        os.execv(mm_pgm, (mm_pgm, func, mlist))
        # NOT REACHED

    # Check for control extension on local part
    for ext in (
            '-admin',
            '-owner',
            '-request',
            '-bounces',
            '-confirm',
            '-join',
            '-leave',
            '-subscribe',
            '-unsubscribe',
    ):
        if local.endswith(ext):
            mlist = local[:-len(ext)]
            func = ext[1:]
            break

    if list_exists(mlist):
        mm_pgm = os.path.join(paths.prefix, 'mail', 'mailman')
        os.execv(mm_pgm, (mm_pgm, func, mlist))
        # NOT REACHED
    else:
        try:
            sys.stderr.write(mm_cfg.DEB_HELP_TEXT)
        except AttributeError:
            sys.exit(EX_NOUSER)

        sys.exit(1)
Example #44
0
def nice(space, inc):
    "Decrease the priority of process by inc and return the new priority."
    try:
        res = os.nice(inc)
    except OSError, e:
        raise wrap_oserror(space, e)
Example #45
0
def renice(niceness):
    try:
        os.nice(niceness)
    except:
        pass
Example #46
0
def make_arguments_parser():
    """Build and return a command line agument parser."""
    if not sys.platform.startswith("win") and sys.stderr.isatty():

        def add_color_emit_ansi(fn):
            """Add methods we need to the class."""
            def new(*args):
                """Method overload."""
                if len(args) == 2:
                    new_args = (args[0], copy(args[1]))
                else:
                    new_args = (args[0], copy(args[1]), args[2:])
                if hasattr(args[0], 'baseFilename'):
                    return fn(*args)
                levelno = new_args[1].levelno
                if levelno >= 50:
                    color = '\x1b[31;5;7m\n '  # blinking red with black
                elif levelno >= 40:
                    color = '\x1b[31m'  # red
                elif levelno >= 30:
                    color = '\x1b[33m'  # yellow
                elif levelno >= 20:
                    color = '\x1b[32m'  # green
                elif levelno >= 10:
                    color = '\x1b[35m'  # pink
                else:
                    color = '\x1b[0m'  # normal
                try:
                    new_args[1].msg = color + str(new_args[1].msg) + ' \x1b[0m'
                except Exception as reason:
                    print(reason)  # Do not use log here.
                return fn(*new_args)

            return new

        # all non-Windows platforms support ANSI Colors so we use them
        log.StreamHandler.emit = add_color_emit_ansi(log.StreamHandler.emit)
    log.basicConfig(level=-1,
                    format="%(levelname)s:%(asctime)s %(message)s",
                    filemode="w",
                    filename=os.path.join(gettempdir(),
                                          "css-html-prettify.log"))
    log.getLogger().addHandler(log.StreamHandler(sys.stderr))
    try:
        os.nice(19)  # smooth cpu priority
        libc = cdll.LoadLibrary('libc.so.6')  # set process name
        buff = create_string_buffer(len("css-html-prettify") + 1)
        buff.value = bytes("css-html-prettify".encode("utf-8"))
        libc.prctl(15, byref(buff), 0, 0, 0)
    except Exception:
        pass  # this may fail on windows and its normal, so be silent.
    # Parse command line arguments.
    parser = ArgumentParser(description=__doc__,
                            epilog="""CSS-HTML-Prettify:
    Takes file or folder full path string and process all CSS/SCSS/HTML found.
    If argument is not file/folder will fail. Check Updates works on Python3.
    StdIn to StdOut is deprecated since may fail with unicode characters.
    CSS Properties are AlphaSorted, to help spot cloned ones, Selectors not.
    Watch works for whole folders, with minimum of ~60 Secs between runs.""")
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument('fullpath',
                        metavar='fullpath',
                        type=str,
                        help='Full path to local file or folder.')
    parser.add_argument('--prefix',
                        type=str,
                        help="Prefix string to prepend on output filenames.")
    parser.add_argument('--timestamp',
                        action='store_true',
                        help="Add a Time Stamp on all CSS/SCSS output files.")
    parser.add_argument('--quiet',
                        action='store_true',
                        help="Quiet, Silent, force disable all Logging.")
    parser.add_argument('--checkupdates',
                        action='store_true',
                        help="Check for Updates from Internet while running.")
    parser.add_argument('--after',
                        type=str,
                        help="Command to execute after run (Experimental).")
    parser.add_argument('--before',
                        type=str,
                        help="Command to execute before run (Experimental).")
    parser.add_argument('--watch',
                        action='store_true',
                        help="Re-Compress if file changes (Experimental).")
    parser.add_argument('--group',
                        action='store_true',
                        help="Group Alphabetically CSS Poperties by name.")
    parser.add_argument('--justify',
                        action='store_true',
                        help="Right Justify CSS Properties (Experimental).")
    global args
    args = parser.parse_args()
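
The prctl call in the example above hard-codes 15, which is PR_SET_NAME on Linux. Below is a slightly more explicit, self-contained sketch of the same process-renaming trick; it is illustrative only and not part of css-html-prettify (the helper name set_process_name is made up for this sketch):

import ctypes
import sys

PR_SET_NAME = 15  # from <linux/prctl.h>

def set_process_name(name):
    """Rename the current process as shown by ps/top (Linux only).
    The kernel truncates the name to 15 bytes plus a terminating NUL."""
    if not sys.platform.startswith("linux"):
        return
    libc = ctypes.CDLL("libc.so.6", use_errno=True)
    buf = ctypes.create_string_buffer(name.encode("utf-8")[:15])
    libc.prctl(PR_SET_NAME, buf, 0, 0, 0)

set_process_name("css-html-prettify")
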
Example #47
0
    page_list.append(k)
    sft = "/".join((thing["Subject"], thing["Field"], thing["Topic"]))
    # concepts = thing["Related Concepts"].split(",")
    # concept_map[k] = concepts
    topic_map[k] = sft
    for c in concepts:
        if c not in concept_list:
            concept_list.append(c)

pid_override = list(topic_map.keys())

if __name__ == "__main__":
    # tracemalloc.start()
    print("Initialising deep learning HWGen....")

    os.nice(3)

    model = None
    # print("loading...")
    # with open(asst_fname, 'rb') as f:
    #     assignments = pickle.load(f)
    #
    # print("loaded {} assignments".format(len(assignments)))
    #

    USE_CACHED_ASSGTS = True
    SAVE_CACHED_ASSGTS = True
    cache_fname = base + "cached_assgts.csv"
    if USE_CACHED_ASSGTS:
        assignments = pandas.DataFrame.from_csv(cache_fname)
    else:
Example #48
0
#!/usr/bin/python
#concatenate 6 pngs on one wall

import os
import sys
import initialize as my

# check syntax
i = len(sys.argv)
if i != 5:
    print "Usage: pngwall.py dm_contour gas_ov dm_prof outfile"
    exit(1)

os.nice(1)

from PIL import Image
import ImageDraw
import ImageFont

rr = Image.open(sys.argv[3])

x, y = rr.size
x2 = x / 2
y2 = y / 2

LU = Image.open(sys.argv[1])
lu = LU.resize((x2, y2), Image.ANTIALIAS)
LB = Image.open(sys.argv[2])
lb = LB.resize((x2, y2), Image.ANTIALIAS)
Example #49
0
def run_script(script, scripts_dir, send_result=True):
    args = copy.deepcopy(script['args'])
    args['status'] = 'WORKING'
    args['send_result'] = send_result
    timeout_seconds = script.get('timeout_seconds')
    for param in script.get('parameters', {}).values():
        if param.get('type') == 'runtime':
            timeout_seconds = param['value']
            break

    output_and_send('Starting %s' % script['msg_name'], **args)

    env = copy.deepcopy(os.environ)
    env['OUTPUT_COMBINED_PATH'] = script['combined_path']
    env['OUTPUT_STDOUT_PATH'] = script['stdout_path']
    env['OUTPUT_STDERR_PATH'] = script['stderr_path']
    env['RESULT_PATH'] = script['result_path']
    env['DOWNLOAD_PATH'] = script['download_path']
    env['RUNTIME'] = str(timeout_seconds)
    env['HAS_STARTED'] = str(script.get('has_started', False))

    try:
        script_arguments = parse_parameters(script, scripts_dir)
    except KeyError as e:
        # 2 is the return code bash gives when it can't execute.
        script['exit_status'] = args['exit_status'] = 2
        output = "Unable to run '%s': %s\n\n" % (
            script['name'], str(e).replace('"', '').replace('\\n', '\n'))
        output += 'Given parameters:\n%s\n\n' % str(
            script.get('parameters', {}))
        try:
            output += 'Discovered storage devices:\n%s\n' % str(
                get_block_devices())
        except KeyError:
            pass
        output += 'Discovered interfaces:\n%s\n' % str(get_interfaces())

        output = output.encode()
        args['files'] = {
            script['combined_name']: output,
            script['stderr_name']: output,
        }
        output_and_send(
            'Failed to execute %s: %d' %
            (script['msg_name'], args['exit_status']), **args)
        return False

    try:
        # This script sets its own niceness value to the highest (-20) below
        # to help ensure the heartbeat keeps running. When launching the
        # script we need to lower the nice value, as a child process
        # inherits its parent's niceness value. preexec_fn is executed in
        # the child process before the command is run. When setting the
        # nice value the kernel adds the provided increment to the current
        # nice value. Since the runner uses a nice value of -20, passing 40
        # gives an effective nice value of 20.
        proc = Popen(script_arguments,
                     stdout=PIPE,
                     stderr=PIPE,
                     env=env,
                     preexec_fn=lambda: os.nice(40))
        capture_script_output(proc, script['combined_path'],
                              script['stdout_path'], script['stderr_path'],
                              timeout_seconds)
    except OSError as e:
        if isinstance(e.errno, int) and e.errno != 0:
            script['exit_status'] = args['exit_status'] = e.errno
        else:
            # 2 is the return code bash gives when it can't execute.
            script['exit_status'] = args['exit_status'] = 2
        stderr = str(e).encode()
        if stderr == b'':
            stderr = b'Unable to execute script'
        args['files'] = {
            script['combined_name']: stderr,
            script['stderr_name']: stderr,
        }
        output_and_send(
            'Failed to execute %s: %d' %
            (script['msg_name'], args['exit_status']), **args)
        sys.stdout.write('%s\n' % stderr)
        sys.stdout.flush()
        return False
    except TimeoutExpired:
        args['status'] = 'TIMEDOUT'
        args['files'] = {
            script['combined_name']: open(script['combined_path'],
                                          'rb').read(),
            script['stdout_name']: open(script['stdout_path'], 'rb').read(),
            script['stderr_name']: open(script['stderr_path'], 'rb').read(),
        }
        if os.path.exists(script['result_path']):
            args['files'][script['result_name']] = open(
                script['result_path'], 'rb').read()
        output_and_send(
            'Timeout(%s) expired on %s' %
            (str(timedelta(seconds=timeout_seconds)), script['msg_name']),
            **args)
        return False
    else:
        script['exit_status'] = args['exit_status'] = proc.returncode
        args['files'] = {
            script['combined_name']: open(script['combined_path'],
                                          'rb').read(),
            script['stdout_name']: open(script['stdout_path'], 'rb').read(),
            script['stderr_name']: open(script['stderr_path'], 'rb').read(),
        }
        if os.path.exists(script['result_path']):
            args['files'][script['result_name']] = open(
                script['result_path'], 'rb').read()
        output_and_send(
            'Finished %s: %s' % (script['msg_name'], args['exit_status']),
            **args)
        if proc.returncode != 0:
            return False
        else:
            return True
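
The preexec_fn trick above works because os.nice() takes an increment relative to the niceness the child inherits from its parent, and the kernel clamps the result to its allowed range. A minimal, self-contained sketch of the same idea (not taken from the original sources; the child simply runs the POSIX nice utility so it prints its own effective niceness):

import os
import subprocess

def lower_child_priority():
    # Runs in the child between fork() and exec(); the increment is added to
    # the niceness inherited from the parent and clamped by the kernel.
    os.nice(40)

# The child prints its effective niceness (e.g. 19 on Linux when the parent
# runs at -20), while the parent's own priority is left untouched.
proc = subprocess.Popen(["nice"], preexec_fn=lower_child_priority)
proc.wait()
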
Example #50
0
def main():
    parser = argparse.ArgumentParser(
        description='Download and run scripts from the MAAS metadata service.')
    parser.add_argument(
        "--config",
        metavar="file",
        help="Specify config file",
        default='/etc/cloud/cloud.cfg.d/91_kernel_cmdline_url.cfg')
    parser.add_argument("--ckey",
                        metavar="key",
                        help="The consumer key to auth with",
                        default=None)
    parser.add_argument("--tkey",
                        metavar="key",
                        help="The token key to auth with",
                        default=None)
    parser.add_argument("--csec",
                        metavar="secret",
                        help="The consumer secret (likely '')",
                        default="")
    parser.add_argument("--tsec",
                        metavar="secret",
                        help="The token secret to auth with",
                        default=None)
    parser.add_argument("--apiver",
                        metavar="version",
                        help="The apiver to use (\"\" can be used)",
                        default=MD_VERSION)
    parser.add_argument("--url",
                        metavar="url",
                        help="The data source to query",
                        default=None)
    parser.add_argument("--no-send",
                        action='store_true',
                        default=False,
                        help="Don't send results back to MAAS")
    parser.add_argument("--no-download",
                        action='store_true',
                        default=False,
                        help="Assume scripts have already been downloaded")

    parser.add_argument(
        "storage_directory",
        nargs='?',
        default=os.path.abspath(os.path.join(os.path.dirname(__file__), '..')),
        help="Directory to store the extracted data from the metadata service."
    )

    args = parser.parse_args()

    creds = {
        'consumer_key': args.ckey,
        'token_key': args.tkey,
        'token_secret': args.tsec,
        'consumer_secret': args.csec,
        'metadata_url': args.url,
    }

    if args.config:
        read_config(args.config, creds)

    url = creds.get('metadata_url')
    if url is None:
        fail("URL must be provided either in --url or in config\n")
    url = "%s/%s/" % (url, args.apiver)

    # Disable the OOM killer on the runner process, the OOM killer will still
    # go after any tests spawned.
    oom_score_adj_path = os.path.join('/proc', str(os.getpid()),
                                      'oom_score_adj')
    open(oom_score_adj_path, 'w').write('-1000')
    # Give the runner the highest nice value to ensure the heartbeat keeps
    # running.
    os.nice(-20)

    # Make sure installing packages is noninteractive for this process
    # and all subprocesses.
    if 'DEBIAN_FRONTEND' not in os.environ:
        os.environ['DEBIAN_FRONTEND'] = 'noninteractive'

    heart_beat = HeartBeat(url, creds)
    if not args.no_send:
        heart_beat.start()

    scripts_dir = os.path.join(args.storage_directory, 'scripts')
    os.makedirs(scripts_dir, exist_ok=True)
    out_dir = os.path.join(args.storage_directory, 'out')
    os.makedirs(out_dir, exist_ok=True)

    has_content = True
    fail_count = 0
    if not args.no_download:
        has_content = download_and_extract_tar("%s/maas-scripts/" % url, creds,
                                               scripts_dir)
    if has_content:
        fail_count = run_scripts_from_metadata(url, creds, scripts_dir,
                                               out_dir, not args.no_send,
                                               not args.no_download)

    # Signal success or failure after all scripts have run. This tells the
    # region to transition the status.
    if fail_count == 0:
        output_and_send('All scripts successfully ran', not args.no_send, url,
                        creds, 'OK')
    else:
        output_and_send('%d test scripts failed to run' % fail_count,
                        not args.no_send, url, creds, 'FAILED')

    heart_beat.stop()
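
Both protections applied above (writing -1000 to /proc/<pid>/oom_score_adj and calling os.nice(-20)) are privileged operations. A hedged sketch of the same two steps wrapped in a best-effort helper that degrades gracefully when run without root; the helper name protect_runner_process is illustrative and not part of the original script:

import os

def protect_runner_process():
    """Best effort: exempt this process from the OOM killer and raise its
    scheduling priority. Both steps need CAP_SYS_RESOURCE / CAP_SYS_NICE
    (typically root), so failures are silently ignored."""
    try:
        with open('/proc/%d/oom_score_adj' % os.getpid(), 'w') as f:
            f.write('-1000')
    except OSError:
        pass
    try:
        os.nice(-20)
    except OSError:
        pass
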
Example #51
0
sys.path.append('../../../')
sys.path.append('../../../communicator/python/')

from communicator.python.Communicator import Message
from communicator.python.LayerCommunicator import LayerCommunicator
import communicator.python.interf_pb2 as interf

# These are the list of arguments used to run each one of the scenarios.
cmd_single_radio = ['--delayaftertx=0', '--txslots=20', '--txgain=16', '--rxonly']
cmd_lbt_disabled = ['--delayaftertx=0', '--txslots=20', '--txgain=16']
cmd_lbt_enabled  = ['--delayaftertx=0', '--txslots=20', '--txgain=16', '--lbtthreshold=-78', '--lbtbackoff=16']

if __name__ == '__main__':

    try:
        new_value = os.nice(-20)
        print("Nice set to:",new_value)
    except OSError:
        print("Could not set niceness")

    source_module = interf.MODULE_MAC
    print("Create CommManager object.")
    lc = LayerCommunicator(source_module, [interf.MODULE_PHY])

    # Fix PHY Parameters.
    radio_id = 200
    # Fix APP Parameters.
    send_to_id = 100

    cmd_single_radio.append("--radioid="+str(radio_id))
    cmd_single_radio.append("--sendtoid="+str(send_to_id))
Example #52
0
#!/usr/bin/env python3
"""
compute sizes of all connected components.
sort and display.
"""

from geo.point import Point
from math import floor, sqrt, pi, log
from collections import defaultdict
from sys import argv
from os import nice, name
if name == "posix":  # on ne sait jamais, au cas ou vous puissiez être sous Windows :-)
    nice(1)  # set highest priority available to the python executer


def load_instance(filename):
    """
    loads .pts file.
    returns distance limit and points.
    """
    with open(filename, "r") as instance_file:
        lines = iter(instance_file)
        distance = float(next(lines))
        points = [Point([float(f) for f in l.split(",")]) for l in lines]
    return distance, points


def facteur_optimal(distance, longueur_liste_points, dimension=2):
    """renvoie un facteur optimal pour la taille des carrés du quadrillage,
    (optimal tant que l'entrée est une distribution aléatoire uniforme)"""
Example #53
0
#!/usr/bin/python

import os
import sys
import threading

nproc = 48
os.nice(8)

semaphore = threading.Semaphore(nproc)


def run_command(cmd):
    with semaphore:
        os.system(cmd)


os.system("mkdir vid")
center = open("mt/pos_sss", "r")
i = 0
for line in center:
    i = i + 1
    val = line.split()
    x = float(val[0])
    y = float(val[1])
    z = float(val[2])
    r = float(val[3])
    r = 0.001

    # no following, simply 0.5, box of 10kpc
    #x = 0.5; y=0.5; z=0.5; r=0.01
Example #54
0
 def run(self) -> None:
     os.nice(5)
     for file in FILES_DIRECTORY.rglob("*.ctb"):
         read_cached_ctb_file(file.absolute())
     for file in FILES_DIRECTORY.rglob("*.ctb"):
         read_cached_preview(file.absolute())
Example #55
0
        #         ret, proc, t = ret
        #         print "Processed {} in {:.1f} seconds".format(proc,t)
        # else:
        #     def callback(ret):
        #         pass

        if len(to_run) > 3000:
            print "[!] You might want to kill this at some point and run the rest with --skip_already_done because of memory leaks"

        # Now run them
        if len(to_run) == 1:
            print "Running one chain"
            map(run_chain, enumerate(to_run))
            print "Done"
        else:
            os.nice(4)
            runner = pyrun.Runner(nproc=min(args.ncpu, len(to_run)),
                                  func=run_chain,
                                  dot_type=(3 if len(to_run) < 500 else 0))
            runner.add_args(to_run)
            runner.run(print_callback=callback)

    if args.tag:
        outdir_limits = "../limits/{}/".format(args.tag)
        #os.system("mkdir -p {}".format(outdir_limits))
        #os.system("cp ../misc/signal_regions.h {}/".format(outdir_limits))
        #os.system("cp ../misc/bdt.h {}/".format(outdir_limits))
        #os.system("cp yieldMaker.C make_shape_hists.py py_doAll.py plot_all.py {}/".format(outdir_limits))

        if args.shapes:
            import make_shape_hists
Example #56
0
 def preexec_fn() -> None:  # pragma: no cover
     if nice_level is not None:
         try:
             os.nice(nice_level)
         except:  # pylint: disable=bare-except
             pass
Example #57
0
#!/usr/bin/python
import socket, atexit, math, threading, sys, os, time
from MotorHelper import MotorHelper
import math
import i2clcd
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
import netifaces as ni
import Adafruit_PCA9685
from time import sleep
from tenDOFclass import *
import threading

os.nice(20)  # note: nice(20) gives the *lowest* priority; os.nice(-20) (root only) would raise it
servo = Adafruit_PCA9685.PCA9685(address=0x40)
motor = Adafruit_PCA9685.PCA9685(address=0x60)
motor.set_pwm_freq(1000)
servo.set_pwm_freq(60)
DC1Helper = MotorHelper(motor, 0, 17, 18)
DC2Helper = MotorHelper(motor, 1, 20, 21)
tenDOF = tenDOFclass()

isStepping = False
# Stepper Motor part
ControllPin = [22, 23, 24, 25]
shootled = 6
GPIO.setup(shootled, GPIO.OUT)
GPIO.output(shootled, False)

for pin in ControllPin:
    GPIO.setup(pin, GPIO.OUT)
Example #58
0
# I.3. [AUTO] Other parameters
# Finalize the settings
s['duration'] = DURATION
s['timestamp'] = datetime.datetime.now().strftime('%y%m%d_%H%M%S')
s['channel_count'] = sum(active_channels.values())  # Add the channel count
s = {**s, **active_channels}  # Merge both dict
print("[INFO] Acquisition parameters:")
for item in s.items():
    print(item)

# Set some other parameters based on hardware and design
s['if_amplifier_bandwidth'] = 2e6  # [Hz] Nothing we can do about that?
s['fir_gain'] = 9.0
s['adc_ref'] = 1  # [V] Reference voltage on the ADC inputs
s['adc_bits'] = 12
s['d_antenna'] = 28e-3  # [m] Distance between the two antennas. Corresponds to lambda/2 for f0 = 5.3 GHz
s['angle_limit'] = 55
s['angle_pad'] = 100
s['c'] = 299792458.0  # [m/s]
s['channel_offset'] = 21  # Not sure what that is
ADC_BITS = 12
ADC_BYTES = ADC_BITS // 8 + 1
adc_sampling_frequency = 1e6  # [Hz] Effective sampling rate for the ADC as the IF amplifier has a 2 MHz bandwidth
#BYTE_USB_READ = 0x10000
BYTE_USB_READ = 0x10000

# II. Run Acquisition loop
if __name__ == '__main__':
    print("[INFO] Process set to niceness",
          os.nice(0))  # Only root can be below 0
    main()
Example #59
0
File: job_dispatch.py Project: rolk/ert
                  )  # Do not really look at exit status yet...

    if status[0]:
        if job.get("error_file"):
            if os.path.exists(job.get("error_file")):
                status = (False, -1, "Found the error file:%s - job failed" %
                          job.get("error_file"))

    return status


#################################################################

#################################################################

os.nice(19)
if not os.path.exists(run_path):
    sys.stderr.write(
        "*****************************************************************\n")
    sys.stderr.write("** FATAL Error: Could not find directory: %s \n" %
                     run_path)
    sys.stderr.write("** CWD: %s\n" % os.getcwd())
    sys.stderr.write(
        "*****************************************************************\n")

    fileH = open(EXIT_file, "w")
    fileH.write("Could not locate:%s " % run_path)
    fileH.write("CWD: %s" % os.getcwd())
    fileH.close()
    sys.exit(-1)
Example #60
0
send_to_logbook("WARNING", "Restarting...")

# influxdb init
client = InfluxDBClient(influxdb_host, influxdb_port)
client.switch_database('basecamp')
log.info("influxdb will be contacted on " + str(influxdb_host) + ":" +
         str(influxdb_port))
influx_json_body = [{
    "measurement": "water",
    "tags": {},
    "time": "",
    "fields": {}
}]

# OS: highest priority
os.nice(-20)

# GPIO
probe = "CSID7"
GPIO.setup(probe, GPIO.IN)

# =======================================================
# main loop

while True:
    while not GPIO.input(probe):
        time.sleep(0.01)
    while GPIO.input(probe):
        time.sleep(0.01)
    # an impulse has been detected
    log.info("1l impulse detected.")