Example #1
 def run(self):
     """
     main loop
     · run events
     · run .update() functions
     · update viewport
     · and sleep if there's time left
     """
     smallju_frame = False
     while self.running:
         t = time.clock_gettime(time.CLOCK_MONOTONIC)
         for ev in pygame.event.get():
             ev.state = self.view_proxy
             handler.dispatch(ev, self, ev)
         if self.view_proxy == "mandel" and self.smallju_show and \
           self.smallju.updating and smallju_frame:
             self.smallju.update()
             self.update = True
         elif self.view().updating:
             self.view().update()
             self.update = True
         smallju_frame ^= self.smallju_show
         if self.update:
             self.draw()
             self.update = False
         # naively sleep until we expect the new frame
         t = time.clock_gettime(time.CLOCK_MONOTONIC) - t
         if t < config["interface"]["max_fps"]:
             time.sleep(config["interface"]["max_fps"] - t)
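A minimal, standalone sketch of the frame-capped pattern the docstring above describes (handle events, update, draw, then sleep away the rest of the frame budget). The callables and the 60 FPS budget below are placeholders, not part of the project above:

import time

FRAME_INTERVAL = 1.0 / 60.0  # hypothetical frame budget in seconds

def frame_loop(running, handle_events, update, draw):
    # Time the work done this frame on a monotonic clock and sleep
    # for whatever is left of the frame budget.
    while running():
        start = time.clock_gettime(time.CLOCK_MONOTONIC)
        handle_events()
        if update():
            draw()
        elapsed = time.clock_gettime(time.CLOCK_MONOTONIC) - start
        if elapsed < FRAME_INTERVAL:
            time.sleep(FRAME_INTERVAL - elapsed)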
Example #2
 def test_pthread_getcpuclockid(self):
     clk_id = time.pthread_getcpuclockid(threading.get_ident())
     self.assertTrue(type(clk_id) is int)
     self.assertNotEqual(clk_id, time.CLOCK_THREAD_CPUTIME_ID)
     t1 = time.clock_gettime(clk_id)
     t2 = time.clock_gettime(clk_id)
     self.assertLessEqual(t1, t2)
Example #3
    def infinite_loop(self):
        self.debug.write("Successfully launched")

        self.gps.wait()

        tick = 1
        while True:

            if self.kill_now:
                self.debug.write("Quitting")
                self.log.close()
                self.uart.close()
                self.debug.close()
                break

            # monotonic clock will not change as system time changes
            begin = time.clock_gettime(time.CLOCK_MONOTONIC)
            # system time can be converted to date/time stamp
            stamp = time.time()
            t1 = threading.Thread(target=self.log_data, args=(stamp,))
            t1.start()

            if tick >= self.cam_period:
                tick = 1
                t2 = threading.Thread(target=self.take_picture, args=(begin,))
                t2.start()
            else:
                tick = tick + 1

            delta = time.clock_gettime(time.CLOCK_MONOTONIC) - begin

            if self.serial_period > delta:
                time.sleep(self.serial_period - delta) 
Example #4
    def sendTexture(scene):
        for name, target in Splash._targets.items():
            if target._texture is None or target._texWriterPath is None:
                return
    
            buffer = bytearray()
            pixels = [pix for pix in target._texture.pixels]
            pixels = numpy.array(pixels)
            pixels = (pixels * 255.0).astype(numpy.ubyte)
            buffer += pixels.tostring()
            currentTime = time.clock_gettime(time.CLOCK_REALTIME) - target._startTime

            if target._texWriter is None or target._texture.size[0] != target._texSize[0] or target._texture.size[1] != target._texSize[1]:
                try:
                    from pyshmdata import Writer
                    os.remove(target._texWriterPath)
                except:
                    pass
    
                if target._texWriter is not None:
                    del target._texWriter
    
                target._texSize[0] = target._texture.size[0]
                target._texSize[1] = target._texture.size[1]
                target._texWriter = Writer(path=target._texWriterPath, datatype="video/x-raw, format=(string)RGBA, width=(int){0}, height=(int){1}, framerate=(fraction)1/1".format(target._texSize[0], target._texSize[1]), framesize=len(buffer))
            target._texWriter.push(buffer, floor(currentTime * 1e9))

            # We send twice to pass through the Splash input buffer
            time.sleep(0.1)
            currentTime = time.clock_gettime(time.CLOCK_REALTIME) - target._startTime
            target._texWriter.push(buffer, floor(currentTime * 1e9)) 
Example #5
 def cpu_measurement_wrapper(*args, **kwargs):
     start = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
     try:
         return f(*args, **kwargs)
     finally:
         end = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
         tracking[1] += 1
         tracking[2] += (end - start)
Example #6
    def _generic_impl(self):
        try:
            yield from self.connect()
        except AuthenticationError as e:
            self._statistics.req_401_rsp += 1
            self._logger.error("Authentication failed. Bad username/password.")
            raise RestError(401, "bad username/password")
        except ValueError as e:
            self._statistics.req_401_rsp += 1
            self._logger.error("Authentication failed. Missing BasicAuth.")
            raise RestError(401, "must supply BasicAuth")

        # ATTN: special checks for POST and PUT
        if self.request.method == "DELETE" and not is_config(self.request.uri):
            self._logger.error("Operational data DELETE not supported.")
            raise RestError(405, "can't delete operational data")

        # ATTN: make json its own request handler
        if self.request.method == "GET" and is_schema_api(self.request.uri):
            return 200, "application/vnd.yang.data+json", get_json_schema(self._schema_root, self.request.uri)

        # convert url and body into xml payload 
        body = self.request.body.decode("utf-8")
        body_header = self.request.headers.get("Content-Type")
        convert_payload = (body, body_header)

        try:
            if self.request.method in ("GET",):
                converted_url = convert_get_request_to_xml(self._schema_root, self.request.uri)
            else:
                converted_url = self._confd_url_converter.convert(self.request.method, self.request.uri, convert_payload)
        except Exception as e:
            self._logger.error("invalid url requested %s, %s", self.request.uri, e)
            raise RestError(404, "Resource target or resource node not found")

        self._logger.debug("converted url: %s" % converted_url)

        if self._configuration.log_timing:
            sb_start_time = time.clock_gettime(time.CLOCK_REALTIME)

        result, netconf_response = yield from self._netconf.execute(self._netconf_operation, self.request.uri, converted_url)

        if self._configuration.log_timing:
            sb_end_time = time.clock_gettime(time.CLOCK_REALTIME)
            self._logger.debug("southbound transaction time: %s" % (sb_end_time - sb_start_time))

        if result != Result.OK:
            code = map_error_to_http_code(result)
            raise RestError(code, netconf_response)

        # convert response to match the accept type
        code, response = self._convert_response_and_get_status(self._netconf_operation, self._accept_type, netconf_response)
        return code, self._accept_type.value, response
Example #7
def main_loop(bar, inputs=None):
    # TODO: remove eventinputs again?
    #inputs += bar.widget.eventinputs()
    if inputs is None:
        inputs = global_inputs

    global_update = True
    def signal_quit(signal, frame):
        quit_main_loop()
    signal.signal(signal.SIGINT, signal_quit)
    signal.signal(signal.SIGTERM, signal_quit)

    # main loop
    while not core.shutdown_requested() and bar.is_running():
        now = time.clock_gettime(time.CLOCK_MONOTONIC)
        if bar.widget.maybe_timeout(now):
            global_update = True
        data_ready = []
        if global_update:
            painter = bar.painter()
            painter.widget(bar.widget)
            data_ready = select.select(inputs,[],[], 0.00)[0]
            if not data_ready:
                #print("REDRAW: " + str(time.clock_gettime(time.CLOCK_MONOTONIC)))
                painter.flush()
                global_update = False
            else:
                pass
                #print("more data already ready")
        if not data_ready:
            # wait for new data
            next_timeout = now + 360 # wait for at most 360 seconds until the next bar update
            to = bar.widget.next_timeout()
            if to is not None:
                next_timeout = min(next_timeout, to)
            now = time.clock_gettime(time.CLOCK_MONOTONIC)
            next_timeout -= now
            next_timeout = max(next_timeout,0.1)
            #print("next timeout = " + str(next_timeout))
            data_ready = select.select(inputs,[],[], next_timeout)[0]
            if core.shutdown_requested():
                break
        if not data_ready:
            pass #print('timeout!')
        else:
            for x in data_ready:
                x.process()
                global_update = True
    bar.proc.kill()
    for i in inputs:
        i.kill()
    bar.proc.wait()
Example #8
 def test_pthread_getcpuclockid(self):
     clk_id = time.pthread_getcpuclockid(threading.get_ident())
     self.assertTrue(type(clk_id) is int)
     # when in 32-bit mode AIX only returns the predefined constant
     if not platform.system() == "AIX":
         self.assertNotEqual(clk_id, time.CLOCK_THREAD_CPUTIME_ID)
     elif (sys.maxsize.bit_length() > 32):
         self.assertNotEqual(clk_id, time.CLOCK_THREAD_CPUTIME_ID)
     else:
         self.assertEqual(clk_id, time.CLOCK_THREAD_CPUTIME_ID)
     t1 = time.clock_gettime(clk_id)
     t2 = time.clock_gettime(clk_id)
     self.assertLessEqual(t1, t2)
Example #9
def ping(target, timeout=5.0):
    request = IcmpEcho(payload=os.urandom(32))
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_ICMP) as s:
        s.connect(target.address)
        s.settimeout(timeout)

        start = time.clock_gettime(time.CLOCK_MONOTONIC)
        s.send(request.to_bytes())
        response = s.recv(65536)
        end = time.clock_gettime(time.CLOCK_MONOTONIC)

    response = IcmpEcho.from_bytes(response)
    rtt_ms = (end - start) * 1000
    print('Got response in {delay:.3f} ms'.format(delay=rtt_ms))
Example #10
def RunCompilation(num):
    global total_tasks
    global total_time
    global end

    while True:
        start = time.clock_gettime(time.CLOCK_MONOTONIC)
        if os.system("./compile.sh /usr/bin/dist-clang/clang++ {} > /dev/null".format(num)) != 0:
            break
        total_tasks += 1
        total_time += time.clock_gettime(time.CLOCK_MONOTONIC) - start

        if len(sys.argv) > 2 and total_tasks >= int(sys.argv[2]):
            end = time.clock_gettime(time.CLOCK_MONOTONIC)
            break
Example #11
 def test_FDTimer_absolute(self):
     t = timers.FDTimer(timers.CLOCK_REALTIME)
     start = time.time()
     t.settime(time.clock_gettime(time.CLOCK_REALTIME)+5.0, 0.0, absolute=True)
     print(t.read())
     t.close()
     self.assertAlmostEqual(time.time()-start, 5.0, places=2)
Example #12
 def poll_timed(self, timeout : float, clock_id : int = None) -> bytes:
     '''
     Wait for a message to be broadcasted on the bus.
     The caller should make a copy of the received message,
     without freeing the original copy, and parse it in a
     separate thread. When the new thread has been
     started, the caller of this function should then
     either call `Bus.poll_timed` again or `Bus.poll_stop`.
     
     @param   timeout:float  The time at which the function shall fail with
                              `os.errno.EAGAIN`, if it has not already completed
     @param   clock_id:int?  The clock `timeout` is measured in, it must be a
                             predictable clock, if `None`, `timeout` is measured in
                             relative time instead of absolute time
     @return  :bytes         The received message
                             NB! The received message will not be decoded from UTF-8
     '''
     from native_bus import bus_poll_timed_wrapped
     if clock_id is None:
         import time
         clock_id = time.CLOCK_MONOTONIC_RAW
         timeout += time.clock_gettime(clock_id)
     (message, e) = bus_poll_timed_wrapped(self.bus, timeout, clock_id)
     if message is None:
         raise self.__oserror(e)
     return message
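The docstring above turns a relative timeout into an absolute deadline on CLOCK_MONOTONIC_RAW before calling the native wrapper. That relative-to-absolute conversion can be sketched on its own; this is an illustration for Linux, not part of the bus API:

import time

def deadline_from_timeout(timeout, clock_id=time.CLOCK_MONOTONIC_RAW):
    # Absolute timestamp on the given clock, `timeout` seconds from now;
    # clock_settime() cannot move this clock.
    return time.clock_gettime(clock_id) + timeout

def remaining(deadline, clock_id=time.CLOCK_MONOTONIC_RAW):
    # Seconds left until the deadline, clamped at zero.
    return max(0.0, deadline - time.clock_gettime(clock_id))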
Example #13
    def dump_cpu_profiles(tofile=sys.stderr):
        elapsed_cpu = time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID) - baseline_cpu
        elapsed_wall = time.monotonic() - baseline_wall

        print('Elapsed: {wall:.1f}   CPU: {cpu:.1f} ({percent:.0f}%)'.format(
            wall=elapsed_wall,
            cpu=elapsed_cpu,
            percent=100.0 * elapsed_cpu / elapsed_wall), file=tofile)
        print('{rank:4s} {name:60s} {count:6s} {persec:6s} {total:8s} {each:8s} {fraction:6s}'.format(
            rank='#',
            name='Function',
            count='Calls',
            persec='(/sec)',
            total='Total(s)',
            each='Each(us)',
            fraction="Frac"), file=tofile)

        rank = 1
        for name, count, total in sorted(_cpu_tracking, key=operator.itemgetter(2), reverse=True):
            if count == 0:
                break

            print('{rank:4d} {name:60s} {count:6d} {persec:6.1f} {total:8.3f} {each:8.0f} {fraction:6.1f}'.format(
                rank=rank,
                name=name,
                count=count,
                persec=1.0 * count / elapsed_wall,
                total=total,
                each=total * 1e6 / count,
                fraction=100.0 * total / elapsed_cpu), file=tofile)
            rank += 1

        tofile.flush()
Example #14
    def test_record_event_sequence_sync_has_reasonable_relative_timestamp(self):
        monotonic_time_first = time.clock_gettime(time.CLOCK_MONOTONIC)

        calls = self.call_start_stop_event_sync()
        relative_time_first = calls[0][2][2][0][0]
        self.interface_mock.ClearCalls()
        calls = self.call_start_stop_event_sync()
        relative_time_second = calls[0][2][2][1][0]

        monotonic_time_second = time.clock_gettime(time.CLOCK_MONOTONIC)

        relative_time_difference = relative_time_second - relative_time_first
        self.assertLessEqual(0, relative_time_difference)
        monotonic_time_difference = 1e9 * \
            (monotonic_time_second - monotonic_time_first)
        self.assertLessEqual(relative_time_difference,
                             monotonic_time_difference)
Example #15
def main(argv):
    """
    Entry point for ntpclient.py.

    Arguments:
        argv: command line arguments
    """
    res = None
    quiet = False
    args = set(argv)
    if {'-q', '--quiet'}.intersection(args):
        quiet = True
    if '-s' in argv:
        server = argv[argv.index('-s')+1]
    elif '--server' in argv:
        server = argv[argv.index('--server')+1]
    elif 'NTPSERVER' in os.environ:
        server = os.environ['NTPSERVER']
    else:
        server = 'pool.ntp.org'
    if {'-h', '--help'}.intersection(args):
        print('Usage: ntpclient [-h|--help] [-q|--quiet] [-s|--server timeserver]')
        print(f'Using time server {server}.')
        sys.exit(0)
    t1 = time.clock_gettime(time.CLOCK_REALTIME)
    ntptime = get_ntp_time(server)
    t4 = time.clock_gettime(time.CLOCK_REALTIME)
    # It is not guaranteed that the NTP time is *exactly* in the middle of both
    # local times. But it is a reasonable simplification.
    roundtrip = round(t4 - t1, 4)
    localtime = (t1 + t4) / 2
    diff = localtime - ntptime
    if os.geteuid() == 0:
        time.clock_settime(time.CLOCK_REALTIME, ntptime)
        res = 'Time set to NTP time.'
    localtime = datetime.fromtimestamp(localtime)
    ntptime = datetime.fromtimestamp(ntptime)
    if not quiet:
        print('Using server {}.'.format(server))
        print('NTP call took approximately', roundtrip, 's.')
        print('Local time value:', localtime.strftime('%a %b %d %H:%M:%S.%f %Y.'))
        print('NTP time value:', ntptime.strftime('%a %b %d %H:%M:%S.%f %Y.'),
              '±', roundtrip/2, 's.')
        print('Local time - ntp time: {:.6f} s.'.format(diff))
        if res:
            print(res)
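The midpoint approximation used in main() can be illustrated with made-up numbers (these values are hypothetical, not from a real NTP exchange):

t1 = 1000.000       # local CLOCK_REALTIME just before the request
t4 = 1000.120       # local CLOCK_REALTIME just after the reply
ntptime = 1000.050  # timestamp reported by the server
roundtrip = round(t4 - t1, 4)  # 0.12 s
localtime = (t1 + t4) / 2      # 1000.06, local estimate of when the server answered
diff = localtime - ntptime     # 0.01 s: the local clock appears about 10 ms ahead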
Example #16
	def HandleEvent(self, event):
		now = time.clock_gettime(time.CLOCK_MONOTONIC)
		if not self._first_event:
			self._first_event = event
			self._first_event_timestamp = now
		self._last_event = event
		self._last_event_timestamp = now
		self._count_by_type[event['type']] += 1
Example #17
    def test_clock_settime(self):
        t = time.clock_gettime(time.CLOCK_REALTIME)
        try:
            time.clock_settime(time.CLOCK_REALTIME, t)
        except PermissionError:
            pass

        if hasattr(time, "CLOCK_MONOTONIC"):
            self.assertRaises(OSError, time.clock_settime, time.CLOCK_MONOTONIC, 0)
Example #18
 def log(self,sstr,level='msg'):
     now=time.strftime("%y/%m/%d %H:%M:%S")
     nows=(str(time.clock_gettime(time.CLOCK_REALTIME)).split('.')[1]+'000')[0:3]
     head=tail=''
     if level=='error':
         head='\033[31m'
         tail='\033[0m'
     output=now+nows+"\033[32m ["+str(level)+"]\033[0m "+":"+head+str(sstr)+tail
     print(output,file=sys.stderr)
     self.logf.write(output+"\n")
Example #19
def createRecordData(timeSec:float, dataSz:int, node, sAddr, nAddr, frames):
    return {
        'node'  : node,
        'sz'    : dataSz,
        'saddr' : sAddr,
        'naddr' : nAddr,
        'frames': frames,
        'time'  : timeSec,
        'start' : time.clock_gettime(time.CLOCK_MONOTONIC),
    }
Example #20
def handler(conn):
    clean_list = []
    while True:
        if conn.poll(10):
            data = conn.recv()
            sh_thread = Process(target=gs_shakalizing, args=data)
            sh_thread.start()
            sh_thread.join()
            clean_list.append((data[1], time.clock_gettime(0)))
        curr_time = time.clock_gettime(0)
        new_list = []
        for (filename, l_time) in clean_list:
            delta = curr_time - l_time
            if delta > TIME_DELTA:
                gen_file = path.join(app.config['UPLOAD_FOLDER'], filename)
                remove(gen_file)
            else:
                new_list.append((filename, l_time))
        clean_list = new_list
Example #21
def report():
    global TRACING_START_TIME
    global TRACING_END_TIME
    global REPORT_START_TIME
    global REPORT_END_TIME

    REPORT_START_TIME = time.clock_gettime(time.CLOCK_MONOTONIC)

    _reportMemStat()
    _reportCrossScope()
    _reportMemLeak()

    REPORT_END_TIME = time.clock_gettime(time.CLOCK_MONOTONIC)

    print("{:=^80}".format(''))
    print("Tracing time: %.3f s, Report generation time: %.3f s" % (
        TRACING_END_TIME - TRACING_START_TIME,
        REPORT_END_TIME - REPORT_START_TIME
    ))
    print()
Example #22
def set_gamma_(*crtcs, screen = 0, **kwargs):
    old_set_gamma(*crtcs, screen = screen, **kwargs)
    (R_curve, G_curve, B_curve) = aux.translate_to_integers()
    mraw = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
    print('time: %f'  % mraw)
    print('monitors: %s'  % ' '.join('%i.%i' % (screen, crtc) for crtc in crtcs))
    print('max: %i %i %i'  % (2**16 - 1, 2**16 - 1, 2**16 - 1))
    print('red: %s'   % ' '.join([str(stop) for stop in R_curve]))
    print('green: %s' % ' '.join([str(stop) for stop in G_curve]))
    print('blue: %s'  % ' '.join([str(stop) for stop in B_curve]))
    print()
Example #23
 def test_monotonic_settime(self):
     t1 = time.monotonic()
     realtime = time.clock_gettime(time.CLOCK_REALTIME)
     # jump backward with an offset of 1 hour
     try:
         time.clock_settime(time.CLOCK_REALTIME, realtime - 3600)
     except PermissionError as err:
         self.skipTest(err)
     t2 = time.monotonic()
     time.clock_settime(time.CLOCK_REALTIME, realtime)
     # monotonic must not be affected by system clock updates
     self.assertGreaterEqual(t2, t1)
Example #24
def get_clock():
    """ Return a number of seconds since an unspecified point in time

        It will use CLOCK_MONOTONIC if available, or fall back to time.time()

        It's useful to know if some event occurred before/after another one.
        It can also be useful to run an action every N seconds (note that
        system suspend might stop that clock).
    """
    if sys.version_info >= (3, 3):
        return time.clock_gettime(time.CLOCK_MONOTONIC)
    else:
        return time.time()
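As the docstring notes, a clock like this is mainly useful for ordering events or pacing periodic work. A minimal pacing sketch built on get_clock(); interval, action and iterations are placeholders:

def run_every(interval, action, iterations):
    # Run `action` roughly every `interval` seconds; since get_clock()
    # prefers CLOCK_MONOTONIC, system time changes do not upset the schedule.
    next_due = get_clock() + interval
    for _ in range(iterations):
        action()
        delay = next_due - get_clock()
        if delay > 0:
            time.sleep(delay)
        next_due += interval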
Example #25
 def __init__(self, env):
     super().__init__()
     self.method = env.get("REQUEST_METHOD")
     self.uri = env.get("REQUEST_URI")
     self.host = env.get("HTTP_HOST")
     self.script_uri = env.get("SCRIPT_NAME")
     self.script_path = env.get("SCRIPT_FILENAME")
     self.pathinfo = env.get("PATH_INFO")
     self.querystring = env.get("QUERY_STRING")
     self.remoteaddr = env.get("REMOTE_ADDR")
     self.remoteport = env.get("REMOTE_PORT")
     self.scheme = env.get("wsgi.url_scheme")
     if clock_thread is not None:
         self.icpu = time.clock_gettime(clock_thread)
Example #26
    async def info(self):
        """
        Returns basic system information.
        """
        buildtime = sw_buildtime()
        if buildtime:
            buildtime = datetime.fromtimestamp(int(buildtime))

        uptime = (await (await Popen(
            "env -u TZ uptime | awk -F', load averages:' '{ print $1 }'",
            stdout=subprocess.PIPE,
            shell=True,
        )).communicate())[0].decode().strip()

        serial = await self._system_serial()

        product = (await(await Popen(
            ['dmidecode', '-s', 'system-product-name'],
            stdout=subprocess.PIPE,
        )).communicate())[0].decode().strip() or None

        manufacturer = (await(await Popen(
            ['dmidecode', '-s', 'system-manufacturer'],
            stdout=subprocess.PIPE,
        )).communicate())[0].decode().strip() or None

        return {
            'version': self.version(),
            'buildtime': buildtime,
            'hostname': socket.gethostname(),
            'physmem': sysctl.filter('hw.physmem')[0].value,
            'model': sysctl.filter('hw.model')[0].value,
            'cores': sysctl.filter('hw.ncpu')[0].value,
            'loadavg': os.getloadavg(),
            'uptime': uptime,
            'uptime_seconds': time.clock_gettime(5),  # CLOCK_UPTIME = 5
            'system_serial': serial,
            'system_product': product,
            'license': await self.__get_license(),
            'boottime': datetime.fromtimestamp(
                struct.unpack('l', sysctl.filter('kern.boottime')[0].value[:8])[0]
            ),
            'datetime': datetime.utcnow(),
            'timezone': (await self.middleware.call('datastore.config', 'system.settings'))['stg_timezone'],
            'system_manufacturer': manufacturer,
        }
Example #27
    def monotonic_time():
        """Get the number of seconds passed since a fixed past moment.

        A monotonic clock measures the time elapsed since an arbitrary
        but immutable instant in the past. The value itself has no
        direct intrinsic meaning but the difference between two such
        values does, as it is guaranteed to accurately represent the
        amount of time passed between when those two measurements were
        taken, no matter the adjustments to the clock that occurred in
        between. Even NTP adjustments are ignored, meaning that the
        flow of time here is dictated only by the system clock and thus
        subject to any drift it may have.

        return (float): the value of the clock, in seconds.

        """
        # Raw means it's immune even to NTP time adjustments.
        return time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
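A small sketch (assuming Linux, where both clocks exist) comparing the monotonic sources mentioned in the docstring, together with the resolution each clock reports:

import time

for name in ("CLOCK_MONOTONIC", "CLOCK_MONOTONIC_RAW"):
    clk = getattr(time, name, None)
    if clk is None:
        continue  # not every platform exposes both clocks
    print(name,
          "now=%.6f" % time.clock_gettime(clk),
          "res=%.9f" % time.clock_getres(clk))
print("time.monotonic() now=%.6f" % time.monotonic())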
Example #28
    def execute(self, context):
        try:
            from pyshmdata import Writer
        except:
            print("Module pyshmdata was not found")
            return {'FINISHED'}
            
        scene = bpy.context.scene
        splash = scene.splash

        splashTarget = Target()
        splashTarget._updatePeriodObject = splash.updatePeriodObject
        splashTarget._updatePeriodEdit = splash.updatePeriodEdit

        context = bpy.context
        if bpy.context.edit_object is not None:
            splashTarget._object = context.edit_object
        else:
            splashTarget._object = context.active_object

        # Mesh sending stuff
        if splashTarget._object.name not in Splash._targets:
            splashTarget._startTime = time.clock_gettime(time.CLOCK_REALTIME)
            splashTarget._frameTimeMesh = 0
            splashTarget._frameTimeTex = 0

            splash.targetObject = splashTarget._object.name

            path = splash.outputPathPrefix + "_mesh_" + splash.targetObject
            try:
                os.remove(path)
            except:
                pass

            splashTarget._meshWriter = Writer(path=path, datatype="application/x-polymesh")

            Splash._targets[splashTarget._object.name] = splashTarget

            if Splash.sendMesh not in bpy.app.handlers.scene_update_post:
                bpy.app.handlers.scene_update_post.append(Splash.sendMesh)

            splash.outputActive = True

        return {'FINISHED'}
Example #29
    def test_time_ns_type(self):
        def check_ns(sec, ns):
            self.assertIsInstance(ns, int)

            sec_ns = int(sec * 1e9)
            # tolerate a difference of 50 ms
            self.assertLess((sec_ns - ns), 50 * 10 ** 6, (sec, ns))

        check_ns(time.time(),
                 time.time_ns())
        check_ns(time.monotonic(),
                 time.monotonic_ns())
        check_ns(time.perf_counter(),
                 time.perf_counter_ns())
        check_ns(time.process_time(),
                 time.process_time_ns())

        if hasattr(time, 'clock_gettime'):
            check_ns(time.clock_gettime(time.CLOCK_REALTIME),
                     time.clock_gettime_ns(time.CLOCK_REALTIME))
Example #30
 def write_timed(self, message : str, timeout : float, clock_id : int = None):
     '''
     Broadcast a message on the bus
     
     @param  message:str    The message to write, may not be longer than 2047 bytes
                            after UTF-8 encoding
     @param  timeout:float  The time at which the function shall fail with
                            `os.errno.EAGAIN`, if it has not already completed
     @param  clock_id:int?  The clock `timeout` is measured in, it must be a
                            predictable clock, if `None`, `timeout` is measured in
                            relative time instead of absolute time
     '''
     from native_bus import bus_write_timed_wrapped
     if clock_id is None:
         import time
         clock_id = time.CLOCK_MONOTONIC_RAW
         timeout += time.clock_gettime(clock_id)
     (r, e) = bus_write_timed_wrapped(self.bus, message, timeout, clock_id)
     if r == -1:
         raise self.__oserror(e)
Example #31
def now() -> float:
    """
    Get current time.
    """
    return time.clock_gettime(time.CLOCK_MONOTONIC)
Example #32
"""
Display all visible SSIDs
"""

import os
import NetworkManager
import dbus.mainloop.glib
import time

dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)

os.system("nmcli device wifi list")

for dev in NetworkManager.NetworkManager.GetDevices():
    if dev.DeviceType != NetworkManager.NM_DEVICE_TYPE_WIFI:
        continue
#    dev.RequestScan({"ssid": "aay"})
    for ap in dev.GetAccessPoints():
        print('%-30s %dMHz %d%%' % (ap.Ssid, ap.Frequency, ap.Strength))
        print(ap.WpaFlags, ap.Flags, ap.RsnFlags,
              time.clock_gettime(time.CLOCK_BOOTTIME) - ap.LastSeen)
Example #33
# Importing Libraries
import serial
import time
#import matplotlib.pyplot as plt
arduino = serial.Serial(port='COM4', baudrate=115200, timeout=.1)


def write_read(x):
    arduino.write(bytes(x, 'utf-8'))
    time.sleep(0.05)
    data = arduino.readline()
    return data


speed = input("Enter a motor speed: ")  # Taking input from user
distance = input("Enter a distance: ")
message = speed + ',' + distance + '\n'
arduino.write(bytes(message, 'utf-8'))  # serial.write() expects bytes
dictionary1 = {}
xlist = []
ylist = []
time.clock_gettime(time.CLOCK_MONOTONIC)  # clock_gettime() requires a clock id argument
while True:
    value = arduino.readline().decode('utf-8')
    splitList = value.split(' ')
    print(splitList)  # printing the value
    xlist.append(splitList[0])
    ylist.append(splitList[1])

#plt.plot(xlist, ylist)
Example #34
def detect_events(sample_freq, queue):
    """
    Does FFT based event detection and plotting.

    :type sample_freq: int
    :param sample_freq: Sampling frequency in Hz
    :type queue: Multiprocessing Queue
    :param queue: detect_events() reads from this queue to acquire batches of IMU data captured by the acquire_data()
                  process.
    :return: None
    """

    BAR_WIDTH = 10
    BAR_SPACING = 15
    EDGE_OFFSET = 3
    GRAPH_SIZE = (500, 100)
    DATA_SIZE = (500, 100)

    graph = sg.Graph(GRAPH_SIZE, (0, 0), DATA_SIZE)

    # detect_events() blocks on the queue read, so the data processing rate is driven by the rate at which acquire_data() fills the queue.

    layout = [[sg.Image(filename='blank.png', key='image')], [graph],
              [sg.Button('Exit', size=(10, 1), font='Helvetica 14')]]

    # create the window and show it without the plot
    window = sg.Window('Demo Application - Scratching Detection',
                       location=(800, 400))

    window.Layout(layout).Finalize()

    while True:

        imu_data_dict = queue.get()

        # Check the timestamp on this data frame. If it's more than one second behind the current
        # CLOCK_MONOTONIC_RAW time, then detect_events() isn't keeping up with the incoming stream
        # of IMU measurements coming from acquire_data(). Jump back to the top of the while loop,
        # and get another data frame. We'll keep doing that until we're reading fresh data again.

        # TODO: the threshold here might need to be adjusted, and perhaps the whole queue should be cleared.

        if (time.clock_gettime(time.CLOCK_MONOTONIC_RAW) -
                imu_data_dict['timestamp']) > 1:

            continue

        df = pd.DataFrame(data=imu_data_dict['w_vel'][0], columns=['x'])

        # print("Number of NaN points: " + str(df.isna().sum()))

        # Doing a linear interpolation that replaces the NaN placeholders for desynced, dropped data
        # with a straight line between the last good data point before a dropout and the next good data
        # point. This does seem to improve the signal-to-noise ratio in the FFT.
        df = df.interpolate()

        yf = fftpack.rfft(df.loc[:, 'x'])

        # Is this a better way of getting the power spectrum than numpy.abs(yf)?
        yf = numpy.sqrt((yf * yf.conj()).real)

        xf = fftpack.rfftfreq((len(df.index)), 1. / sample_freq)

        peaks, peaks_dict = signal.find_peaks(yf, height=1)

        #print(xf[peaks], yf[peaks])
        #print(peaks, peaks_dict)

        # Check if any peak has been detected between 3 and 5 Hz, along the x axis. This is our rule for detecting
        # tooth-brushing behavior.

        event, values = window.Read(timeout=0, timeout_key='timeout')

        graph.Erase()

        for i in range(len(peaks_dict['peak_heights'])):
            graph_value = peaks_dict['peak_heights'][i]
            graph.DrawRectangle(
                top_left=(i * BAR_SPACING + EDGE_OFFSET, graph_value),
                bottom_right=(i * BAR_SPACING + EDGE_OFFSET + BAR_WIDTH, 0),
                fill_color='blue')
            graph.DrawText(text=graph_value,
                           location=(i * BAR_SPACING + EDGE_OFFSET + 25,
                                     graph_value + 10))

        if event == 'Exit' or event is None:
            sys.exit(0)

        # Scratching motion of puppet causes slow roll ( < 1 hz) along x axis.
        if numpy.where(peaks_dict['peak_heights'] > 800)[0].size > 0:
            window.FindElement('image').Update(filename='rat.png')
            print("Scratching Detected!")
        else:
            window.FindElement('image').Update(filename='blank.png')
Example #35
def lock_timer(queue):
    while 1:
        start_time = time.clock_gettime(time.CLOCK_REALTIME)
        while time.clock_gettime(time.CLOCK_REALTIME) - start_time < (0.001):
            pass
        release_time = time.clock_gettime(time.CLOCK_REALTIME)
Example #36
vlist1.extend(vlist1)

print("precision = %0.9f seconds" %
      time.clock_getres(time.CLOCK_MONOTONIC_RAW))

stats = {'strings': [], 'pandas': [], 'nvstrings': []}
vlist = []
for i in range(50):
    #
    vlist.extend(vlist1)
    stats['strings'].append(len(vlist))
    #
    dstrs = nvstrings.to_device(vlist)
    hstrs = pd.Series(vlist)
    #
    st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
    d = dstrs.find("#")
    et1 = (time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - st)
    print("nvstrings.find() = %05f" % et1)
    stats['nvstrings'].append(et1)
    #
    st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
    h = hstrs.str.find("#")
    et2 = (time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - st)
    print("     pandas.find() = %05f" % et2)
    stats['pandas'].append(et2)
    print("speedup = %0.1fx" % (et2 / et1))
    #
    d = None
    h = None
Example #37
 def test_clock_realtime(self):
     time.clock_gettime(time.CLOCK_REALTIME)
Example #38
    return tf.reshape(output, [-1, 1, 1])


prediction = rnn(x)
cost = tf.reduce_mean(tf.abs(prediction - y))
optimizer = tf.train.AdamOptimizer(0.001).minimize(cost)

import matplotlib.pyplot as plt
import time

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    for epoch in range(EPOCHS):
        epoch_time = time.clock_gettime(0)
        # epoch_loss = 0
        for i in range(len(train_x) // BACTH_SIZE):
            epoch_x = train_x[i * BACTH_SIZE:(i + 1) * BACTH_SIZE]
            epoch_y = train_y[i * BACTH_SIZE:(i + 1) * BACTH_SIZE]

            sess.run([optimizer, cost], feed_dict={x: epoch_x, y: epoch_y})
            # _, c = sess.run([optimizer, cost], feed_dict={x: epoch_x, y: epoch_y})
            # epoch_loss += c

        c = sess.run(cost, feed_dict={x: val_x, y: val_y})
        print('Epoch', epoch + 1, 'completed out of', EPOCHS, 'loss:', c)
        print("Time spent for epoch:", time.clock_gettime(0) - epoch_time)

    predictions = []
    real = []
Example #39
 def _time(self):
     return time.clock_gettime(time.CLOCK_MONOTONIC)
Example #40
    async def info(self):
        """
        Returns basic system information.
        """
        buildtime = sw_buildtime()
        if buildtime:
            buildtime = datetime.fromtimestamp(int(buildtime))

        uptime = (await (await Popen(
            "env -u TZ uptime | awk -F', load averages:' '{ print $1 }'",
            stdout=subprocess.PIPE,
            shell=True,
        )).communicate())[0].decode().strip()

        serial = await self._system_serial()

        product = (await (await Popen(
            ['dmidecode', '-s', 'system-product-name'],
            stdout=subprocess.PIPE,
        )).communicate())[0].decode().strip() or None

        manufacturer = (await (await Popen(
            ['dmidecode', '-s', 'system-manufacturer'],
            stdout=subprocess.PIPE,
        )).communicate())[0].decode().strip() or None

        return {
            'version':
            self.version(),
            'buildtime':
            buildtime,
            'hostname':
            socket.gethostname(),
            'physmem':
            sysctl.filter('hw.physmem')[0].value,
            'model':
            sysctl.filter('hw.model')[0].value,
            'cores':
            sysctl.filter('hw.ncpu')[0].value,
            'loadavg':
            os.getloadavg(),
            'uptime':
            uptime,
            'uptime_seconds':
            time.clock_gettime(5),  # CLOCK_UPTIME = 5
            'system_serial':
            serial,
            'system_product':
            product,
            'license':
            await self.__get_license(),
            'boottime':
            datetime.fromtimestamp(
                struct.unpack('l',
                              sysctl.filter('kern.boottime')[0].value[:8])[0]),
            'datetime':
            datetime.utcnow(),
            'timezone':
            (await self.middleware.call('datastore.config',
                                        'system.settings'))['stg_timezone'],
            'system_manufacturer':
            manufacturer,
        }
Example #41
#!/usr/bin/env python3
import sys
import time
import subprocess

if __name__=='__main__':
  start_time = time.clock_gettime(time.CLOCK_MONOTONIC)
  org_time = start_time
  values = []
  timeout_periods = 0
  while len(values) < 5:
    # Use read-temp-sample.py to get a tuple of temp, humidity
    proc = subprocess.Popen("python3 read-temp-sample.py", stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    thstr = proc.communicate()[0].decode('utf-8')
    #print('Raw: /{}/'.format(thstr.strip()))
    #sys.stdout.flush()
    tempstr, humstr = thstr.strip().split()
    if not tempstr == "no-temp":
      values.append((float(tempstr), float(humstr)))
      print('Sample[{2}]: {0:0.1f}C  {1:0.1f}%'.format(float(tempstr), float(humstr), len(values)), file=sys.stderr)
      sys.stderr.flush()
      if len(values) >= 5:
        break
    # Wait 3s before next sample
    time.sleep(3)
    cur_time = time.clock_gettime(time.CLOCK_MONOTONIC)
    if cur_time - start_time > 60.0:
        timeout_periods = timeout_periods + 1
        print('Timeout @{0:0.0f}s, {1} samples read'.format(cur_time - org_time,len(values)), file=sys.stderr)
        start_time = cur_time
        sys.stderr.flush()
Example #42
def acquire_data(sample_freq, capture_window, queue):
    """
    Acquires accelerometer data and stores it in arrays.

    :type sample_freq: int
    :param sample_freq: Sampling frequency in Hz
    :type capture_window: int
    :param capture_window: the number of data points to capture before writing to the queue. If acquire_data()
                           happens to desync at the end of the capture window, it may return more data points
                           than requested.
    :type queue: Multiprocessing Queue
    :param queue: A multiprocessing queue that allows a dict containing a captured batch of measurements to be
                  passed to detect_events().
    :return: None
    """

    print("Starting data capture...")

    accel = [[] for i in range(3)]
    w_vel = [[] for i in range(3)]
    mag_angle = [[] for i in range(3)]
    desync_count = 0
    capture_count = 0

    with serial.Serial('/dev/rfcomm0', 115200, timeout=100) as ser:

        # Once we've opened the serial port, do a first alignment and
        # initialize last_realtime from the hardware clock.

        synced, a_raw = sync_to_uq(ser)
        last_realtime = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)

        # Outer loop captures until process is terminated
        while True:

            # If this is the first iteration or we desynced on the last iteration, realign to the next 'UQ' header.
            if not synced:
                # Wait on align to sync. TODO: Alignment should have a timeout for squawk-and-stop.
                synced, a_raw = sync_to_uq(ser)

                a_1_int = int.from_bytes(a_raw[0:2], byteorder='little', signed=True)
                a_2_int = int.from_bytes(a_raw[2:4], byteorder='little', signed=True)
                a_3_int = int.from_bytes(a_raw[4:6], byteorder='little', signed=True)
            # Otherwise just attempt to read the 'UQ' frame and get the accel data.
            else:
                s = ser.read(11)
                if s[0:2] != b'UQ':
                    # Commenting out these desyncs, because we already know we are getting desynced in
                    # nominal operation. NaN counting statistics in detect_events() give roughly equivalent
                    # diagnostic information, and don't cause the real time performance hit of writing
                    # to stdout.

                    # print("Desynced! " + str(s[0:2]))
                    synced = False
                    continue
                else:
                    a_1_int = int.from_bytes(s[2:4], byteorder='little', signed=True)
                    a_2_int = int.from_bytes(s[4:6], byteorder='little', signed=True)
                    a_3_int = int.from_bytes(s[6:8], byteorder='little', signed=True)

            # Read the 'UR' frame and get the gyro angular velocity data.
            s = ser.read(11)
            if s[0:2] != b'UR':
                # print("Desynced! " + str(s[0:2]))
                synced = False
                desync_count += 1
                continue
            else:
                w_1_int = int.from_bytes(s[2:4], byteorder='little', signed=True)
                w_2_int = int.from_bytes(s[4:6], byteorder='little', signed=True)
                w_3_int = int.from_bytes(s[6:8], byteorder='little', signed=True)

            # Read the 'US' frame and get the magnetometer angles.
            s = ser.read(11)
            if s[0:2] != b'US':
                # print("Desynced! " + str(s[0:2]))
                synced = False
                desync_count += 1
                continue
            else:
                angle_1_int = int.from_bytes(s[2:4], byteorder='little', signed=True)
                angle_2_int = int.from_bytes(s[4:6], byteorder='little', signed=True)
                angle_3_int = int.from_bytes(s[6:8], byteorder='little', signed=True)

            # I haven't been able to figure out what the data is in the 'UT' frame
            s = ser.read(11)
            if s[0:2] != b'UT':
                # print("Desynced! " + str(s[0:2]))
                synced = False
                desync_count += 1
                continue

            # We made it to the end of one complete frame, so we are assuming that this is a good data point.
            # Before capturing this data, we need to check if a desync has caused data dropout. If so, we'll
            # fill with NaN.

            realtime = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)

            # If the elapsed time since we last captured a good data point is greater than the BWT901CL's
            # sampling period, then we lost one or more data points to a desync.

            time_delta = int(round((realtime - last_realtime) / (1.0 / sample_freq)))

            # ...and update last_realtime to the current realtime, for use on the next cycle.

            last_realtime = realtime

            # If we lost data, append NaN's for the missing data points, then append the new good data point.
            # If we didn't lose data, just append the new good data point.

            if time_delta > 1:

                # print("DEBUG: time_delta: " + str(time_delta))

                for i in range(time_delta - 1):

                    # Increment the total count of data points, including dropped data.
                    capture_count += 1

                    for idx in range(3):

                        accel[idx].append(math.nan)
                        w_vel[idx].append(math.nan)
                        mag_angle[idx].append(math.nan)

            # Increment the total count of data points by one for our new good data point.
            capture_count += 1

            for idx, value in enumerate([a_1_int, a_2_int, a_3_int]):
                accel[idx].append(value / 32768.0 * 16)

            for idx, value in enumerate([w_1_int, w_2_int, w_3_int]):
                w_vel[idx].append(value / 32768.0 * 2000)

            for idx, value in enumerate([angle_1_int, angle_2_int, angle_3_int]):
                mag_angle[idx].append(value / 32768.0 * 180)

            if capture_count >= capture_window:

                utc_time = datetime.datetime.now()

                capture_count = 0
                window_dict = {'accel x': accel[0],
                               'accel y': accel[1],
                               'accel z': accel[2],
                               'w_vel 0': w_vel[0],
                               'w_vel 1': w_vel[1],
                               'w_vel 2': w_vel[2],
                               'mag_angle 0': mag_angle[0],
                               'mag_angle 1': mag_angle[1],
                               'mag_angle 2': mag_angle[2],
                               'timestamp': time.clock_gettime(time.CLOCK_MONOTONIC_RAW),
                               'datetime' : utc_time.isoformat(' '),
                               'desync_count': desync_count}

                # Uncomment this to print the raw x-axis accel values for debugging.
                # print(accel[0])

                # Clear the temporary measurement buffers.

                accel = [[] for i in range(3)]
                w_vel = [[] for i in range(3)]
                mag_angle = [[] for i in range(3)]
                queue.put(window_dict)
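The dropout handling explained in the comments above reduces to: the elapsed time between two good frames, divided by the sampling period, gives the number of sample slots that passed, and every slot beyond the first is filled with NaN. A tiny illustration with made-up numbers:

import math

def missing_slots(last_t, now_t, sample_freq):
    # Same arithmetic as acquire_data(): elapsed time over the sampling
    # period, rounded to the nearest whole slot.
    return int(round((now_t - last_t) * sample_freq))

# At 200 Hz, 25 ms between good frames means 5 slots elapsed, so 4 NaN
# placeholders are appended before the new good sample.
gap = missing_slots(10.000, 10.025, 200)
placeholders = [math.nan] * max(0, gap - 1)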
Example #43
    def sendMesh(scene):
        context = bpy.context

        if bpy.context.edit_object is not None:
            currentObject = context.edit_object.name
        else:
            currentObject = context.active_object.name

        for name, target in Splash._targets.items():
            currentTime = time.clock_gettime(time.CLOCK_REALTIME) - target._startTime
            worldMatrix = target._object.matrix_world

            normalMatrix = worldMatrix.copy()
            normalMatrix.invert()
            normalMatrix.transpose()

            if currentObject == name and bpy.context.edit_object is not None:
                if currentTime - target._frameTimeMesh < target._updatePeriodEdit:
                    continue
                target._frameTimeMesh = currentTime

                mesh = bmesh.from_edit_mesh(target._object.data)
                bufferVert = bytearray()
                bufferPoly = bytearray()
                buffer = bytearray()

                vertNbr = 0
                polyNbr = 0

                uv_layer = mesh.loops.layers.uv.active
                if uv_layer is None:
                    bpy.ops.uv.smart_project()
                    uv_layer = mesh.loops.layers.uv.active

                for face in mesh.faces:
                    polyNbr += 1
                    bufferPoly += struct.pack("i", len(face.verts))
                    for loop in face.loops:
                        bufferPoly += struct.pack("i", vertNbr)

                        v = loop.vert.co
                        tmpVector = Vector((v[0], v[1], v[2], 1.0))
                        tmpVector = worldMatrix * tmpVector
                        v = Vector((tmpVector[0], tmpVector[1], tmpVector[2]))

                        n = loop.vert.normal
                        tmpVector = Vector((n[0], n[1], n[2], 0.0))
                        tmpVector = normalMatrix * tmpVector
                        n = Vector((tmpVector[0], tmpVector[1], tmpVector[2]))

                        if uv_layer is None:
                            uv = Vector((0, 0))
                        else:
                            uv = loop[uv_layer].uv
                        bufferVert += struct.pack("ffffffff", v[0], v[1], v[2], uv[0], uv[1], n[0], n[1], n[2])
                        vertNbr += 1

                buffer += struct.pack("ii", vertNbr, polyNbr)
                buffer += bufferVert
                buffer += bufferPoly
                target._meshWriter.push(buffer, floor(currentTime * 1e9))
            else:
                if currentTime - target._frameTimeMesh < target._updatePeriodObject:
                    continue
                target._frameTimeMesh = currentTime

                if type(target._object.data) is bpy.types.Mesh:

                    # Look for UV coords, create them if needed
                    if len(target._object.data.uv_layers) == 0:
                        bpy.ops.object.editmode_toggle()
                        bpy.ops.uv.smart_project()
                        bpy.ops.object.editmode_toggle()

                    # Apply the modifiers to the object
                    mesh = target._object.to_mesh(context.scene, True, 'PREVIEW')

                    bufferVert = bytearray()
                    bufferPoly = bytearray()
                    buffer = bytearray()

                    vertNbr = 0
                    polyNbr = 0

                    for poly in mesh.polygons:
                        polyNbr += 1
                        bufferPoly += struct.pack("i", len(poly.loop_indices))
                        for idx in poly.loop_indices:
                            bufferPoly += struct.pack("i", vertNbr)

                            v = mesh.vertices[mesh.loops[idx].vertex_index].co
                            tmpVector = Vector((v[0], v[1], v[2], 1.0))
                            tmpVector = worldMatrix * tmpVector
                            v = Vector((tmpVector[0], tmpVector[1], tmpVector[2]))

                            n = mesh.vertices[mesh.loops[idx].vertex_index].normal
                            tmpVector = Vector((n[0], n[1], n[2], 0.0))
                            tmpVector = normalMatrix * tmpVector
                            n = Vector((tmpVector[0], tmpVector[1], tmpVector[2]))

                            if len(mesh.uv_layers) != 0:
                                uv = mesh.uv_layers[0].data[idx].uv
                            else:
                                uv = Vector((0, 0))
                            bufferVert += struct.pack("ffffffff", v[0], v[1], v[2], uv[0], uv[1], n[0], n[1], n[2])
                            vertNbr += 1

                    buffer += struct.pack("ii", vertNbr, polyNbr)
                    buffer += bufferVert
                    buffer += bufferPoly
                    target._meshWriter.push(buffer, floor(currentTime * 1e9))

                    bpy.data.meshes.remove(mesh)
Example #44
 def _getTime(self):
     return time.clock_gettime(time.CLOCK_REALTIME)
Example #45
import time, vlc
import sys

print("Start python synchronized play")

#print(f"Got arg count {len(sys.argv)} args {sys.argv}")
#if len(sys.argv) > 1:
#    time_arg = sys.argv[1]
#    parsed = time.strptime(time_arg, "%H:%M:%S")
#    print(f"Starting play back at time {parsed}")

# gets the time in ns
now = time.clock_gettime_ns(time.CLOCK_REALTIME)

# get the time in seconds
now_sec = time.clock_gettime(time.CLOCK_REALTIME)

# get second in range 0 to 59
floor_minute = int(now_sec % 60)

#print(f"Got time now {now/1000000000} now_sec {now_sec} floor_minute {floor_minute}")

source = "Beethoven - Moonlight Sonata (FULL).mp3"

vlc_instance = vlc.Instance()

player = vlc_instance.media_player_new()

media = vlc_instance.media_new(source)

player.set_media(media)
Example #46
def _os_uptime():
    """ Get the OS uptime. Because this is highly operating system dependent, several different
    strategies may have to be tried:"""

    try:
        # For Python 3.7 and later, most systems
        return time.clock_gettime(time.CLOCK_UPTIME)
    except AttributeError:
        pass

    try:
        # For Python 3.3 and later, most systems
        return time.clock_gettime(time.CLOCK_MONOTONIC)
    except AttributeError:
        pass

    try:
        # For Linux, Python 2 and 3:
        return float(open("/proc/uptime").read().split()[0])
    except (IOError, KeyError, OSError):
        pass

    try:
        # For MacOS, Python 2:
        from Quartz.QuartzCore import CACurrentMediaTime
        return CACurrentMediaTime()
    except ImportError:
        pass

    try:
        # for FreeBSD, Python 2
        import ctypes
        from ctypes.util import find_library

        libc = ctypes.CDLL(find_library('c'))
        size = ctypes.c_size_t()
        buf = ctypes.c_int()
        size.value = ctypes.sizeof(buf)
        libc.sysctlbyname("kern.boottime", ctypes.byref(buf), ctypes.byref(size), None, 0)
        os_uptime_secs = time.time() - float(buf.value)
        return os_uptime_secs
    except (ImportError, AttributeError, IOError, NameError):
        pass

    try:
        # For OpenBSD, Python 2. See issue #428.
        import subprocess
        from datetime import datetime
        cmd = ['sysctl', 'kern.boottime']
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        o, e = proc.communicate()
        # Check for errors
        if e:
            raise IOError
        time_t = o.decode('ascii').split()
        time_as_string = time_t[1] + " " + time_t[2] + " " + time_t[4][:4] + " " + time_t[3]
        os_time = datetime.strptime(time_as_string, "%b %d %Y %H:%M:%S")
        epoch_time = (os_time - datetime(1970, 1, 1)).total_seconds()
        os_uptime_secs = time.time() - epoch_time
        return os_uptime_secs
    except (IOError, IndexError, ValueError):
        pass

    # Nothing seems to be working. Return None
    return None
Example #47
 def checkPoll(self):
     if time.clock_gettime(0) >= self.next_due and not self.disabled:
         self.poll()
         self.next_due = time.clock_gettime(0) + self.rate
Example #48
 def test_clock_realtime(self):
     t = time.clock_gettime(time.CLOCK_REALTIME)
     self.assertIsInstance(t, float)
Example #49
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """ execute a command on the virtual machine host """
        super(Connection, self).exec_command(cmd,
                                             in_data=in_data,
                                             sudoable=sudoable)

        self._display.vvv(u"EXEC {0}".format(cmd), host=self._host)

        cmd_args_list = shlex.split(
            to_native(cmd, errors='surrogate_or_strict'))

        if getattr(self._shell, "_IS_WINDOWS", False):
            # Become method 'runas' is done in the wrapper that is executed,
            # need to disable sudoable so the bare_run is not waiting for a
            # prompt that will not occur
            sudoable = False

            # Generate powershell commands
            cmd_args_list = self._shell._encode_script(cmd,
                                                       as_list=True,
                                                       strict_mode=False,
                                                       preserve_rc=False)

        # TODO(odyssey4me):
        # Implement buffering much like the other connection plugins
        # Implement 'env' for the environment settings
        # Implement 'input-data' for whatever it might be useful for
        request_exec = {
            'execute': 'guest-exec',
            'arguments': {
                'path': cmd_args_list[0],
                'capture-output': True,
                'arg': cmd_args_list[1:]
            }
        }
        request_exec_json = json.dumps(request_exec)

        display.vvv(u"GA send: {0}".format(request_exec_json), host=self._host)

        # TODO(odyssey4me):
        # Add timeout parameter
        result_exec = json.loads(
            libvirt_qemu.qemuAgentCommand(self.domain, request_exec_json, 5,
                                          0))

        display.vvv(u"GA return: {0}".format(result_exec), host=self._host)

        command_start = time.clock_gettime(time.CLOCK_MONOTONIC)

        request_status = {
            'execute': 'guest-exec-status',
            'arguments': {
                'pid': result_exec['return']['pid']
            }
        }
        request_status_json = json.dumps(request_status)

        display.vvv(u"GA send: {0}".format(request_status_json),
                    host=self._host)

        # TODO(odyssey4me):
        # Work out a better way to wait until the command has exited
        result_status = json.loads(
            libvirt_qemu.qemuAgentCommand(self.domain, request_status_json, 5,
                                          0))

        display.vvv(u"GA return: {0}".format(result_status), host=self._host)

        while not result_status['return']['exited']:
            # Wait for 5% of the time already elapsed
            sleep_time = (time.clock_gettime(time.CLOCK_MONOTONIC) -
                          command_start) * (5 / 100)
            if sleep_time < 0.0002:
                sleep_time = 0.0002
            elif sleep_time > 1:
                sleep_time = 1
            time.sleep(sleep_time)
            result_status = json.loads(
                libvirt_qemu.qemuAgentCommand(self.domain, request_status_json,
                                              5, 0))

        display.vvv(u"GA return: {0}".format(result_status), host=self._host)

        if result_status['return'].get('out-data'):
            stdout = base64.b64decode(result_status['return']['out-data'])
        else:
            stdout = b''

        if result_status['return'].get('err-data'):
            stderr = base64.b64decode(result_status['return']['err-data'])
        else:
            stderr = b''

        # Decode xml from windows
        if getattr(self._shell, "_IS_WINDOWS",
                   False) and stdout.startswith(b"#< CLIXML"):
            stdout = _parse_clixml(stdout)

        display.vvv(u"GA stdout: {0}".format(to_text(stdout)), host=self._host)
        display.vvv(u"GA stderr: {0}".format(to_text(stderr)), host=self._host)

        return result_status['return']['exitcode'], stdout, stderr
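The status-polling loop above backs off adaptively: each sleep is 5% of the time already spent waiting, clamped between 0.2 ms and 1 s. A minimal standalone sketch of that pattern; the wait_until() helper and check() callable are illustrative names, not part of the plugin:
import time

def wait_until(check, min_sleep=0.0002, max_sleep=1.0):
    # Poll check() until it returns True, sleeping 5% of the elapsed
    # wait time between attempts (clamped), as the plugin loop does.
    start = time.clock_gettime(time.CLOCK_MONOTONIC)
    while not check():
        elapsed = time.clock_gettime(time.CLOCK_MONOTONIC) - start
        time.sleep(min(max(elapsed * 0.05, min_sleep), max_sleep))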
Example #50
0
 def test_clock_monotonic(self):
     a = time.clock_gettime(time.CLOCK_MONOTONIC)
     b = time.clock_gettime(time.CLOCK_MONOTONIC)
     self.assertLessEqual(a, b)
Example #51
0
def uptime() -> float:
    return time.clock_gettime(time.CLOCK_UPTIME)  # pylint: disable=no-member
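CLOCK_UPTIME is typically only exposed on BSD builds of Python, hence the pylint suppression above. A hedged, more portable sketch (assumption: falling back to CLOCK_BOOTTIME on Linux, which counts time spent suspended, and then CLOCK_MONOTONIC is acceptable to the caller):
import time

def uptime_portable() -> float:
    # Prefer a true uptime clock where the platform exposes one.
    for name in ("CLOCK_UPTIME", "CLOCK_BOOTTIME", "CLOCK_MONOTONIC"):
        clk_id = getattr(time, name, None)
        if clk_id is not None:
            return time.clock_gettime(clk_id)
    return time.monotonic()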
Example #52
0
 def _get_clock(self):
     if self._clock_func is not None:
         return self._clock_func()
     else:
         return time.clock_gettime(time.CLOCK_REALTIME)
Example #53
0
 def now():
     if any([x in sys.platform for x in UNIX_OS_OPTS]):
         return time.clock_gettime(time.CLOCK_MONOTONIC)
     elif sys.platform == "win32":
         return time.monotonic()
     raise BaseException("Could not determine OS.")
Example #54
0
from time import clock_gettime, CLOCK_MONOTONIC

def get_time():
    return clock_gettime(CLOCK_MONOTONIC)
Example #55
0
 def __call__(self):
     return time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)
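A short illustration (not part of the original class) of what the callable above measures: per-thread CPU time only advances while the calling thread is actually executing, so sleeping contributes almost nothing.
import time

def thread_cpu():
    return time.clock_gettime(time.CLOCK_THREAD_CPUTIME_ID)

start = thread_cpu()
time.sleep(0.5)                    # wall-clock time passes, CPU time barely moves
sum(i * i for i in range(10**6))   # CPU-bound work is counted
print("thread CPU seconds:", thread_cpu() - start)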
Example #56
0
 def monotonicRaw():
     return time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
Example #57
0
def mysql(*, sql_setup: Optional[str] = None):
    """
    Start a MySQL container and yield the IP of the container once ready for
    connections. ``sql_setup`` should be the .sql file used to initialize the
    database.
    """
    if not DOCKER_AVAILABLE:
        raise unittest.SkipTest("Need docker to run MySQL")

    docker_client: docker.DockerClient = docker.from_env()
    with tempfile.TemporaryDirectory() as tempdir:
        # Create server config
        sql_conf_path = pathlib.Path(tempdir, "my.conf")
        sql_conf_path.write_text(MY_CONF)
        sql_conf_path.chmod(0o660)
        # Create cert folder
        cert_dir_path = pathlib.Path(tempdir, "certs")
        cert_dir_path.mkdir(mode=0o755)
        # Volumes to mount
        volumes = {
            sql_conf_path.resolve(): {
                "bind": "/etc/mysql/conf.d/ssl.cnf"
            },
            cert_dir_path.resolve(): {
                "bind": "/conf/certs/"
            },
        }
        # Dump out SQL query to file
        if sql_setup is not None:
            sql_setup_path = pathlib.Path(tempdir, "dump.sql")
            sql_setup_path.write_text(sql_setup)
            sql_setup_path.chmod(0o555)
            volumes[sql_setup_path.resolve()] = {
                "bind": "/docker-entrypoint-initdb.d/dump.sql"
            }
        # Tell the docker daemon to start MySQL
        LOGGER.debug("Starting MySQL...")
        container = docker_client.containers.run(
            DOCKER_IMAGE,
            command=
            "bash -c 'set -x; while ! test -f /conf/certs/ready; do sleep 0.1; done; chown mysql:mysql /etc/mysql/conf.d/ssl.cnf && chown mysql:mysql /conf/certs/* && ls -lAF /conf/certs/ && cat /conf/certs/server-*.pem && bash -xe /usr/local/bin/docker-entrypoint.sh mysqld'",
            environment=DOCKER_ENV,
            detach=True,
            auto_remove=True,
            volumes=volumes,
        )
        # Sometimes very bad things happen, this ensures that the container will
        # be cleaned up on process exit no matter what
        cleanup = mkcleanup(docker_client, container)
        try:
            # Get the IP from the docker daemon
            inspect = docker_client.api.inspect_container(container.id)
            container_ip = inspect["NetworkSettings"]["IPAddress"]
            # Create certificate
            # From: https://mariadb.com/kb/en/library/certificate-creation-with-openssl/
            cmds = [
                ["openssl", "genrsa", "-out", "ca-key.pem", "2048"],
                [
                    "openssl",
                    "req",
                    "-new",
                    "-x509",
                    "-nodes",
                    "-key",
                    "ca-key.pem",
                    "-out",
                    "ca.pem",
                    "-subj",
                    f"/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Root Cert/CN=mysql.unittest",
                ],
                [
                    "openssl",
                    "req",
                    "-newkey",
                    "rsa:2048",
                    "-nodes",
                    "-keyout",
                    "server-key.pem",
                    "-out",
                    "server-req.pem",
                    "-subj",
                    f"/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Server Cert/CN=mysql.unittest",
                ],
                [
                    "openssl",
                    "rsa",
                    "-in",
                    "server-key.pem",
                    "-out",
                    "server-key.pem",
                ],
                [
                    "openssl",
                    "x509",
                    "-req",
                    "-in",
                    "server-req.pem",
                    "-days",
                    "1",
                    "-CA",
                    "ca.pem",
                    "-CAkey",
                    "ca-key.pem",
                    "-set_serial",
                    "01",
                    "-out",
                    "server-cert.pem",
                ],
            ]
            for cmd in cmds:
                subprocess.call(cmd, cwd=cert_dir_path.resolve())
            root_cert_path = pathlib.Path(cert_dir_path, "ca.pem")
            server_cert_path = pathlib.Path(cert_dir_path, "server-cert.pem")
            server_key_path = pathlib.Path(cert_dir_path, "server-key.pem")
            server_cert_path.chmod(0o660)
            server_key_path.chmod(0o660)
            pathlib.Path(cert_dir_path, "ready").write_text("ready")
            # Wait until MySQL reports it's ready for connections
            container_start_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
            ready = 0
            for line in container.logs(stream=True, follow=True):
                now_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
                LOGGER.debug(
                    "MySQL log (%0.02f seconds): %s",
                    (now_time - container_start_time),
                    line.decode(errors="ignore").strip(),
                )
                if b"ready for connections" in line:
                    ready += 1
                if ready == 2:
                    break
            if ready != 2:
                raise MySQLFailedToStart('Never saw "ready for connections"')
            # Ensure that we can make a connection
            start_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
            max_timeout = float(os.getenv("MYSQL_START_TIMEOUT", "600"))
            LOGGER.debug(
                "Attempting to connect to MySQL: Timeout of %d seconds",
                max_timeout,
            )
            while not check_connection(container_ip, DEFAULT_PORT):
                end_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
                if (end_time - start_time) >= max_timeout:
                    raise MySQLFailedToStart("Timed out waiting for MySQL")
            end_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
            LOGGER.debug(
                "MySQL running: Took %0.02f seconds",
                end_time - container_start_time,
            )
            # Yield IP of container to caller
            yield container_ip, root_cert_path.resolve()
        finally:
            cleanup()
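The readiness wait above can be condensed into a reusable deadline helper. This is a hedged sketch only: the predicate/timeout names are illustrative, and unlike the connection loop above it sleeps briefly between attempts.
import time

def wait_for(predicate, timeout: float, interval: float = 0.1) -> bool:
    # Poll predicate() until it returns True or the timeout expires,
    # using CLOCK_MONOTONIC_RAW as the fixture does for its timing.
    start = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
    while not predicate():
        if time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - start >= timeout:
            return False
        time.sleep(interval)
    return True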
Example #58
0
def get_uptime():
    uptime_seconds = float(time.clock_gettime(time.CLOCK_MONOTONIC))
    return uptime_seconds
Example #59
0
# space mouse first
counter = 0  # Frame number
initCounter = 0  # For interpolating to target

# Moving
while True:

    if counter == 0 or initCounter == 1:
        msg_panda = b.recv_msg(robot_name + "_state", -1)
        msg_lidar = b_lidar.recv_msg("franka_lidar", -1)

    # msg_task = b.recv_msg("task_policy", -1)
    msg_panda = b.recv_msg(robot_name + "_state", 0)
    msg_lidar = b_lidar.recv_msg("franka_lidar", 0)

    recv_stop = time.clock_gettime(time.CLOCK_MONOTONIC)  # TODO

    current_c_pos = msg_panda.get_c_pos()
    current_c_vel = msg_panda.get_c_vel()
    current_j_vel = msg_panda.get_j_vel()
    current_j_pos = msg_panda.get_j_pos()

    lidar_data = msg_lidar.get_data()
    print(lidar_data)
    initial_pos = np.array([0.628, -0.002, 0.497])
    # target_c_vel = np.array([0.001,0.,0.]) #msg_task.get_c_acc_mean() * 0.1  # TODO magic scaling of input pos
    target_c_pos = initial_pos + np.array([0, -0.2, 0])
    # P control towards the target
    target_c_vel = 0.01 * (target_c_pos - current_c_pos)

    # Init after reset
Example #60
0
def mainprogram():
    #rightsensors = []
    path = []
    while True:
        sensorvalues = SensorReadingsArrayCreator()
        #print(sensorvalues)
        # how many sensors are on the path
        sensorsonpath = 0
        for i in range(1, 7):
            if sensorvalues[i] > midValue:
                sensorsonpath += 1
        if sensorsonpath == 1 or sensorsonpath == 2:
            # find sensor on path
            OnPathSensor = 1
            for i in range(2, 7):
                if sensorvalues[i] > sensorvalues[OnPathSensor]:
                    OnPathSensor = i
            OnPathSensor += 1
            if OnPathSensor == 3:  # close to middle left
                myMotor1.setSpeed(70)
                myMotor2.setSpeed(90)
            elif OnPathSensor == 6:  # close to middle right
                myMotor1.setSpeed(90)
                myMotor2.setSpeed(70)
            elif OnPathSensor == 2:  # far from middle left
                myMotor1.setSpeed(30)
                myMotor2.setSpeed(90)
            elif OnPathSensor == 7:  # far from middle right
                myMotor1.setSpeed(90)
                myMotor2.setSpeed(30)
            #elif OnPathSensor == 1:
            #   print("1 set")
            #  myMotor1.setSpeed(55)
            # myMotor2.setSpeed(90)
            #elif OnPathSensor == 8:
            #   print("8 set")
            #  myMotor1.setSpeed(90)
            # myMotor2.setSpeed(55)
            elif sensorvalues[3] >= midValue and sensorvalues[4] >= midValue:
                myMotor1.setSpeed(90)
                myMotor2.setSpeed(90)
            elif OnPathSensor == 4:
                myMotor1.setSpeed(80)
                myMotor2.setSpeed(90)
            elif OnPathSensor == 5:
                myMotor1.setSpeed(80)
                myMotor2.setSpeed(90)

        elif sensorvalues[1] >= midValue and sensorvalues[
                2] >= midValue and sensorvalues[3] >= midValue:
            time.sleep(0.35)
            sensorvalues = SensorReadingsArrayCreator()
            if all(sensorvalues[i] <= midValue for i in range(1, 7)):
                path.append("L")
                myMotor2.setSpeed(90)
                myMotor1.setSpeed(0)
                #myMotor1.run(Adafruit_MotorHAT.BACKWARD)
                #myMotor1.setSpeed(70)
                while sensorvalues[4] <= midValue:
                    sensorvalues = SensorReadingsArrayCreator()
                    if sensorvalues[1] >= midValue:
                        #myMotor1.setSpeed(45)
                        myMotor2.setSpeed(50)
                        #print("now")
            #myMotor1.run(Adafruit_MotorHAT.FORWARD)
            elif sensorvalues[1] <= midValue and sensorvalues[6] <= midValue:
                path.append("L")
                #print("st")
                myMotor2.setSpeed(90)
                myMotor1.setSpeed(0)
                while sensorvalues[6] <= midValue:
                    sensorvalues = SensorReadingsArrayCreator()
                #print("out1")
                #myMotor1.run(Adafruit_MotorHAT.BACKWARD)
                #myMotor1.setSpeed(70)
                while sensorvalues[4] <= midValue:
                    sensorvalues = SensorReadingsArrayCreator()
                    if sensorvalues[1] >= midValue:
                        #myMotor1.setSpeed(45)
                        myMotor2.setSpeed(50)
                        #print("now")
            #myMotor1.run(Adafruit_MotorHAT.FORWARD)
            else:
                done(path)

        elif sensorvalues[7] >= midValue and sensorvalues[
                6] >= midValue and sensorvalues[5] >= midValue:
            #rightsensors.append(SensorReadingsArrayCreator())
            leftgo = 0
            starttime = time.clock_gettime(time.CLOCK_REALTIME)
            while time.clock_gettime(time.CLOCK_REALTIME) < (starttime + 0.35):
                sensorvalues = SensorReadingsArrayCreator()
                if sensorvalues[1] > midValue:
                    leftgo = 1
            #rightsensors.append(sensorvalues)
            #turnOffMotors()
            #print(rightsensors)
            #sys.exit()
            if leftgo == 1:
                path.append("L")
                if sensorvalues[1] <= midValue and sensorvalues[6] <= midValue:
                    myMotor2.setSpeed(90)
                    myMotor1.setSpeed(0)
                    #myMotor1.run(Adafruit_MotorHAT.BACKWARD)
                    #myMotor1.setSpeed(70)
                    while sensorvalues[4] <= midValue:
                        sensorvalues = SensorReadingsArrayCreator()
                        if sensorvalues[1] >= midValue:
                            #myMotor1.setSpeed(45)
                            myMotor2.setSpeed(50)
                            #print("now")
                #myMotor1.run(Adafruit_MotorHAT.FORWARD)
                else:
                    done(path)
            else:
                if sensorvalues[1] <= midValue and sensorvalues[
                        6] <= midValue and (sensorvalues[4] <= midValue
                                            and sensorvalues[5] <= midValue):
                    path.append("R")
                    #print("right")
                    myMotor1.setSpeed(90)
                    myMotor2.setSpeed(0)
                    #myMotor2.run(Adafruit_MotorHAT.BACKWARD)
                    #myMotor2.setSpeed(70)
                    while sensorvalues[3] <= midValue:
                        sensorvalues = SensorReadingsArrayCreator()
                        if sensorvalues[6] >= midValue:
                            #myMotor1.setSpeed(45)
                            myMotor1.setSpeed(50)
                            #print("now")
                #myMotor1.run(Adafruit_MotorHAT.FORWARD)
                elif all(sensorvalues[i] <= midValue for i in range(1, 7)):
                    done(path)
                else:
                    path.append("S")

        elif sensorsonpath == 0:
            myMotor1.setSpeed(90)
            myMotor2.run(Adafruit_MotorHAT.BACKWARD)
            myMotor2.setSpeed(90)
            while sensorvalues[3] <= midValue:
                sensorvalues = SensorReadingsArrayCreator()
                if sensorvalues[6] >= midValue:
                    myMotor1.setSpeed(45)
                    myMotor2.setSpeed(45)
                    #print("now")
            myMotor2.run(Adafruit_MotorHAT.FORWARD)
            path.append("B")