def test_clock_realtime(self): import time if not hasattr(time, 'clock_gettime'): skip("need time.clock_gettime()") t1 = time.clock_gettime(time.CLOCK_REALTIME) assert isinstance(t1, float) time.sleep(time.clock_getres(time.CLOCK_REALTIME)) t2 = time.clock_gettime(time.CLOCK_REALTIME) assert t1 != t2
def test_clock_monotonic(self): import time if not (hasattr(time, 'clock_gettime') and hasattr(time, 'CLOCK_MONOTONIC')): skip("need time.clock_gettime()/CLOCK_MONOTONIC") t1 = time.clock_gettime(time.CLOCK_MONOTONIC) assert isinstance(t1, float) time.sleep(time.clock_getres(time.CLOCK_MONOTONIC)) t2 = time.clock_gettime(time.CLOCK_MONOTONIC) assert t1 < t2
def test_clock_realtime_ns(self): import time if not hasattr(time, 'clock_gettime_ns'): skip("need time.clock_gettime_ns()") t1 = time.clock_gettime_ns(time.CLOCK_REALTIME) assert isinstance(t1, int) time.sleep(time.clock_getres(time.CLOCK_REALTIME)) t2 = time.clock_gettime_ns(time.CLOCK_REALTIME) assert t1 != t2 assert abs(time.clock_gettime(time.CLOCK_REALTIME) - time.clock_gettime_ns(time.CLOCK_REALTIME) * 1e-9) < 0.1
def test_clock_monotonic_ns(self): import time if not (hasattr(time, 'clock_gettime_ns') and hasattr(time, 'CLOCK_MONOTONIC')): skip("need time.clock_gettime()/CLOCK_MONOTONIC") t1 = time.clock_gettime_ns(time.CLOCK_MONOTONIC) assert isinstance(t1, int) time.sleep(time.clock_getres(time.CLOCK_MONOTONIC)) t2 = time.clock_gettime_ns(time.CLOCK_MONOTONIC) assert t1 < t2 assert abs(time.clock_gettime(time.CLOCK_MONOTONIC) - time.clock_gettime_ns(time.CLOCK_MONOTONIC) * 1e-9) < 0.1
def do_GET(self):
    """Serve a plain-text reply reporting how many clock ticks the request took.

    When the query string ends in ``t=1`` an MD5 digest is computed between
    the two monotonic readings so the measured wait is non-trivial.
    """
    tick = time.clock_getres(time.CLOCK_MONOTONIC)
    wants_hash = 't=' in self.path and self.path.split('t=', 1)[1] == '1'
    start_ticks = int(time.monotonic() / tick)
    if wants_hash:
        x = hashlib.md5(b'01234567').digest()
    end_ticks = int(time.monotonic() / tick)
    self.send_response(200)
    self.send_header('Content-Type', 'text/plain; charset=UTF-8')
    self.end_headers()
    body = "waited: %d\n" % (end_ticks - start_ticks)
    self.wfile.write(body.encode('utf-8'))
    self.wfile.flush()
def do_GET(self):
    """Report, as plain text, the monotonic-clock ticks spent handling the request."""
    res = time.clock_getres(time.CLOCK_MONOTONIC)
    do_hash = False
    parts = self.path.split('t=', 1)
    # split() only yields two parts when 't=' occurs in the path.
    if len(parts) == 2 and parts[1] == '1':
        do_hash = True
    started = int(time.monotonic() / res)
    if do_hash:
        x = hashlib.md5(b'01234567').digest()
    finished = int(time.monotonic() / res)
    self.send_response(200)
    self.send_header('Content-Type', 'text/plain; charset=UTF-8')
    self.end_headers()
    reply = "waited: %d\n" % (finished - started)
    self.wfile.write(reply.encode('utf-8'))
    self.wfile.flush()
def do_GET(self):
    """Answer with the tick count spent on the request; ``t=1`` adds a JSON workload."""
    res = time.clock_getres(time.CLOCK_MONOTONIC)
    do_parse = 't=' in self.path and self.path.split('t=', 1)[1] == '1'
    received = int(time.monotonic() / res)
    if do_parse:
        # Fixed payload: the parsing cost is what gets measured, not content.
        x = json.loads('[{"sampler_rtt": 58998274, "test_case": "short", "requests_rtt": 58528000, "sample": 32, "unix_time": 1425061969829780736, "local_port": 45387, "order": 0}, {"sampler_rtt": 59420191, "test_case": "long", "requests_rtt": 58036000, "sample": 32, "unix_time": 1425061969829780736, "local_port": 45386, "order": 1}]')
    now = int(time.monotonic() / res)
    self.send_response(200)
    self.send_header('Content-Type', 'text/plain; charset=UTF-8')
    self.end_headers()
    reply = "waited: %d\n" % (now - received)
    self.wfile.write(reply.encode('utf-8'))
    self.wfile.flush()
def do_GET(self):
    """Plain-text timing endpoint: reports ticks elapsed; ``t=1`` parses a JSON blob in between."""
    step = time.clock_getres(time.CLOCK_MONOTONIC)
    parse_requested = False
    if 't=' in self.path:
        if self.path.split('t=', 1)[1] == '1':
            parse_requested = True
    tick_in = int(time.monotonic() / step)
    if parse_requested:
        x = json.loads(
            '[{"sampler_rtt": 58998274, "test_case": "short", "requests_rtt": 58528000, "sample": 32, "unix_time": 1425061969829780736, "local_port": 45387, "order": 0}, {"sampler_rtt": 59420191, "test_case": "long", "requests_rtt": 58036000, "sample": 32, "unix_time": 1425061969829780736, "local_port": 45386, "order": 1}]'
        )
    tick_out = int(time.monotonic() / step)
    self.send_response(200)
    self.send_header('Content-Type', 'text/plain; charset=UTF-8')
    self.end_headers()
    content = "waited: %d\n" % (tick_out - tick_in)
    self.wfile.write(content.encode('utf-8'))
    self.wfile.flush()
def test_clock_gettime(self): import time clock_ids = ['CLOCK_REALTIME', 'CLOCK_REALTIME_COARSE', 'CLOCK_MONOTONIC', 'CLOCK_MONOTONIC_COARSE', 'CLOCK_MONOTONIC_RAW', 'CLOCK_BOOTTIME', 'CLOCK_PROCESS_CPUTIME_ID', 'CLOCK_THREAD_CPUTIME_ID', 'CLOCK_HIGHRES', 'CLOCK_PROF', 'CLOCK_UPTIME',] for clock_id in clock_ids: clock = getattr(time, clock_id, None) if clock is None: continue t1 = time.clock_gettime(clock) assert isinstance(t1, float) time.sleep(time.clock_getres(clock)) t2 = time.clock_gettime(clock) assert t1 < t2
def __init__(self, start: Optional[int]) -> None:
    """Initialise the timestamp source, seeding from ``start`` when it lies in the future."""
    res = clock_getres(CLOCK_MONOTONIC_RAW)
    assert res <= 1e-06, (
        "The monotonic clock must have microsecond resolution. This is "
        "necessary because multiple state changes can be written on the same "
        "millisecond.")
    now = time_ns()
    # Never start in the past: clamp the seed to the current wall clock.
    self._previous_timestamp = now if start is None or start < now else start
    self._previous_monotonic = clock_gettime_ns(CLOCK_MONOTONIC_RAW)
    self._lock = Semaphore()
    # Smoke-test: two back-to-back timestamps must differ.
    assert self.new().timestamp != self.new().timestamp, (
        "Consecutive calls to `new()` must not return the same timestamp. "
        "Most likely the monotonic clock resolution on this system is too low."
    )
def do_GET(self):
    """Reply with the tick count spent handling the request.

    ``t=1`` selects the ``memc`` connection and ``t=2`` the ``diskc``
    connection (both defined at module level); when one is selected a query
    runs between the two monotonic readings so its cost shows in the wait.
    """
    resolution = time.clock_getres(time.CLOCK_MONOTONIC)
    conn = None
    if 't=' in self.path:
        t = self.path.split('t=', 1)[1]
        if t == '1':
            conn = memc
        elif t == '2':
            conn = diskc
    received = int(time.monotonic() / resolution)
    # Fix: compare against the None singleton with identity, not `!=`.
    if conn is not None:
        x = conn.execute("SELECT * FROM user WHERE username='******'")
    now = int(time.monotonic() / resolution)
    self.send_response(200)
    self.send_header('Content-Type', 'text/plain; charset=UTF-8')
    self.end_headers()
    content = "waited: %d\n" % (now - received)
    self.wfile.write(content.encode('utf-8'))
    self.wfile.flush()
import pandas as pd
import nvstrings
import time

# Benchmark nvstrings.slice() against pandas str.slice() on growing inputs.
print("precision = %0.9f seconds" % time.clock_getres(time.CLOCK_MONOTONIC_RAW))

for i in range(3):
    lines = (i + 1) * 1000000
    # Load `lines` reviews onto the device, then mirror them on the host.
    dstrs = nvstrings.from_csv("/home/jovyan/reviews.txt", 0, lines=lines)
    vlist = dstrs.to_host()
    print("strings = ", len(vlist))
    hstrs = pd.Series(vlist)

    # Time the device-side slice.
    st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
    d = dstrs.slice(3, 103)
    et1 = time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - st
    print("nvstrings.slice() = %05f" % et1)

    # Time the equivalent host-side pandas slice.
    st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
    h = hstrs.str.slice(3, 103)
    et2 = time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - st
    print(" pandas.slice() = %05f" % et2)
logging.debug('local socket: %s:%d' % s.getsockname()) # drop privileges try: uid = 100000000 + 100000 * random.randint(0, 999) + os.getpid() os.setgid(uid) os.setuid(uid) except OSError: uid = 10000 + random.randint(0, 9999) os.setgid(uid) os.setuid(uid) logging.debug('UID/GID set to %d' % (uid)) # get the precision try: hz = int(1 / time.clock_getres(time.CLOCK_REALTIME)) except AttributeError: hz = 1000000000 precision = 0 while hz > 1: precision -= 1; hz >>= 1 while True: try: # receive the query data, addr = s.recvfrom(struct.calcsize(NTPFORMAT)) serverrecv = s2n(time.time()) if len(data) != struct.calcsize(NTPFORMAT): raise Exception("Invalid NTP packet: packet too short: %d bytes" % (len(data))) try:
vlist = dstrs_in.to_host()
# Double the host list eight times -> 256x the original number of strings.
for _ in range(8):
    vlist.extend(vlist)
len(vlist)  # no-op outside a REPL; kept from the original notebook-style code
dstrs = nvstrings.to_device(vlist)
hstrs = pd.Series(vlist)
print("precision = %0.9f seconds" % time.clock_getres(time.CLOCK_MONOTONIC_RAW))
print("strings =", dstrs.size())

# Time the device-side regex containment check.
st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
d = dstrs.contains('@.+@')
et1 = time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - st
print("nvstrings.contains('@.+@') = %05f" % et1)

# Time the equivalent host-side pandas check.
st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
h = hstrs.str.contains('@.+@')
et2 = time.clock_gettime(time.CLOCK_MONOTONIC_RAW) - st
print("pandas.contains('@.+@') = %05f" % et2)
print("speedup = %0.1fx" % (et2 / et1))

st = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
import sys
import time
import os

# Create a file of the requested size (in MiB) filled with random bytes,
# timing the write with CLOCK_REALTIME.
if len(sys.argv) < 3:
    print("Usage: create_file.py <filename> <size in MiB>")
    sys.exit()  # fix: explicit sys.exit() instead of the site-provided exit()
size = int(sys.argv[2])
start = time.clock_gettime(time.CLOCK_REALTIME)
# 'with' guarantees the handle is closed even on error; also avoids shadowing
# the `file` builtin as the original did.
with open(sys.argv[1], 'wb') as out:
    for _ in range(size):
        # Each MiB is 1048576 bytes of fresh random data.
        out.write(os.urandom(1048576))
end = time.clock_gettime(time.CLOCK_REALTIME)
print("Time: " + str(end - start))
print("Resolution: " + str(time.clock_getres(time.CLOCK_REALTIME)) + "s")
`python -m flowcorder.daemons.transition`
"""
import datetime
import ipaddress
import time
import logging
from enum import IntEnum
from ..ipfix import Template
from ..routing import RouteResolver, NoRoute, InternalFlow

LOG = logging.getLogger(__name__)
RRESOLVER = RouteResolver()
# Resolution of the monotonic clock, sampled once at import time.
CLK_REZ = time.clock_getres(time.CLOCK_MONOTONIC)


class FLOW_STATES(IntEnum):
    """
    The lifecycle of an individual flow can have one of these states.

    :NEW: Connection initiation attempted
    :ESTABLISHED: The connection is established and exchanging data
    :FINISHED: The connection is over
    :BROKEN: The connection failed (e.g., protocol error)
    :UNREACHABLE: The connection end point can no longer be reached
        (e.g., network error, time out)
    :DESYNC: The instrumentation stack lost track of this flow.
    """
def check_timer():
    """Return True when the monotonic clock resolves to 100 microseconds or finer."""
    return time.clock_getres(time.CLOCK_MONOTONIC) <= 1e-4
#!/usr/bin/env python from sys import argv, stderr import re from time import clock_getres, clock_gettime, CLOCK_MONOTONIC from rdflib import Graph, RDF, URIRef assert clock_getres(CLOCK_MONOTONIC) == 1e-9 VMSIZE_RE = re.compile(r"VmSize\s*:\s*([0-9]*)") def get_vmsize(): with open("/proc/self/status") as f: txt = f.read() return int(VMSIZE_RE.search(txt).group(1)) def get_time(): return clock_gettime(CLOCK_MONOTONIC) def task_parse(): raise NotImplementedError() def task_query(): filename = argv[2] syntax = argv[3] if len(argv) > 3 else "nt" m0 = get_vmsize() t0 = get_time()
def test_clock_getres(self): res = time.clock_getres(time.CLOCK_REALTIME) self.assertGreater(res, 0.0) self.assertLessEqual(res, 1.0)
# Enumerate the named clocks and print their metadata via test_clock()
# (test_clock and format_duration are defined elsewhere in this file).
clocks = ['clock', 'perf_counter', 'process_time']
if hasattr(time, 'monotonic'):
    clocks.append('monotonic')
clocks.append('time')
for name in clocks:
    func = getattr(time, name)
    test_clock("%s()" % name, func)
    info = time.get_clock_info(name)
    # Fix: get_clock_info() returns a namespace object, not a mapping —
    # `'precision' in info` and `info['implementation']` raise TypeError.
    # Probe/read attributes instead (same style as the sibling script).
    try:
        precision = info.precision
    except AttributeError:
        pass
    else:
        print("- announced precision: %s" % format_duration(precision))
    print("- implementation: %s" % info.implementation)
    print("- resolution: %s" % format_duration(info.resolution))

clock_ids = [name for name in dir(time) if name.startswith("CLOCK_")]
clock_ids.sort()
for clock_id_text in clock_ids:
    clock_id = getattr(time, clock_id_text)
    name = 'clock_gettime(%s)' % clock_id_text

    def gettime():
        return time.clock_gettime(clock_id)

    # Some ids exist but are unusable on this system; probe before timing.
    try:
        gettime()
    except OSError as err:
        print("%s failed: %s" % (name, err))
        continue
    test_clock(name, gettime)
    precision = time.clock_getres(clock_id)
    print("- announced precision: %s" % format_duration(precision))
def main():
    """Restore/summarise a B2 versioned archive as of a requested time."""
    global indir, outdir, args

    # Process command line options
    opt = argparse.ArgumentParser(description=__doc__.strip())
    grp = opt.add_mutually_exclusive_group()
    grp.add_argument('-t', '--time',
                     help='set time YYYY-MM-DDTHH:MM.SS, default=latest')
    grp.add_argument('-f', '--filetime',
                     help='set time based on specified file')
    opt.add_argument('-s', '--summary', action='store_true',
                     help='just print a summary of files and versions')
    opt.add_argument('-g', '--gitkeep', action='store_true',
                     help='preserve any top level git dir in outdir')
    opt.add_argument('-p', '--path',
                     help='only process files under given path')
    opt.add_argument('indir',
                     help='input B2 archive containing all file versions '
                     ' (from --b2-versions)')
    opt.add_argument('outdir', nargs='?',
                     help='output directory to recreate for given time')
    args = opt.parse_args()
    indir = Path(args.indir).expanduser()
    if not indir.is_dir():
        opt.error('indir must be a directory')
    if not args.summary:
        if not args.outdir:
            opt.error('outdir must be specified')
        outdir = Path(args.outdir).expanduser()
        if outdir.exists():
            if not outdir.is_dir():
                opt.error('outdir must be a directory')
            if args.gitkeep:
                # Remember existing .git* entries so they are never purged.
                exgit.extend([str(d) for d in outdir.glob('.git*')])
        outdir.mkdir(parents=True, exist_ok=True)
        # Hard-link based restore requires a single file system.
        if indir.stat().st_dev != outdir.stat().st_dev:
            opt.error('indir and outdir must on same file system')
    if args.filetime:
        afile = Path(args.filetime)
        if not afile.exists():
            opt.error(f'{args.filetime} does not exist')
        argstime = datetime.fromtimestamp(afile.stat().st_mtime)
    elif args.time:
        argstime = datetime.strptime(args.time, TIMEFMT)
        # Add a large fraction to ensure we match against file times which
        # include msecs.
        argstime += timedelta(
            seconds=(1 - time.clock_getres(time.CLOCK_MONOTONIC)))
    else:
        argstime = None

    # Parse all files in the versioned indir
    parsedir(indir, parsefile)

    if args.summary:
        fnames = sorted(FileName.namemap)
        for fname in fnames:
            print(f'{fname}:')
            for fpath in FileName.namemap[fname].paths:
                vers = fmttime(fpath.version) if fpath.version \
                    else '----- current -----'
                print(f' {fmttime(fpath.time)} {vers} {fpath.size:8} B')
        return

    # Iterate through all files and restore version for given time
    for fname in FileName.namemap.values():
        for index, tm in enumerate(fname.times):
            if argstime and tm > argstime:
                break
        else:
            # Candidate files may all be newer than specified
            index += 1
        if index == 0:
            continue
        fp = fname.paths[index - 1]
        # If the latest version had a version string then this file may
        # have been deleted at this time
        if index < len(fname.times) or not fp.version or \
                (argstime and argstime <= fp.version):
            addfile(fp, indir / fp.path, outdir / fname.name)

    # Delete any leftover files
    parsedir(outdir, delfile)

    # Delete all leftover empty dirs (bottom-up so parents empty out too)
    for root, dirs, files in os.walk(outdir, topdown=False):
        for name in dirs:
            dird = Path(root, name)
            if dird.parts[0] not in exgit:
                if not any(dird.iterdir()) and dird != outdir:
                    print(f'deleting empty {dird.relative_to(outdir)}')
                    dird.rmdir()
clocks.append("time") for name in clocks: func = getattr(time, name) test_clock("%s()" % name, func) info = time.get_clock_info(name) try: precision = info.precision print("- announced precision: %s" % format_duration(precision)) except AttributeError: pass print("- implementation: %s" % info.implementation) print("- resolution: %s" % format_duration(info.resolution)) clock_ids = [name for name in dir(time) if name.startswith("CLOCK_")] clock_ids.sort() for clock_id_text in clock_ids: clock_id = getattr(time, clock_id_text) name = "clock_gettime(%s)" % clock_id_text def gettime(): return time.clock_gettime(clock_id) try: gettime() except OSError as err: print("%s failed: %s" % (name, err)) continue test_clock(name, gettime) precision = time.clock_getres(clock_id) print("- announced precision: %s" % format_duration(precision))
def _extract_addr(connection, is_ipv6):
    """Return the (source, destination) addresses of *connection* as text.

    IPv4 addresses are unwrapped through embedded_ipv4 (defined elsewhere);
    IPv6 addresses pass through unchanged.
    """
    if is_ipv6:
        family = socket.AF_INET6
        extract = _mirror
    else:
        family = socket.AF_INET
        extract = embedded_ipv4
    return (socket.inet_ntop(family, extract(connection.saddr)),
            socket.inet_ntop(family, extract(connection.daddr)))


def _mirror(x):
    """Identity transform used when the raw address needs no unwrapping."""
    return x


# NOTE(review): the intent appears to be converting seconds to microseconds,
# but this scales the reading by the clock *resolution* x 1e6 rather than by
# 1e6 — verify the units against the callers of _now_in_us().
CLK_RES = time.clock_getres(time.CLOCK_MONOTONIC) * 1e6  # in us


def _now_in_us():
    # Monotonic reading scaled by CLK_RES (see NOTE above about units).
    return time.clock_gettime(time.CLOCK_MONOTONIC) * CLK_RES


class DNS_STATUS(IntEnum):
    """The events that can be reported by the BPF filter."""

    STATUS_QUERY = 1
    STATUS_ANSWER = 2
    STATUS_FAIL = 3


# Mapping continues in the next chunk of this file.
DNS_STATUS_TO_FLOW = {
print("- resolution in Python: %s" % format_duration(resolution)) clocks = ['clock', 'perf_counter', 'process_time'] if hasattr(time, 'monotonic'): clocks.append('monotonic') clocks.append('time') for name in clocks: func = getattr(time, name) test_clock("%s()" % name, func) info = time.get_clock_info(name) print("- implementation: %s" % info.implementation) print("- resolution: %s" % format_duration(info.resolution)) clock_ids = [name for name in dir(time) if name.startswith("CLOCK_")] clock_ids.sort() for clock_id_text in clock_ids: clock_id = getattr(time, clock_id_text) name = 'clock_gettime(%s)' % clock_id_text def gettime(): return time.clock_gettime(clock_id) try: gettime() except OSError as err: print("%s failed: %s" % (name, err)) continue test_clock(name, gettime) resolution = time.clock_getres(clock_id) print("- announced resolution: %s" % format_duration(resolution))