def fill_mnodelog(obj):
    """
    Populate *obj* (an MNodeLog-like record) with random test data
    suitable for round-trip serialization checks.
    """
    obj.timestamp = np.datetime64(time_ns(), 'ns') + np.timedelta64(random.randint(1, 120), 's')
    obj.node_id = uuid.uuid4()
    obj.run_id = uuid.uuid4()
    obj.state = random.randint(1, 2)
    obj.ended = obj.timestamp + np.timedelta64(random.randint(1, 120), 's')
    obj.session = random.randint(1, 9007199254740992)
    obj.sent = obj.timestamp
    obj.seq = random.randint(1, 10000)

    # per-type worker counts
    for worker_attr in ('routers', 'containers', 'guests', 'proxies', 'marketmakers'):
        setattr(obj, worker_attr, random.randint(1, 32))

    obj.cpu_ctx_switches = random.randint(1, 1000000)

    # we can't just use random() here, since it won't work for roundtrip
    # data checking (eg 33.42830630594208 != 33.428306579589844)
    # obj.cpu_freq = random.random() * 100.
    obj.cpu_freq = rfloat()
    obj.cpu_guest = rfloat()
    obj.cpu_guest_nice = rfloat()
    obj.cpu_idle = rfloat()
    obj.cpu_interrupts = random.randint(1, 100000)
    obj.cpu_iotwait = rfloat()
    obj.cpu_irq = rfloat()
    obj.cpu_nice = rfloat()
    obj.cpu_soft_interrupts = random.randint(1, 100000)
    obj.cpu_softirq = rfloat()
    obj.cpu_steal = rfloat()
    obj.cpu_system = rfloat()
    obj.cpu_user = rfloat()

    # network counters
    obj.network_bytes_recv = random.randint(1, 2**32)
    obj.network_bytes_sent = random.randint(1, 2**32)
    obj.network_connection_af_inet = random.randint(1, 1000)
    obj.network_connection_af_inet6 = random.randint(1, 1000)
    obj.network_connection_af_unix = random.randint(1, 1000)
    obj.network_dropin = random.randint(1, 10000)
    obj.network_dropout = random.randint(1, 10000)
    obj.network_errin = random.randint(1, 10000)
    obj.network_errout = random.randint(1, 10000)
    obj.network_packets_recv = random.randint(1, 2**32)
    obj.network_packets_sent = random.randint(1, 2**32)

    # memory counters, bounded by 32 GiB
    mem_max = 32 * 2**30
    for mem_attr in ('memory_active', 'memory_available', 'memory_buffers',
                     'memory_cached', 'memory_free', 'memory_inactive'):
        setattr(obj, mem_attr, random.randint(1, mem_max))
    obj.memory_percent = rfloat()
    for mem_attr in ('memory_shared', 'memory_slab', 'memory_total', 'memory_used'):
        setattr(obj, mem_attr, random.randint(1, mem_max))

    # disk counters
    disk_max = 10 * 10
    for disk_attr in ('disk_busy_time', 'disk_read_bytes', 'disk_read_count',
                      'disk_read_merged_count', 'disk_read_time',
                      'disk_write_bytes', 'disk_write_count',
                      'disk_write_merged_count', 'disk_write_time'):
        setattr(obj, disk_attr, random.randint(1, disk_max))
def poll(self, verbose=False):
    """
    Measure current stats value and return new stats.

    The psutil sensor reads run on a background thread (``deferToThread``);
    this method is a generator yielding deferreds.
    NOTE(review): presumably decorated with ``@inlineCallbacks`` in the
    enclosing class - confirm.

    :param verbose: If ``True``, include tick/timestamp/period bookkeeping
        in the result dict.
    :type verbose: bool

    :returns: A deferred that resolves with a dict containing new process statistics.
    :rtype: :class:`twisted.internet.defer.Deferred`
    """
    self._tick += 1
    now = time_ns()
    if self._last_poll:
        self._last_period = now - self._last_poll
    if verbose:
        _current = {
            'tick': self._tick,

            # the UTC timestamp when measurement was taken
            'timestamp': now,

            # the effective last period in ns
            'last_period': self._last_period,

            # duration in seconds the retrieval of sensor values took
            'elapsed': self._elapsed,
        }
    else:
        _current = {}

    # uptime, as all durations, is in ns
    _current['uptime'] = int(now - psutil.boot_time() * 10**9)

    def _poll(current, last_value):
        # runs on a background (thread pool) thread - must not touch the reactor

        # normalize with effective period
        diff = 1.
        if self._last_period:
            diff = self._last_period / 10**9

        # int values: bytes_sent, bytes_recv, packets_sent, packets_recv, errin, errout, dropin, dropout
        current['network'] = dict(psutil.net_io_counters()._asdict())

        # int values: read_count, write_count, read_bytes, write_bytes, read_time, write_time, read_merged_count, write_merged_count, busy_time
        current['disk'] = dict(psutil.disk_io_counters()._asdict())

        # derive per-second rates by diffing against the previous sample
        if last_value:
            for k in ['network', 'disk']:
                d = current[k]
                # iterate over a snapshot of the keys, since we add
                # '*_per_sec' keys to d while iterating
                for k2 in list(d.keys()):
                    value = float(d[k2] - last_value[k][k2]) / diff
                    d['{}_per_sec'.format(k2)] = int(value)

        # float values: user, nice, system, idle, iowait, irq, softirq, streal, guest, guest_nice
        current['cpu'] = dict(
            psutil.cpu_times_percent(interval=None)._asdict())

        # cpu_freq() can return None on platforms without frequency sensors
        cpu_freq = psutil.cpu_freq()
        current['cpu']['freq'] = round(
            cpu_freq.current) if cpu_freq else None

        s = psutil.cpu_stats()
        current['cpu']['ctx_switches'] = s.ctx_switches
        current['cpu']['interrupts'] = s.interrupts
        current['cpu']['soft_interrupts'] = s.soft_interrupts

        # int values: total, available, used, free, active, inactive, buffers, cached, shared, slab
        # float values: percent
        current['memory'] = dict(psutil.virtual_memory()._asdict())

        # Network connections: count open connections per address family
        res = {}
        conns = psutil.net_connections(kind='all')
        for c in conns:
            if c.family not in res:
                res[c.family] = 0
            res[c.family] += 1
        res2 = {}
        for f, cnt in res.items():
            res2[f.name] = cnt
        current['network']['connection'] = res2

        return current

    new_value = yield deferToThread(_poll, _current, self._last_value)

    # how long the sensor retrieval took, in ns
    self._elapsed = time_ns() - now
    new_value['elapsed'] = self._elapsed

    self._last_poll = now
    self._last_value = new_value

    returnValue(new_value)
def _test_mnodelog_bigtable(N, M, K):
    """
    Benchmark helper: fill an MNodeLog table with N random records, then
    run M random single-record selects and K random range counts,
    printing throughput for each phase.

    :param N: number of records to insert
    :param M: number of random point lookups
    :param K: number of random range-count queries
    """
    with TemporaryDirectory() as dbpath:
        with zlmdb.Database(dbpath, maxsize=(5 * 2**30)) as db:
            schema = Schema.attach(db)
            data = {}
            print()

            # fill table
            #
            started = time_ns()
            with db.begin(write=True) as txn:
                for i in range(N):
                    rec = MNodeLog()
                    fill_mnodelog(rec)
                    # (timestamp, node_id) is the composite table key
                    key = (rec.timestamp, rec.node_id)
                    schema.mnode_logs[txn, key] = rec
                    data[key] = rec
            duration = (time_ns() - started) / 1000000000.
            rps = int(round(N / duration))
            duration = int(round(duration))
            print('Inserted {} records in {} seconds [{} records/sec]'.format(
                N, duration, rps))

            # keys sorted ascending - needed for meaningful range queries below
            skeys = sorted(data.keys())

            # random single record selects
            #
            if True:
                started = time_ns()
                with db.begin() as txn:
                    for i in range(M):
                        key = random.choice(skeys)
                        mnodelog = schema.mnode_logs[txn, key]
                        assert mnodelog
                duration = (time_ns() - started) / 1000000000.
                rps = int(round(M / duration))
                duration = int(round(duration))
                print('Selected {} records in {} seconds [{} records/sec]'.
                      format(M, duration, rps))

            # random range counts
            #
            if True:
                started = time_ns()
                with db.begin() as txn:
                    for i in range(K):
                        # we select a fixed range of (max) 1000 elements:
                        i1 = random.randint(0, len(skeys) - 1)
                        i2 = random.randint(i1, min(len(skeys) - 1, i1 + 1000))
                        key1 = skeys[i1]
                        key2 = skeys[i2]
                        cnt = schema.mnode_logs.count_range(txn,
                                                            from_key=key1,
                                                            to_key=key2)
                        # count_range is to_key-exclusive, like the slice
                        assert cnt == len(skeys[i1:i2])
                duration = (time_ns() - started) / 1000000000.
                rps = int(round(K / duration))
                duration = int(round(duration))
                print(
                    'Performed {} range counts in {} seconds [{} queries/sec]'.
                    format(K, duration, rps))
def page_xbr_submit_onboard():
    """
    Flask view: process submission of the XBR member onboarding form.

    Validates member name, email, wallet type/address and EULA acceptance,
    sends a verification email via mailgun and stores a pending
    VerifiedAction. Renders an error template on any validation failure,
    or a success template when the verification action was stored.
    """
    session['site_area'] = 'landing'
    session['site-page'] = None

    # example of what the browser submits:
    # ImmutableMultiDict([
    #     ('onboard_member_name', 'oberstet'),
    #     ('onboard_member_email', '*****@*****.**'),
    #     ('onboard_wallet_type', 'imported'),
    #     ('onboard_wallet_address', '0x6231eECbA6e7983efe5ce6d16972E16cCcD97CE7'),
    #     ('onboard_accept_eula', 'on')
    # ])
    onboard_member_name = request.form.get('onboard_member_name', None)
    onboard_member_email = request.form.get('onboard_member_email', None)
    onboard_wallet_type = request.form.get('onboard_wallet_type', None)
    onboard_wallet_address = request.form.get('onboard_wallet_address', None)
    onboard_accept_eula = request.form.get('onboard_accept_eula', None)

    print('page_xbr_submit_onboard:')
    print(' onboard_member_name', onboard_member_name)
    print(' onboard_member_email', onboard_member_email)
    print(' onboard_wallet_type', onboard_wallet_type)
    print(' onboard_wallet_address', onboard_wallet_address)
    print(' onboard_accept_eula', onboard_accept_eula)

    if onboard_wallet_type not in Account.WALLET_TYPE_FROM_STRING:
        return render_template('xbr_onboard_submit_error.html',
                               onboard_member_error='Invalid wallet type "{}"'.format(onboard_wallet_type))
    else:
        onboard_wallet_type = Account.WALLET_TYPE_FROM_STRING[onboard_wallet_type]

    if onboard_accept_eula != 'on':
        return render_template('xbr_onboard_submit_error.html',
                               onboard_member_error='EULA must be accepted')

    # eg, onboard_wallet_address = 0x6231eECbA6e7983efe5ce6d16972E16cCcD97CE7
    # FIX: guard against a missing form field (None) before calling len(),
    # which previously raised TypeError instead of rendering the error page
    if onboard_wallet_address is None or len(onboard_wallet_address) != 42:
        return render_template('xbr_onboard_submit_error.html',
                               onboard_member_error='Invalid wallet address "{}"'.format(onboard_wallet_address))

    try:
        onboard_wallet_address = binascii.a2b_hex(onboard_wallet_address[2:])
    except ValueError:
        # FIX: narrowed from a bare "except:". a2b_hex raises binascii.Error
        # (a ValueError subclass) on non-hex input; a bare except would also
        # swallow unrelated errors (including KeyboardInterrupt/SystemExit)
        return render_template('xbr_onboard_submit_error.html',
                               onboard_member_error='Invalid wallet address "{}"'.format(onboard_wallet_address))

    if not validate_email(onboard_member_email, check_mx=False, verify=False):
        return render_template('xbr_onboard_submit_error.html',
                               onboard_member_error='Invalid email address "{}"'.format(onboard_member_email))

    if not is_valid_username(onboard_member_name):
        return render_template(
            'xbr_onboard_submit_error.html',
            onboard_member_error='Invalid username "{}" - must be a string matching the regular expression {}'.format(
                onboard_member_name, _USERNAME_PAT_STR))

    db = app.config['DB']
    schema = app.config['DBSCHEMA']

    # fast-path collision check; re-checked below inside the write
    # transaction, since the mailgun submit happens in between
    with db.begin() as txn:
        account_oid = schema.idx_accounts_by_username[txn, onboard_member_name]
        if account_oid:
            return render_template('xbr_onboard_submit_error.html',
                                   onboard_member_error='Username "{}" already exists'.format(onboard_member_name))

    vaction_oid = uuid.uuid4()
    vaction_code = generate_activation_code()

    mailgw = app.config['MAILGUN']
    try:
        mailgw.send_onboard_verification(onboard_member_email, vaction_oid, vaction_code)
    except Exception as e:
        return render_template('xbr_onboard_submit_error.html',
                               onboard_member_error='Failed to submit email via mailgun (exception {})'.format(e))

    on_success_url = '{}/member'.format(app.config['WEBSITE_URL'])
    on_error_url = None

    # data attached to the pending verification action, replayed on verify
    verified_data = {
        'onboard_member_name': onboard_member_name,
        'onboard_member_email': onboard_member_email,
        'onboard_wallet_type': onboard_wallet_type,
        'onboard_wallet_address': onboard_wallet_address,
        'on_success_url': on_success_url,
        'on_error_url': on_error_url,
    }

    with db.begin(write=True) as txn:
        # double check (again) for username collision, as the mailgun email submit happens async in above after
        # we initially checked for collision
        account_oid = schema.idx_accounts_by_username[txn, onboard_member_name]
        if account_oid:
            return render_template('xbr_onboard_submit_error.html',
                                   onboard_member_error='Username "{}" already exists'.format(onboard_member_name))

        vaction = VerifiedAction()
        vaction.oid = vaction_oid
        vaction.created = np.datetime64(time_ns(), 'ns')
        vaction.vtype = VerifiedAction.VERIFICATION_TYPE_ONBOARD_MEMBER
        vaction.vstatus = VerifiedAction.VERIFICATION_STATUS_PENDING
        vaction.vcode = vaction_code
        # vaction.verified_oid = None
        vaction.verified_data = verified_data

        schema.verified_actions[txn, vaction.oid] = vaction

    return render_template('xbr_onboard_submit_success.html',
                           onboard_member_email=onboard_member_email,
                           vaction_oid=vaction_oid)
def poll(self, verbose=False):
    """
    Measure current stats value and return new stats.

    Reads psutil process sensors on a background thread (``deferToThread``);
    this method is a generator yielding deferreds.
    NOTE(review): presumably decorated with ``@inlineCallbacks`` in the
    enclosing class - confirm.

    :param verbose: If ``True``, additionally include static process info
        (exe, user, cmdline, ..) and tick/timestamp bookkeeping.
    :type verbose: bool

    :returns: A deferred that resolves with a dict containing new process statistics.
    :rtype: :class:`twisted.internet.defer.Deferred`
    """
    self._tick += 1
    now = time_ns()
    if self._last_poll:
        self._last_period = now - self._last_poll
    if verbose:
        _current = {
            'tick': self._tick,

            # the UTC timestamp when measurement was taken
            'timestamp': now,

            # the effective last period in ns
            'last_period': self._last_period,

            # duration in seconds the retrieval of sensor values took
            'elapsed': self._elapsed,
        }
    else:
        _current = {}

    def _poll(current, last_value):
        # runs on a background (thread pool) thread - must not touch the reactor

        # normalize with effective period
        diff = 1.
        if self._last_period:
            diff = self._last_period / 10**9

        # cmd_started = time.time()

        current['type'] = self._worker_type
        current['pid'] = self._p.pid
        current['status'] = self._p.status()

        if verbose:
            current['exe'] = self._p.exe()
            current['user'] = self._p.username()
            current['name'] = self._p.name()
            current['cmdline'] = ' '.join(self._p.cmdline())
            created = self._p.create_time()
            current['created'] = utcstr(
                datetime.datetime.fromtimestamp(created))

        current['num_fds'] = self._p.num_fds()
        current['num_threads'] = self._p.num_threads()
        # FIX: removed a duplicate `current['num_fds'] = self._p.num_fds()`
        # here - it re-read the same sensor for no effect

        # the following values are cumulative since process creation!
        #
        num_ctx_switches = self._p.num_ctx_switches()
        current['num_ctx_switches_voluntary'] = num_ctx_switches.voluntary
        current['num_ctx_switches_involuntary'] = num_ctx_switches.involuntary

        if self._has_io_counters:
            iocounters = self._p.io_counters()
            current['read_ios'] = iocounters.read_count
            current['write_ios'] = iocounters.write_count
            current['read_bytes'] = iocounters.read_bytes
            current['write_bytes'] = iocounters.write_bytes
        else:
            # platform without per-process IO counters (eg macOS)
            current['read_ios'] = None
            current['write_ios'] = None
            current['read_bytes'] = None
            current['write_bytes'] = None

        cpu = self._p.cpu_times()
        current['cpu_user'] = cpu.user
        current['cpu_system'] = cpu.system

        # current['command_duration'] = time.time() - cmd_started

        # derive per-second rates from the cumulative counters by diffing
        # against the previous sample
        for key in [
                'read_ios', 'write_ios', 'read_bytes', 'write_bytes',
                'cpu_user', 'cpu_system', 'num_ctx_switches_voluntary',
                'num_ctx_switches_involuntary'
        ]:
            if last_value and last_value[key] is not None:
                value = float(current[key] - last_value[key]) / diff
                current['{}_per_sec'.format(key)] = int(value)

        return current

    new_value = yield deferToThread(_poll, _current, self._last_value)

    self._last_poll = now
    self._last_value = new_value

    returnValue(new_value)
def new_key():
    """Return a fresh random table key: (20 random bytes, current time as ns datetime64, random UUID)."""
    token = os.urandom(20)
    stamp = np.datetime64(time_ns(), 'ns')
    oid = uuid.uuid4()
    return token, stamp, oid
def new_key():
    """Return the current wallclock time as a nanosecond-resolution datetime64 key."""
    now_ns = time_ns()
    return np.datetime64(now_ns, 'ns')
def parse(node_id, heartbeat):
    """
    Build an MNodeLog record from a raw node heartbeat dict.

    Missing heartbeat fields are mapped to ``None``.

    :param node_id: ID of the node the heartbeat originates from.
    :param heartbeat: Raw heartbeat dict (must contain an int 'timestamp').
    :return: Newly created MNodeLog instance.
    """
    assert isinstance(node_id, uuid.UUID)
    assert type(heartbeat) == dict
    assert 'timestamp' in heartbeat and type(heartbeat['timestamp']) == int

    obj = MNodeLog()

    # receive time (now) - obj._sent below is the node-side send time
    obj._timestamp = np.datetime64(time_ns(), 'ns')
    obj._node_id = node_id
    # NOTE(review): run_id is zero-filled here - presumably set by the
    # caller afterwards; confirm
    obj._run_id = uuid.UUID(bytes=b'\0' * 16)
    obj._state = heartbeat.get('state', None)
    obj._ended = np.datetime64(heartbeat['ended'], 'ns') if heartbeat.get(
        'ended', None) else None
    obj._session = heartbeat.get('session', None)
    obj._sent = np.datetime64(heartbeat['timestamp'], 'ns') if heartbeat.get('timestamp', None) else None
    obj._seq = heartbeat.get('seq', None)

    # per-type worker counts
    workers = heartbeat.get('workers', {})
    obj._routers = workers.get('router', None)
    obj._containers = workers.get('container', None)
    obj._guests = workers.get('guest', None)
    obj._proxies = workers.get('proxy', None)
    obj._marketmakers = workers.get('xbrmm', None)

    # host system sensor sections of the heartbeat
    system = heartbeat.get('system', {})
    system_cpu = system.get('cpu', {})
    system_net = system.get('network', {})
    system_mem = system.get('memory', {})
    system_dsk = system.get('disk', {})

    obj._cpu_ctx_switches = system_cpu.get('ctx_switches', None)
    obj._cpu_freq = system_cpu.get('freq', None)
    obj._cpu_guest = system_cpu.get('guest', None)
    obj._cpu_guest_nice = system_cpu.get('guest_nice', None)
    obj._cpu_idle = system_cpu.get('idle', None)
    obj._cpu_interrupts = system_cpu.get('interrupts', None)
    # NOTE(review): 'iotwait' looks like a typo for 'iowait', but the same
    # key is used consistently elsewhere in this file - keep in sync with
    # the producer side
    obj._cpu_iotwait = system_cpu.get('iotwait', None)
    obj._cpu_irq = system_cpu.get('irq', None)
    obj._cpu_nice = system_cpu.get('nice', None)
    obj._cpu_soft_interrupts = system_cpu.get('soft_interrupts', None)
    obj._cpu_softirq = system_cpu.get('softirq', None)
    obj._cpu_steal = system_cpu.get('steal', None)
    obj._cpu_system = system_cpu.get('system', None)
    obj._cpu_user = system_cpu.get('user', None)

    obj._network_bytes_recv = system_net.get('bytes_recv', None)
    obj._network_bytes_sent = system_net.get('bytes_sent', None)
    obj._network_packets_recv = system_net.get('packets_recv', None)
    obj._network_packets_sent = system_net.get('packets_sent', None)
    obj._network_dropin = system_net.get('dropin', None)
    obj._network_dropout = system_net.get('dropout', None)
    obj._network_errin = system_net.get('errin', None)
    obj._network_errout = system_net.get('errout', None)

    # open connection counts per address family
    connection = system_net.get('connection', {})
    obj._network_connection_af_inet = connection.get('AF_INET', None)
    obj._network_connection_af_inet6 = connection.get('AF_INET6', None)
    obj._network_connection_af_unix = connection.get('AF_UNIX', None)

    obj._memory_active = system_mem.get('active', None)
    obj._memory_available = system_mem.get('available', None)
    obj._memory_buffers = system_mem.get('buffers', None)
    obj._memory_cached = system_mem.get('cached', None)
    obj._memory_free = system_mem.get('free', None)
    obj._memory_inactive = system_mem.get('inactive', None)
    obj._memory_percent = system_mem.get('percent', None)
    obj._memory_shared = system_mem.get('shared', None)
    obj._memory_slab = system_mem.get('slab', None)
    obj._memory_total = system_mem.get('total', None)
    obj._memory_used = system_mem.get('used', None)

    obj._disk_busy_time = system_dsk.get('busy_time', None)
    obj._disk_read_bytes = system_dsk.get('read_bytes', None)
    obj._disk_read_count = system_dsk.get('read_count', None)
    obj._disk_read_merged_count = system_dsk.get('read_merged_count', None)
    obj._disk_read_time = system_dsk.get('read_time', None)
    obj._disk_write_bytes = system_dsk.get('write_bytes', None)
    obj._disk_write_count = system_dsk.get('write_count', None)
    obj._disk_write_merged_count = system_dsk.get('write_merged_count', None)
    obj._disk_write_time = system_dsk.get('write_time', None)

    return obj
def onJoin(self, details):
    """
    WAMP session joined: record session details, subscribe to the test
    topic, then publish ``self._repeat`` events (with acknowledgement),
    logging every JOIN/EVENT/PUBLISH into ``self._wamp_log``.

    NOTE(review): a generator using ``yield`` on deferreds - presumably
    decorated with ``@inlineCallbacks`` in the enclosing class; confirm.

    :param details: Session details provided by the router on join.
    """
    # self.log.info("{obj_id} connected: {details}", obj_id=id(self), details=details)

    # snapshot of the session details (shared below as event payload)
    bar = {
        'realm': details.realm,
        'session': details.session,
        'authid': details.authid,
        'authrole': details.authrole,
        'authmethod': details.authmethod,
        'authprovider': details.authprovider,
        'authextra': details.authextra,
        'serializer': details.serializer,
        'transport': details.transport,
        'node': details.authextra.get('x_cb_node', None),
        'worker': details.authextra.get('x_cb_worker', None),
        'pid': details.authextra.get('x_cb_pid', None),
        'proxy_node': details.authextra.get('x_cb_proxy_node', None),
        'proxy_worker': details.authextra.get('x_cb_proxy_worker', None),
        'x_cb_proxy_pid': details.authextra.get('x_cb_proxy_pid', None),
    }
    self._wamp_sessions[id(self)] = bar
    self._wamp_log.append(
        (id(self), 'JOIN', time_ns(), self.session_id, bar))
    # print(id(self), 'JOIN')

    mytopic1 = u"com.example.mytopic1"

    def on_mytopic1(*args, **kwargs):
        # event handler: sanity-check the payload and log the event
        details = kwargs.pop('details', None)
        # 'foo' is '0x' + 20 hex chars (10 random bytes) == 22 chars
        assert 'foo' in kwargs and type(kwargs['foo']) == str and len(
            kwargs['foo']) == 22
        assert 'bar' in kwargs and type(kwargs['bar']) == dict
        self._wamp_log.append(
            (id(self), 'EVENT', time_ns(), self.session_id, mytopic1, args,
             kwargs, details.publication if details else None))
        # print(id(self), 'EVENT')

    sub = yield self.subscribe(on_mytopic1, mytopic1)

    self._running = True

    # signal test driver that we are subscribed and ready
    ready1 = self.config.extra.get('ready1', None)
    if ready1 and not ready1.called:
        ready1.callback((self, bar))

    # optionally wait for the test driver before starting to publish
    continue1 = self.config.extra.get('continue1', None)
    if continue1:
        yield continue1

    pid = os.getpid()
    counter = 0
    print('starting loop on {} for {} repeats ..'.format(
        id(self), self._repeat))
    while self.is_connected() and counter < self._repeat:
        # print("pid {} publish {} to '{}'".format(pid, counter, mytopic1))
        baz = os.urandom(10)
        args = [pid, counter]
        kwargs = {'foo': '0x' + binascii.b2a_hex(baz).decode(), 'bar': bar}
        # acknowledged publish, not excluding ourselves, so we receive
        # our own events too
        pub = yield self.publish(
            mytopic1,
            *args,
            **kwargs,
            options=PublishOptions(acknowledge=True, exclude_me=False),
        )
        self._wamp_log.append(
            (id(self), 'PUBLISH', time_ns(), self.session_id, mytopic1,
             args, kwargs, pub.id if pub else None))
        # print(id(self), 'PUBLISH')
        counter += 1
        yield sleep(.1)

    # signal test driver that publishing has finished
    ready2 = self.config.extra.get('ready2', None)
    if ready2 and not ready2.called:
        ready2.callback((self, bar))

    # optionally wait for the test driver before tearing down
    continue2 = self.config.extra.get('continue2', None)
    if continue2:
        yield continue2

    yield sub.unsubscribe()

    self.leave()
def stats(self, reset=True, details=False):
    """
    Get (and optionally reset) serializer statistics.

    :param reset: If ``True``, reset the serializer statistics after reading.
    :type reset: bool

    :param details: If ``True``, return detailed statistics split up by
        serialization/unserialization.
    :type details: bool

    :return: Serializer statistics, eg:

        .. code-block:: json

            {
                "timestamp": 1574156576688704693,
                "duration": 34000000000,
                "bytes": 0,
                "messages": 0,
                "rated_messages": 0
            }

    :rtype: dict
    """
    assert (type(reset) == bool)
    assert (type(details) == bool)

    self._stats_cycle += 1

    # common header for both result shapes
    data = {
        'cycle': self._stats_cycle,
        'serializer': self.SERIALIZER_ID,
        'timestamp': self._stats_reset,
        'duration': time_ns() - self._stats_reset,
    }
    if details:
        # split counters by direction
        data['serialized'] = {
            'bytes': self._serialized_bytes,
            'messages': self._serialized_messages,
            'rated_messages': self._serialized_rated_messages,
        }
        data['unserialized'] = {
            'bytes': self._unserialized_bytes,
            'messages': self._unserialized_messages,
            'rated_messages': self._unserialized_rated_messages,
        }
    else:
        # aggregate both directions
        data['bytes'] = self._serialized_bytes + self._unserialized_bytes
        data['messages'] = self._serialized_messages + self._unserialized_messages
        data['rated_messages'] = (self._serialized_rated_messages +
                                  self._unserialized_rated_messages)

    if reset:
        self._serialized_bytes = 0
        self._serialized_messages = 0
        self._serialized_rated_messages = 0
        self._unserialized_bytes = 0
        self._unserialized_messages = 0
        self._unserialized_rated_messages = 0
        self._stats_reset = time_ns()

    return data
def unserialize(self, payload, isBinary=None):
    """
    Implements :func:`autobahn.wamp.interfaces.ISerializer.unserialize`

    Unserialize a transport payload into a list of parsed WAMP messages,
    updating unserialization statistics and possibly triggering the
    auto-reset statistics callback.

    :param payload: Raw transport payload (bytes).
    :param isBinary: If given, checked against the serializer's expected
        binary/text mode.
    :raises: ProtocolError on any malformed payload or unknown message type.
    :return: List of parsed WAMP message objects.
    """
    if isBinary is not None:
        if isBinary != self._serializer.BINARY:
            raise ProtocolError(
                "invalid serialization of WAMP message (binary {0}, but expected {1})"
                .format(isBinary, self._serializer.BINARY))
    try:
        raw_msgs = self._serializer.unserialize(payload)
    except Exception as e:
        # wrap any codec error in a ProtocolError for the caller
        raise ProtocolError(
            "invalid serialization of WAMP message: {0} {1}".format(
                type(e).__name__, e))

    if self._serializer.NAME == 'flatbuffers':
        # flatbuffers messages are already parsed message objects
        msgs = raw_msgs
    else:
        msgs = []
        for raw_msg in raw_msgs:
            # each wire-level message is [message_type: int, ...]
            if type(raw_msg) != list:
                raise ProtocolError(
                    "invalid type {0} for WAMP message".format(
                        type(raw_msg)))

            if len(raw_msg) == 0:
                raise ProtocolError("missing message type in WAMP message")

            message_type = raw_msg[0]

            if type(message_type) != int:
                raise ProtocolError(
                    "invalid type {0} for WAMP message type".format(
                        type(message_type)))

            Klass = self.MESSAGE_TYPE_MAP.get(message_type)

            if Klass is None:
                raise ProtocolError(
                    "invalid WAMP message type {0}".format(message_type))

            # this might again raise `ProtocolError` ..
            msg = Klass.parse(raw_msg)

            msgs.append(msg)

    # maintain statistics for unserialized WAMP message data
    self._unserialized_bytes += len(payload)
    self._unserialized_messages += len(msgs)
    # rated messages: payload size rounded up in units of RATED_MESSAGE_SIZE
    self._unserialized_rated_messages += int(
        math.ceil(float(len(payload)) / self.RATED_MESSAGE_SIZE))

    # maybe auto-reset and trigger user callback ..
    if self._autoreset_callback and (
        (self._autoreset_duration and
         (time_ns() - self._stats_reset) >= self._autoreset_duration) or
        (self._autoreset_rated_messages and self.stats_rated_messages()
         >= self._autoreset_rated_messages)):
        stats = self.stats(reset=True)
        self._autoreset_callback(stats)

    return msgs
def parse(data):
    """
    Build a MasterNodeUsage record from a plain (JSON-style) dict.

    Field values are validated with asserts; missing fields default to
    ``None`` (or current time for 'timestamp', 0 for 'status').

    :param data: Parsed usage record data.
    :return: Newly created MasterNodeUsage instance.
    """
    assert type(data) == dict, 'data parsed must have type dict, but was "{}"'.format(type(data))
    obj = MasterNodeUsage()

    timestamp = data.get('timestamp', None)
    assert timestamp is None or type(
        timestamp) == int, '"timestamp" must have type int, but was "{}"'.format(type(timestamp))
    if timestamp is None:
        # set current time as default
        obj._timestamp = np.datetime64(time_ns(), 'ns')
    else:
        # set the value contained in the parsed data
        obj._timestamp = np.datetime64(timestamp, 'ns')

    timestamp_from = data.get('timestamp_from', None)
    assert timestamp_from is None or type(
        timestamp_from) == int, '"timestamp_from" must have type int, but was "{}"'.format(
            type(timestamp_from))
    obj._timestamp_from = np.datetime64(timestamp_from, 'ns') if timestamp_from is not None else None

    mrealm_id = data.get('mrealm_id', None)
    assert mrealm_id is None or type(
        mrealm_id) == str, '"mrealm_id" must have type str, but was "{}"'.format(type(mrealm_id))
    if mrealm_id:
        obj._mrealm_id = uuid.UUID(mrealm_id)

    metering_id = data.get('metering_id', None)
    assert metering_id is None or type(
        metering_id) == str, '"metering_id" must have type str, but was "{}"'.format(type(metering_id))
    if metering_id:
        obj._metering_id = uuid.UUID(metering_id)

    # Ed25519 public key (raw 32 bytes)
    pubkey = data.get('pubkey', None)
    assert pubkey is None or type(pubkey) == bytes and len(
        pubkey) == 32, '"pubkey" must have type bytes of length 32, but was "{}" of length {}'.format(
            type(pubkey), len(pubkey) if type(pubkey) == bytes else None)
    obj._pubkey = pubkey

    # packed IPv4 (4 bytes) or IPv6 (16 bytes) address
    client_ip_address = data.get('client_ip_address', None)
    assert client_ip_address is None or type(client_ip_address) == bytes and len(client_ip_address) in [
        4, 16
    ], '"client_ip_address" must have type bytes of length 4 or 16, but was "{}" of length {}'.format(
        type(client_ip_address), len(client_ip_address) if type(client_ip_address) == bytes else None)
    obj._client_ip_address = client_ip_address

    client_ip_version = data.get('client_ip_version', None)
    assert client_ip_version is None or client_ip_version == 0 or (
        type(client_ip_version) == int and client_ip_version in [4, 6]
    ), '"client_ip_version" must have value [4, 6], but was "{}"'.format(client_ip_version)
    obj._client_ip_version = client_ip_version

    client_ip_port = data.get('client_ip_port', None)
    assert client_ip_port is None or client_ip_port == 0 or (
        type(client_ip_port) == int and client_ip_port in range(
            2**16)), '"client_ip_port" must have value [0, 2**16[, but was "{}"'.format(client_ip_port)
    obj._client_ip_port = client_ip_port

    seq = data.get('seq', None)
    assert seq is None or type(seq) == int, '"seq" must have type int, but was "{}"'.format(type(seq))
    obj._seq = seq

    sent = data.get('sent', None)
    assert sent is None or type(sent) == int, '"sent" must have type int, but was "{}"'.format(type(sent))
    if sent is not None:
        obj._sent = np.datetime64(sent, 'ns') if sent else None

    processed = data.get('processed', None)
    assert processed is None or type(
        processed) == int, '"processed" must have type int, but was "{}"'.format(type(processed))
    obj._processed = np.datetime64(processed, 'ns') if processed else None

    # NOTE(review): unlike the other fields, 'status' defaults to 0, not None
    status = data.get('status', 0)
    assert status is None or (type(status) == int and status in range(4)), '"status" must have type int, but was "{}"'.format(
        type(status))
    obj._status = status

    status_message = data.get('status_message', None)
    assert status_message is None or type(
        status_message) == str, '"status_message" must have type str, but was "{}"'.format(
            type(status_message))
    obj._status_message = status_message

    # metering data:

    count = data.get('count', None)
    assert count is None or type(count) == int
    obj._count = count

    total = data.get('total', None)
    assert total is None or type(total) == int
    obj._total = total

    nodes = data.get('nodes', None)
    assert nodes is None or type(nodes) == int
    obj._nodes = nodes

    controllers = data.get('controllers', None)
    assert controllers is None or type(controllers) == int
    obj._controllers = controllers

    hostmonitors = data.get('hostmonitors', None)
    assert hostmonitors is None or type(hostmonitors) == int
    obj._hostmonitors = hostmonitors

    routers = data.get('routers', None)
    assert routers is None or type(routers) == int
    obj._routers = routers

    containers = data.get('containers', None)
    assert containers is None or type(containers) == int
    obj._containers = containers

    guests = data.get('guests', None)
    assert guests is None or type(guests) == int
    obj._guests = guests

    proxies = data.get('proxies', None)
    assert proxies is None or type(proxies) == int
    obj._proxies = proxies

    marketmakers = data.get('marketmakers', None)
    assert marketmakers is None or type(marketmakers) == int
    obj._marketmakers = marketmakers

    sessions = data.get('sessions', None)
    assert sessions is None or type(sessions) == int
    obj._sessions = sessions

    # per-message-type WAMP traffic counters
    msgs_call = data.get('msgs_call', None)
    assert msgs_call is None or type(msgs_call) == int
    obj._msgs_call = msgs_call

    msgs_yield = data.get('msgs_yield', None)
    assert msgs_yield is None or type(msgs_yield) == int
    obj._msgs_yield = msgs_yield

    msgs_invocation = data.get('msgs_invocation', None)
    assert msgs_invocation is None or type(msgs_invocation) == int
    obj._msgs_invocation = msgs_invocation

    msgs_result = data.get('msgs_result', None)
    assert msgs_result is None or type(msgs_result) == int
    obj._msgs_result = msgs_result

    msgs_error = data.get('msgs_error', None)
    assert msgs_error is None or type(msgs_error) == int
    obj._msgs_error = msgs_error

    msgs_publish = data.get('msgs_publish', None)
    assert msgs_publish is None or type(msgs_publish) == int
    obj._msgs_publish = msgs_publish

    msgs_published = data.get('msgs_published', None)
    assert msgs_published is None or type(msgs_published) == int
    obj._msgs_published = msgs_published

    msgs_event = data.get('msgs_event', None)
    assert msgs_event is None or type(msgs_event) == int
    obj._msgs_event = msgs_event

    msgs_register = data.get('msgs_register', None)
    assert msgs_register is None or type(msgs_register) == int
    obj._msgs_register = msgs_register

    msgs_registered = data.get('msgs_registered', None)
    assert msgs_registered is None or type(msgs_registered) == int
    obj._msgs_registered = msgs_registered

    msgs_subscribe = data.get('msgs_subscribe', None)
    assert msgs_subscribe is None or type(msgs_subscribe) == int
    obj._msgs_subscribe = msgs_subscribe

    msgs_subscribed = data.get('msgs_subscribed', None)
    assert msgs_subscribed is None or type(msgs_subscribed) == int
    obj._msgs_subscribed = msgs_subscribed

    return obj
def test_time_ns(framework):
    """txaio.time_ns() must return a strictly positive timestamp."""
    stamp = txaio.time_ns()
    assert stamp > 0
def parse(mrealm_id, node_id, worker_id, heartbeat):
    """
    Build an MWorkerLog record from a raw worker heartbeat dict.

    Missing sensor fields default to 0; missing meta fields to ``None``.

    :param mrealm_id: Management realm ID.
    :param node_id: ID of the node the worker runs on.
    :param worker_id: ID of the worker within the node.
    :param heartbeat: Raw heartbeat dict (must contain int 'timestamp',
        int 'seq' and str 'type').
    :return: Newly created MWorkerLog instance.
    """
    assert isinstance(mrealm_id, uuid.UUID)
    assert isinstance(node_id, uuid.UUID)
    assert type(worker_id) == str
    assert type(heartbeat) == dict
    assert 'timestamp' in heartbeat and type(heartbeat['timestamp']) == int
    assert 'seq' in heartbeat and type(heartbeat['seq']) == int
    assert 'type' in heartbeat and type(heartbeat['type']) == str

    obj = MWorkerLog()

    # receive time (now) - obj._sent below is the worker-side send time
    obj._timestamp = np.datetime64(time_ns(), 'ns')
    obj._period = heartbeat.get('period', None)
    obj._mrealm_id = mrealm_id
    obj._node_id = node_id
    obj._worker_id = worker_id
    obj._sent = np.datetime64(heartbeat['timestamp'], 'ns') if heartbeat.get('timestamp', None) else None
    obj._seq = heartbeat.get('seq', None)
    obj._type = MWorkerLog.WORKER_TYPES.get(heartbeat.get('type', None), None)
    obj._state = heartbeat.get('state', None)

    # process-level sensors
    process = heartbeat.get('process', {})
    obj._num_fds = process.get('num_fds', 0)
    obj._num_threads = process.get('num_threads', 0)
    obj._num_ctx_switches_involuntary = process.get(
        'num_ctx_switches_involuntary', 0)
    obj._num_ctx_switches_involuntary_per_sec = process.get(
        'num_ctx_switches_involuntary_per_sec', 0)
    obj._num_ctx_switches_voluntary = process.get(
        'num_ctx_switches_voluntary', 0)
    obj._num_ctx_switches_voluntary_per_sec = process.get(
        'num_ctx_switches_voluntary_per_sec', 0)
    obj._cpu_system = process.get('cpu_system', 0)
    obj._cpu_system_per_sec = process.get('cpu_system_per_sec', 0)
    obj._cpu_wait = process.get('cpu_wait', 0)
    obj._cpu_wait_per_sec = process.get('cpu_wait_per_sec', 0)
    obj._cpu_user = process.get('cpu_user', 0)
    obj._cpu_user_per_sec = process.get('cpu_user_per_sec', 0)
    obj._read_bytes = process.get('read_bytes', 0)
    obj._read_bytes_per_sec = process.get('read_bytes_per_sec', 0)
    obj._read_ios = process.get('read_ios', 0)
    obj._read_ios_per_sec = process.get('read_ios_per_sec', 0)
    obj._write_bytes = process.get('write_bytes', 0)
    obj._write_bytes_per_sec = process.get('write_bytes_per_sec', 0)
    # FIX: the original assigned _write_bytes_per_sec twice and never set
    # _write_ios - mirror the read_* field pattern above
    obj._write_ios = process.get('write_ios', 0)
    obj._write_ios_per_sec = process.get('write_ios_per_sec', 0)
    obj._sent_bytes = process.get('sent_bytes', 0)
    obj._sent_bytes_per_sec = process.get('sent_bytes_per_sec', 0)
    obj._sent_ios = process.get('sent_ios', 0)
    obj._sent_ios_per_sec = process.get('sent_ios_per_sec', 0)
    obj._recv_bytes = process.get('recv_bytes', 0)
    obj._recv_bytes_per_sec = process.get('recv_bytes_per_sec', 0)
    obj._recv_ios = process.get('recv_ios', 0)
    obj._recv_ios_per_sec = process.get('recv_ios_per_sec', 0)

    # router-level sensors (empty for non-router workers)
    router = heartbeat.get('router', {})
    obj._router_roles = router.get('roles', 0)
    obj._router_sessions = router.get('sessions', 0)

    # per-WAMP-message-type counters
    messages = router.get('messages', {})
    obj._recv_call = messages.get('call', 0)
    obj._recv_yield = messages.get('yield', 0)
    obj._sent_invocation = messages.get('invocation', 0)
    obj._sent_result = messages.get('result', 0)
    obj._recv_publish = messages.get('publish', 0)
    obj._sent_published = messages.get('published', 0)
    obj._sent_event = messages.get('event', 0)
    obj._recv_register = messages.get('register', 0)
    obj._sent_registered = messages.get('registered', 0)
    obj._recv_subscribe = messages.get('subscribe', 0)
    obj._sent_subscribed = messages.get('subscribed', 0)

    return obj
async def on_rotate(key_series): key_id = key_series.key_id self._keys_map[key_id] = key_series # FIXME: expose the knobs hard-coded in below .. # offer the key to the market maker (retry 5x in specific error cases) retries = 5 while retries: try: valid_from = time_ns() - 10 * 10**9 delegate = self._addr # FIXME: sign the supplied offer information using self._pkey signature = os.urandom(65) provider_id = self._provider_id offer = await self._session.call( 'xbr.marketmaker.place_offer', key_id, api_id, prefix, valid_from, delegate, signature, privkey=None, price=pack_uint256(price) if price is not None else None, categories=categories, expires=None, copies=None, provider_id=provider_id) self.log.info( '{tx_type} key "{key_id}" offered for {price} [api_id={api_id}, prefix="{prefix}", delegate="{delegate}"]', tx_type=hl('XBR OFFER ', color='magenta'), key_id=hl(uuid.UUID(bytes=key_id)), api_id=hl(uuid.UUID(bytes=api_id)), price=hl(str( int(price / 10**18) if price is not None else 0) + ' XBR', color='magenta'), delegate=hl(binascii.b2a_hex(delegate).decode()), prefix=hl(prefix)) self.log.debug('offer={offer}', offer=offer) break except ApplicationError as e: if e.error == 'wamp.error.no_such_procedure': self.log.warn( 'xbr.marketmaker.offer: procedure unavailable!') else: self.log.failure() break except TransportLost: self.log.warn( 'TransportLost while calling xbr.marketmaker.offer!') break except: self.log.failure() retries -= 1 self.log.warn( 'Failed to place offer for key! Retrying {retries}/5 ..', retries=retries) await asyncio.sleep(1)
def new_key(): return np.datetime64(time_ns(), 'ns'), os.urandom(32)
def start_link(self, link_id, link_config, caller):
    """
    Start a router-to-router link: create and connect a local and a remote
    session, cross-wire them, and register the new link under ``link_id``.

    NOTE(review): this is a generator using ``yield`` on Deferreds —
    presumably decorated with ``@inlineCallbacks`` outside this view;
    confirm at the definition site.

    :param link_id: ID (str) under which to register the new router link.
    :param link_config: The link configuration (``RLinkConfig``).
    :param caller: Session identity (``SessionIdent``) of the caller that
        initiated starting this link.
    :return: The started ``RLink`` object.
    :raises ApplicationError: ``crossbar.error.already_running`` if a link
        with this ID is already registered.
    """
    assert type(link_id) == str
    assert isinstance(link_config, RLinkConfig)
    assert isinstance(caller, SessionIdent)

    if link_id in self._links:
        raise ApplicationError('crossbar.error.already_running',
                               'router link {} already running'.format(link_id))

    # setup local session
    #
    local_extra = {
        'other': None,  # cross-wired to the remote session below
        'on_ready': Deferred(),  # fired once the local session joined
        'rlink': link_id,
        'forward_events': link_config.forward_local_events,
        'forward_invocations': link_config.forward_local_invocations,
    }
    local_realm = self._realm.config['name']
    local_authid = link_config.authid or util.generate_serial_number()
    local_authrole = 'trusted'
    local_config = ComponentConfig(local_realm, local_extra)
    local_session = RLinkLocalSession(local_config)

    # setup remote session
    #
    remote_extra = {
        'rlink_manager': self,
        'other': None,  # cross-wired to the local session below
        'on_ready': Deferred(),  # fired once the remote session joined
        'authid': link_config.authid,
        'exclude_authid': link_config.exclude_authid,
        'forward_events': link_config.forward_remote_events,
        'forward_invocations': link_config.forward_remote_invocations,
    }
    remote_realm = link_config.realm
    remote_config = ComponentConfig(remote_realm, remote_extra)
    remote_session = RLinkRemoteSession(remote_config)

    # cross-connect the two sessions
    #
    local_extra['other'] = remote_session
    remote_extra['other'] = local_session

    # the rlink
    #
    rlink = RLink(link_id, link_config)
    # register the link up-front so concurrent start attempts fail fast;
    # it is removed again below if connecting fails
    self._links[link_id] = rlink
    local_extra['tracker'] = rlink

    # create connecting client endpoint
    #
    connecting_endpoint = create_connecting_endpoint_from_config(
        link_config.transport['endpoint'], self._controller.cbdir,
        self._controller._reactor, self.log)
    try:
        # connect the local session
        #
        self._realm.controller._router_session_factory.add(
            local_session,
            self._realm.router,
            authid=local_authid,
            authrole=local_authrole,
            authextra=local_extra)
        yield local_extra['on_ready']

        # connect the remote session
        #
        # remote connection parameters to ApplicationRunner:
        #
        # url: The WebSocket URL of the WAMP router to connect to (e.g. ws://somehost.com:8090/somepath)
        # realm: The WAMP realm to join the application session to.
        # extra: Optional extra configuration to forward to the application component.
        # serializers: List of :class:`autobahn.wamp.interfaces.ISerializer` (or None for default serializers).
        # ssl: None or :class:`twisted.internet.ssl.CertificateOptions`
        # proxy: Explicit proxy server to use; a dict with ``host`` and ``port`` keys
        # headers: Additional headers to send (only applies to WAMP-over-WebSocket).
        # max_retries: Maximum number of reconnection attempts. Unlimited if set to -1.
        # initial_retry_delay: Initial delay for reconnection attempt in seconds (Default: 1.0s).
        # max_retry_delay: Maximum delay for reconnection attempts in seconds (Default: 60s).
        # retry_delay_growth: The growth factor applied to the retry delay between reconnection attempts (Default 1.5).
        # retry_delay_jitter: A 0-argument callable that introduces noise into the delay. (Default random.random)
        #
        remote_runner = ApplicationRunner(
            url=link_config.transport['url'], realm=remote_realm, extra=remote_extra)
        yield remote_runner.run(
            remote_session,
            start_reactor=False,
            auto_reconnect=True,
            endpoint=connecting_endpoint,
            reactor=self._controller._reactor)
        yield remote_extra['on_ready']
    except:
        # make sure to remove the half-initialized link from our map ..
        del self._links[link_id]

        # .. and then re-raise
        raise

    # the router link is established: store final infos
    rlink.started = time_ns()
    rlink.started_by = caller
    rlink.local = local_session
    rlink.remote = remote_session

    return rlink
def new_key(): return _StringKeysMixin.new_key(), np.datetime64(time_ns(), 'ns')
def new_key(): return random.randint(0, 2**16), uuid.uuid4(), np.datetime64( time_ns(), 'ns')
def twisted_main(reactor, stdscr=None, mrealms=None, management_url=None, privkey_file=None):
    """
    Main monitoring loop: periodically (every 5s) polls all nodes in the
    given management realms via the CFC management API and either renders
    a curses dashboard (when ``stdscr`` is given) or pretty-prints the raw
    results to stdout.

    NOTE(review): this is a generator using ``yield`` on Deferreds —
    presumably driven by ``@inlineCallbacks`` / ``react``; confirm at the
    definition/decorator site (not visible in this block).

    :param reactor: Twisted reactor (unused directly in this body).
    :param stdscr: Optional curses screen; if None, output goes to stdout.
    :param mrealms: List of management realm names (default: ``['default']``).
    :param management_url: URL of the management router to connect to.
    :param privkey_file: Private key file used for management authentication.
    """
    mrealms = mrealms or ['default']

    try:
        # splash screen while the first polling round runs
        if stdscr:
            stdscr.clear()

            y = 5
            for line in personality.Personality.BANNER.splitlines():
                stdscr.addstr(y, 20, line, curses.color_pair(227))
                y += 1
            y += 3

            stdscr.addstr(
                y, 24, 'Please wait while collecting data from managed nodes ...')
            stdscr.refresh()

        # poll-and-render loop; exits only via exception (incl. Ctrl-C)
        while True:
            if stdscr:
                stdscr.clear()

                # draw the fixed table header; x/y track the screen cursor
                y = 0
                stdscr.addstr(y, 0, '=' * 240)
                y += 1

                x = 0
                stdscr.addstr(y, x, 'Node')
                x += 34
                stdscr.addstr(y, x, 'Mgmt Realm', curses.color_pair(14))
                x += 15
                stdscr.addstr(y, x, 'Node ID', curses.color_pair(14))
                x += 25
                stdscr.addstr(y, x, 'Node OID', curses.color_pair(14))
                x += 40
                stdscr.addstr(y, x, 'Status')
                x += 10
                stdscr.addstr(y, x, 'Last Heartbeat')
                x += 12

                # system stats columns (CPU / memory / sockets)
                x += 4
                stdscr.addstr(y, x + 0, ' Usr')
                stdscr.addstr(y, x + 5, ' Sys')
                stdscr.addstr(y, x + 10, ' Idl')
                x += 1
                stdscr.addstr(y, x + 15, ' Mem')
                stdscr.addstr(y, x + 20, ' IPv4 sckts')
                x += 5 * 5
                x += 11

                # worker-count columns by worker type
                stdscr.addstr(y, x + 0, ' Pxy', curses.color_pair(41))
                stdscr.addstr(y, x + 4, ' Rtr', curses.color_pair(41))
                stdscr.addstr(y, x + 8, ' Xbr', curses.color_pair(41))
                stdscr.addstr(y, x + 12, ' Cnt', curses.color_pair(41))
                stdscr.addstr(y, x + 16, ' Gst', curses.color_pair(41))
                x += 4 * 5
                x += 4

                # per-realm aggregate columns
                stdscr.addstr(y, x + 0, ' Rlm', curses.color_pair(227))
                stdscr.addstr(y, x + 5, ' Rls')
                stdscr.addstr(y, x + 10, ' Rlk')
                stdscr.addstr(y, x + 15, ' Sessions', curses.color_pair(41))
                stdscr.addstr(y, x + 25, ' Messages', curses.color_pair(41))
                x += 5 * 5

                y += 1
                stdscr.addstr(y, 0, '-' * 240)
                y += 1

            last_mrealm = None
            for mrealm in mrealms:
                if stdscr:
                    # separator row between successive management realms
                    if last_mrealm:
                        stdscr.addstr(y, 0, '.' * 240)
                        y += 1

                try:
                    session, _ = yield create_management_session(
                        url=management_url, realm=mrealm, privkey_file=privkey_file)
                except Exception as e:
                    # NOTE(review): the error is only printed and execution
                    # falls through - if the connect failed, `session` below
                    # is unbound (or stale from a previous iteration).
                    # Confirm whether a `continue` was intended here.
                    print(e)

                node_oids = yield session.call(
                    'crossbarfabriccenter.mrealm.get_nodes')
                if not stdscr:
                    pprint(node_oids)

                for node_oid in node_oids:
                    node = yield session.call(
                        'crossbarfabriccenter.mrealm.get_node', node_oid)
                    if not stdscr:
                        pprint(node)

                    # NOTE(review): `if True:` looks like a leftover filter
                    # placeholder - kept as-is
                    if True:
                        node_authid = node['authid']
                        # node_pubkey = node['pubkey']

                        if node and node['status'] == 'online':
                            # fetch node status and OS-level system stats
                            node_status = yield session.call(
                                'crossbarfabriccenter.remote.node.get_status', node_oid)
                            if not stdscr:
                                pprint(node_status)

                            node_system_stats = yield session.call(
                                'crossbarfabriccenter.remote.node.get_system_stats', node_oid)
                            if not stdscr:
                                pprint(node_system_stats)

                            cpu_user = node_system_stats['cpu']['user']
                            cpu_system = node_system_stats['cpu']['system']
                            cpu_idle = node_system_stats['cpu']['idle']
                            # memory_total = node_system_stats['memory']['total']
                            # memory_avail = node_system_stats['memory']['available']
                            memory_perc = node_system_stats['memory'][
                                'percent']
                            # network_recv = node_system_stats['network']['bytes_recv_per_sec']
                            # network_sent = node_system_stats['network']['bytes_sent_per_sec']
                            network_conns = node_system_stats['network'][
                                'connection']['AF_INET']

                            # tz = get_timezone('Europe/Berlin')
                            # started = format_datetime(iso8601.parse_date(node_status['started']), tzinfo=tz, locale='de_DE', format='long')

                            # age of the last heartbeat, truncated to seconds
                            last_heartbeat = np.datetime64(
                                node['timestamp'], 'ns')
                            now = np.datetime64(time_ns(), 'ns')
                            if now > last_heartbeat:
                                last_heartbeat_ago = str(
                                    (now - last_heartbeat).astype("timedelta64[s]"))
                            else:
                                last_heartbeat_ago = None

                            node_title = node_status['title']

                            # get IDs for all workers running in this node
                            worker_info = {}
                            router_info = {}
                            workers = yield session.call(
                                'crossbarfabriccenter.remote.node.get_workers', node_oid)
                            for worker_id in workers:
                                # get worker detail information
                                # {'id': 'xbr1', 'pid': 11507, 'type': 'marketplace', 'status': 'started', 'created': '2020-06-22T06:15:44.589Z', 'started': '2020-06-22T06:15:48.224Z', 'startup_time': 3.635574, 'uptime': 13949.814363}
                                worker = yield session.call(
                                    'crossbarfabriccenter.remote.node.get_worker', node_oid, worker_id)
                                if not stdscr:
                                    pprint(worker)

                                if worker['status'] == 'started':
                                    # count started workers per type
                                    if worker['type'] not in worker_info:
                                        worker_info[worker['type']] = 0
                                    worker_info[worker['type']] += 1

                                    if worker['type'] == 'router':
                                        # get IDs for all realm running in router worker
                                        realm_oids = yield session.call(
                                            'crossbarfabriccenter.remote.router.get_router_realms', node_oid, worker_id)
                                        for realm_oid in realm_oids:
                                            # get realm detail information
                                            realm = yield session.call(
                                                'crossbarfabriccenter.remote.router.get_router_realm', node_oid,
                                                worker_id, realm_oid)
                                            if not stdscr:
                                                pprint(realm)

                                            # get per-realm messaging statistics
                                            realm_stats = yield session.call(
                                                'crossbarfabriccenter.remote.router.get_router_realm_stats', node_oid,
                                                worker_id, realm_oid)
                                            if not stdscr:
                                                pprint(realm_stats)

                                            realm_id = realm['id']
                                            realm_name = realm['config'][
                                                'name']
                                            realm_created = realm['created']

                                            # aggregate record for this realm on this worker
                                            ri_obj = {
                                                'node_oid': node_oid,
                                                'worker_id': worker_id,
                                                'id': realm_id,
                                                'name': realm_name,
                                                'created': realm_created,
                                                # count only connected rlinks
                                                'rlinks': len([
                                                    1 for rlink in realm['rlinks'] if rlink['connected']
                                                ]),
                                            }

                                            # newer node software exposes a dedicated rlinks API
                                            sw_latest = '20.6.2.dev2' in node_status[
                                                'title']
                                            if sw_latest:
                                                # get IDs of all rlinks running in this router worker and realm
                                                rlink_oids = yield session.call(
                                                    'crossbarfabriccenter.remote.router.get_router_realm_links',
                                                    node_oid, worker_id, realm_oid)
                                                ri_obj['rlinks'] = len(
                                                    rlink_oids)

                                            # {'realm001': {'messages': {'received': {'publish': 39, 'register': 42},
                                            #                            'sent': {'registered': 42}},
                                            #               'roles': 4,
                                            #               'sessions': 2}}
                                            # total message count = sum over all received + sent types
                                            received = realm_stats[realm_id][
                                                'messages']['received']
                                            sent = realm_stats[realm_id][
                                                'messages']['sent']
                                            total = 0
                                            for k in received:
                                                total += received[k]
                                            for k in sent:
                                                total += sent[k]
                                            ri_obj['messages'] = total
                                            ri_obj['received'] = realm_stats[
                                                realm_id]['messages'][
                                                    'received']
                                            ri_obj['sent'] = realm_stats[
                                                realm_id]['messages']['sent']
                                            ri_obj['sessions'] = realm_stats[
                                                realm_id]['sessions']
                                            ri_obj['roles'] = realm_stats[
                                                realm_id]['roles']

                                            if realm_name not in router_info:
                                                router_info[realm_name] = [
                                                    ri_obj
                                                ]
                                            else:
                                                router_info[realm_name].append(
                                                    ri_obj)
                        else:
                            # node offline: render placeholder values
                            worker_info = {}
                            router_info = {}
                            # started = '-'
                            last_heartbeat_ago = '-'
                            node_title = '-'
                            cpu_user = 0
                            cpu_system = 0
                            cpu_idle = 0
                            # memory_total = 0
                            # memory_avail = 0
                            memory_perc = 0
                            # network_recv = 0
                            # network_sent = 0
                            network_conns = 0

                        if stdscr:
                            # format a worker-count cell: green if nonzero, grey dash if zero
                            def fmt(data, key):
                                val = data.get(key, 0)
                                if val:
                                    return '{0: >4}'.format(
                                        val), curses.color_pair(41)
                                else:
                                    return '   -', curses.color_pair(8)

                            # render one node row
                            x = 0
                            stdscr.addstr(y, x, node_title)
                            x += 34
                            stdscr.addstr(y, x, mrealm, curses.color_pair(14))
                            x += 15
                            stdscr.addstr(y, x, node_authid,
                                          curses.color_pair(14))
                            x += 25
                            stdscr.addstr(y, x, node_oid,
                                          curses.color_pair(14))
                            x += 40
                            if node['status'] == 'online':
                                stdscr.addstr(y, x, node['status'],
                                              curses.color_pair(41))
                            else:
                                stdscr.addstr(y, x, node['status'],
                                              curses.color_pair(10))
                            x += 10
                            stdscr.addstr(y, x, last_heartbeat_ago)
                            x += 12

                            # format a numeric stats cell (grey, right-aligned width 4)
                            def fmt2(val):
                                return '{0: >4}'.format(
                                    val), curses.color_pair(8)

                            x += 4
                            stdscr.addstr(y, x + 0, *fmt2(round(cpu_user, 1)))
                            stdscr.addstr(y, x + 5, *fmt2(round(cpu_system, 1)))
                            stdscr.addstr(y, x + 10, *fmt2(round(cpu_idle, 1)))
                            x += 1
                            stdscr.addstr(y, x + 15, *fmt2(round(memory_perc, 1)))
                            x += 1
                            stdscr.addstr(y, x + 20,
                                          '{0: >10}'.format(network_conns))
                            x += 5 * 5
                            x += 10
                            stdscr.addstr(y, x + 0, *fmt(worker_info, 'proxy'))
                            stdscr.addstr(y, x + 4, *fmt(worker_info, 'router'))
                            stdscr.addstr(y, x + 8, *fmt(worker_info, 'marketplace'))
                            stdscr.addstr(y, x + 12, *fmt(worker_info, 'container'))
                            stdscr.addstr(y, x + 16, *fmt(worker_info, 'guest'))
                            x += 4 * 5

                        # cpu_user = node_system_stats['cpu']['user']
                        # cpu_system = node_system_stats['cpu']['system']
                        # cpu_idle = node_system_stats['cpu']['idle']
                        # memory_total = node_system_stats['memory']['total']
                        # memory_avail = node_system_stats['memory']['available']
                        # memory_perc = node_system_stats['memory']['percent']
                        # network_recv = node_system_stats['network']['bytes_recv_per_sec']
                        # network_sent = node_system_stats['network']['bytes_sent_per_sec']
                        # network_conns = node_system_stats['network']['connection']['AF_INET']

                        # aggregate per-realm stats over all router workers of this node
                        roles = 0
                        sessions = 0
                        messages = 0
                        rlinks = 0
                        for realm_id in router_info:
                            for realm_obj in router_info[realm_id]:
                                roles += realm_obj['roles']
                                sessions += realm_obj['sessions']
                                messages += realm_obj['messages']
                                rlinks += realm_obj['rlinks']

                        if stdscr:
                            x += 4
                            stdscr.addstr(
                                y, x + 0,
                                '{0: >4}'.format(len(router_info.keys())),
                                curses.color_pair(227))
                            stdscr.addstr(y, x + 5, '{0: >4}'.format(roles))
                            stdscr.addstr(y, x + 10, '{0: >4}'.format(rlinks))
                            stdscr.addstr(y, x + 15,
                                          '{0: >10}'.format(sessions),
                                          curses.color_pair(41))
                            stdscr.addstr(y, x + 25,
                                          '{0: >10}'.format(messages),
                                          curses.color_pair(41))
                            x += 5 * 5
                            y += 1

                last_mrealm = mrealm

            if stdscr:
                # table footer, then flush the whole frame at once
                stdscr.addstr(y, 0, '=' * 240)
                y += 1
                stdscr.refresh()

            # poll interval
            yield sleep(5)
    except Exception as e:
        sys.stderr.write(str(e))
        sys.exit(1)
def new_key(): return np.datetime64(time_ns(), 'ns'), uuid.uuid4(), ''
async def _authenticate(self, realm, authid, details, call_details):
    """
    WAMP-cryptosign authenticator for market member sessions.

    Three paths, depending on ``authextra``:

    1. only ``pubkey``: look up an existing user key in the database and
       authenticate as 'user' (member) if found, else as 'anonymous';
    2. ``pubkey`` plus exactly one of ``wallet_address``/``signature``:
       rejected as an invalid request;
    3. ``pubkey`` + ``wallet_address`` + ``signature``: verify the EIP712
       member-login signature, persist a new user key for the member (if
       the wallet is a known market actor) and authenticate as 'user',
       else as 'anonymous'.

    :param realm: The realm the session wants to join.
    :param authid: The authid requested by the client (may be replaced).
    :param details: WAMP HELLO details; must contain ``authmethod``
        'cryptosign' and ``authextra`` with at least a ``pubkey``.
    :param call_details: Call details of the authenticator invocation
        (unused in this body).
    :return: dict with ``pubkey``, ``realm``, ``authid``, ``role``,
        ``cache`` and ``extra.actor_type`` for the authenticated session.
    :raises ApplicationError: on malformed requests or invalid signatures.
    """
    self.log.info(
        '{klass}.authenticate(realm="{realm}", authid="{authid}", details={details})',
        klass=self.__class__.__name__,
        realm=realm,
        authid=authid,
        details=details)

    # -- validate the HELLO details ------------------------------------
    if 'authmethod' not in details:
        msg = 'missing "authmethod" in authentication details (WAMP HELLO message details)'
        raise ApplicationError(
            self.ERROR_INVALID_AUTH_REQUEST,
            self.ERROR_INVALID_AUTH_REQUEST_MSG.format(msg))

    authmethod = details['authmethod']
    if authmethod != 'cryptosign':
        msg = 'authmethod "{}" not permissible'.format(authmethod)
        raise ApplicationError(
            self.ERROR_INVALID_AUTH_REQUEST,
            self.ERROR_INVALID_AUTH_REQUEST_MSG.format(msg))

    if 'authextra' not in details:
        msg = 'Must provide authextra for authmethod cryptosign'
        raise ApplicationError(
            self.ERROR_INVALID_AUTH_REQUEST,
            self.ERROR_INVALID_AUTH_REQUEST_MSG.format(msg))

    authextra = details['authextra']

    if 'pubkey' not in authextra:
        msg = 'missing public key in authextra for authmethod cryptosign'
        raise ApplicationError(
            self.ERROR_INVALID_AUTH_REQUEST,
            self.ERROR_INVALID_AUTH_REQUEST_MSG.format(msg))

    pubkey = authextra['pubkey']
    # accept hex-encoded (optionally 0x-prefixed) pubkeys and normalize to bytes
    if isinstance(pubkey, str):
        pubkey = binascii.a2b_hex(without_0x(pubkey))
    assert is_cs_pubkey(pubkey)

    session_id = details['session']
    assert type(session_id) == int

    # FIXME: find a more elegant way to query the db.
    def get_actor(_txn, address):
        # look up the wallet address as any of the three actor types in
        # this market, returning the first match (None if no match)
        _actor = self._schema.actors[_txn, (self._market_oid, address,
                                            ActorType.PROVIDER)]
        if _actor:
            return _actor

        _actor = self._schema.actors[_txn, (self._market_oid, address,
                                            ActorType.CONSUMER)]
        if _actor:
            return _actor

        _actor = self._schema.actors[_txn, (self._market_oid, address,
                                            ActorType.PROVIDER_CONSUMER)]
        if _actor:
            return _actor

    # -- path 1: pubkey-only login of an already registered user key ---
    if ('wallet_address' not in authextra or not authextra['wallet_address']) and \
            ('signature' not in authextra or not authextra['signature']):
        with self._db.begin() as txn:
            user_key = self._xbrmm.user_keys[txn, pubkey]
            actor = None
            if user_key:
                actor = get_actor(txn, bytes(user_key.wallet_address))
                if actor:
                    authrole = 'user'
                    # authid embeds the wallet address, so API calls can
                    # check resource ownership from the authid alone
                    authid = 'member-{}'.format(
                        binascii.b2a_hex(user_key.wallet_address).decode())
                else:
                    authrole = 'anonymous'
                    authid = 'anonymous-{}'.format(
                        generate_serial_number())
            else:
                authrole = 'anonymous'
                authid = 'anonymous-{}'.format(generate_serial_number())

        # remember the session's pubkey for later lookups by session ID
        self._pubkey_by_session[session_id] = pubkey

        auth = {
            'pubkey': binascii.b2a_hex(pubkey),
            'realm': realm,
            'authid': authid,
            'role': authrole,
            'cache': True,
            'extra': {
                'actor_type': actor.actor_type if actor else 0
            }
        }
        self.log.info('{klass}.authenticate(..) => {auth}',
                      klass=self.__class__.__name__,
                      auth=auth)
        return auth

    # -- path 2: partial credentials (exactly one of wallet_address /
    # signature) are rejected ------------------------------------------
    if ('wallet_address' not in authextra or not authextra['wallet_address']) or \
            ('signature' not in authextra or not authextra['signature']):
        msg = 'Should provide `pubkey`, `wallet_address` and `signature` in authextra ' \
              'to authenticate new member. To authenticate existing member, only provide ' \
              '`pubkey`'
        raise ApplicationError(
            self.ERROR_INVALID_AUTH_REQUEST,
            self.ERROR_INVALID_AUTH_REQUEST_MSG.format(msg))

    # -- path 3: new member login with EIP712 signature ----------------
    wallet_address = authextra['wallet_address']
    assert is_address(wallet_address)
    signature = authextra['signature']
    assert is_signature(signature)

    # recover the signing address from the EIP712 member-login signature
    # and require it to match the claimed wallet address
    try:
        signer_address = recover_eip712_market_member_login(
            wallet_address, pubkey, signature)
    except Exception as e:
        self.log.warn(
            'EIP712 signature recovery failed (wallet_adr={wallet_adr}): {err}',
            wallet_adr=wallet_address,
            err=str(e))
        raise ApplicationError(
            'xbr.error.invalid_signature',
            'EIP712 signature recovery failed ({})'.format(e))

    if signer_address != wallet_address:
        self.log.warn(
            'EIP712 signature invalid: signer_address={signer_address}, wallet_adr={wallet_adr}',
            signer_address=signer_address,
            wallet_adr=wallet_address)
        raise ApplicationError('xbr.error.invalid_signature',
                               'EIP712 signature invalid')

    # write transaction: may persist a new user key for the member
    with self._db.begin(write=True) as txn:
        account = self._schema.members[txn, wallet_address]
        actor = None
        if account:
            actor = get_actor(txn, wallet_address)
            if actor:
                user_key = self._xbrmm.user_keys[txn, pubkey]
                if not user_key:
                    # first login with this pubkey: persist the user key
                    user_key = cfxdb.xbrmm.UserKey()
                    user_key.owner = account.account_oid
                    user_key.pubkey = pubkey
                    user_key.created = np.datetime64(txaio.time_ns(), 'ns')
                    user_key.wallet_address = wallet_address
                    user_key.signature = signature
                    self._xbrmm.user_keys[txn, pubkey] = user_key
                self._pubkey_by_session[session_id] = pubkey
                authrole = 'user'
                # authid = 'member-{}'.format(account.account_oid)
                # account.account_oid returns a pseudo value because
                # the "emit" from the xbr contracts does not include
                # account_oid in it, hence we don't really have that.
                # To compensate that, we could include wallet address
                # in authid, so that API calls could validate
                # if the caller really is the "owner" of a resource.
                authid = 'member-{}'.format(
                    binascii.b2a_hex(wallet_address).decode())
            else:
                authrole = 'anonymous'
                authid = 'anonymous-{}'.format(generate_serial_number())
        else:
            authrole = 'anonymous'
            authid = 'anonymous-{}'.format(generate_serial_number())

    auth = {
        'pubkey': binascii.b2a_hex(pubkey),
        'realm': realm,
        'authid': authid,
        'role': authrole,
        'cache': True,
        'extra': {
            'actor_type': actor.actor_type if actor else 0
        }
    }
    self.log.info('{klass}.authenticate(..) => {auth}',
                  klass=self.__class__.__name__,
                  auth=auth)
    return auth