def doTestIdleDays(self, *args, **kwargs):
    """
    Action method.

    Scans all known customers and collects those who look "idle": either no
    successful connection time was ever recorded for them, or their latest
    recorded connection is older than the configured number of idle days.
    Fires the `found-idle-customers` event with the list of rejected
    customers, or `no-idle-customers` when the check is disabled or when
    every customer showed some recent activity.
    """
    dead_customers = []
    # 0 (the default) disables the idle-customers check completely
    customer_idle_days = config.conf().getInt('services/customer-patrol/customer-idle-days', 0)
    if not customer_idle_days:
        self.automat('no-idle-customers')
        return
    for customer_idurl in contactsdb.customers():
        connected_time = ratings.connected_time(customer_idurl.to_bin())
        if connected_time is None:
            # a customer with no connection history at all is treated as idle
            lg.warn('last connected_time for customer %r is unknown, rejecting customer' % customer_idurl)
            dead_customers.append(customer_idurl)
            continue
        # compare the age of the latest connection against the configured limit
        if utime.get_sec1970() - connected_time > customer_idle_days * 24 * 60 * 60:
            lg.warn('customer %r connected last time %r seconds ago, rejecting customer' % (
                customer_idurl,
                utime.get_sec1970() - connected_time,
            ))
            dead_customers.append(customer_idurl)
    if dead_customers:
        lg.warn('found idle customers: %r' % dead_customers)
        self.automat('found-idle-customers', dead_customers)
    else:
        lg.info('all customers has some activity recently, no idle customers found')
        self.automat('no-idle-customers')
def doRememberTime(self, *args, **kwargs):
    """
    Action method.

    Persists the "last connected" moment for this node via `ratings`, but
    throttles the write: when an inbox packet was already seen less than
    5 minutes ago, the stored value is left untouched. The in-memory
    `latest_inbox_time` marker is always refreshed.
    """
    seen_recently = False
    if self.latest_inbox_time:
        seen_recently = utime.get_sec1970() - self.latest_inbox_time < 5 * 60
    if not seen_recently:
        ratings.remember_connected_time(self.idurl.to_bin())
    self.latest_inbox_time = utime.get_sec1970()
def set_suppliers(customer_idurl, ecc_map, suppliers_list, revision=None, publisher_idurl=None, expire=60 * 60):
    """
    Publish the customer's suppliers list as a validated DHT record.

    IDURL values are serialized to text; a missing revision is stored as 0.
    Returns the deferred produced by `dht_service.set_valid_data()`.
    """
    if _Debug:
        lg.args(_DebugLevel, customer_idurl=customer_idurl, ecc_map=ecc_map, suppliers_list=suppliers_list, revision=revision)
    record = {
        'type': 'suppliers',
        'timestamp': utime.get_sec1970(),
        'revision': revision if revision is not None else 0,
        'publisher_idurl': publisher_idurl.to_text() if publisher_idurl else None,
        'customer_idurl': customer_idurl.to_text(),
        'ecc_map': ecc_map,
        'suppliers': [one_supplier.to_text() for one_supplier in suppliers_list],
    }
    dht_key = dht_service.make_key(key=strng.to_text(customer_idurl), prefix='suppliers')
    return dht_service.set_valid_data(
        key=dht_key,
        json_data=record,
        rules=get_rules('suppliers'),
        expire=expire,
        collect_results=True,
    )
def doReadQueue(self, *args, **kwargs):
    """
    Action method.

    Sends a "queue-read" request to the active message broker, asking for
    messages after `last_sequence_id`. A failed delivery triggers the
    `queue-read-failed` event on this state machine.
    """
    outgoing = {
        'created': utime.get_sec1970(),
        'payload': 'queue-read',
        'last_sequence_id': self.last_sequence_id,
        'queue_id': self.active_queue_id,
        'consumer_id': self.member_id,
    }
    result = message.send_message(
        json_data=outgoing,
        recipient_global_id=self.active_broker_id,
        packet_id='queue_%s_%s' % (self.active_queue_id, packetid.UniqueID()),
        message_ack_timeout=config.conf().getInt('services/private-groups/message-ack-timeout'),
        skip_handshake=True,
        fire_callbacks=False,
    )
    if _Debug:
        result.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='group_member.doReadQueue')
    result.addErrback(lambda err: self.automat('queue-read-failed', err))
def set_message_broker(customer_idurl, broker_idurl, position=0, revision=None, expire=60 * 60):
    """
    Publish one message-broker record for the given customer and broker position.

    The DHT key combines the customer IDURL with the broker position, so each
    position is a separate record. Returns the deferred produced by
    `dht_service.set_valid_data()`.
    """
    if _Debug:
        lg.args(_DebugLevel, customer=customer_idurl, pos=position, broker=broker_idurl, rev=revision)
    record = {
        'type': 'message_broker',
        'timestamp': utime.get_sec1970(),
        'revision': revision if revision is not None else 0,
        'customer_idurl': customer_idurl.to_text(),
        'broker_idurl': broker_idurl.to_text(),
        # 'archive_folder_path': archive_folder_path,
        'position': position,
    }
    dht_key = dht_service.make_key(
        key='%s%d' % (strng.to_text(customer_idurl), position),
        prefix='message_broker',
    )
    return dht_service.set_valid_data(
        key=dht_key,
        json_data=record,
        rules=get_rules('message_broker'),
        expire=expire,
        collect_results=True,
    )
def push_group_message(json_message, direction, group_key_id, producer_id, sequence_id):
    """
    Queue a group chat message for every registered API consumer and
    immediately run `do_read()` to deliver pending messages.

    Always returns True.
    """
    for consumer_id in consumers_callbacks().keys():
        pending = message_queue().setdefault(consumer_id, [])
        pending.append({
            'type': 'group_message',
            'dir': direction,
            'to': group_key_id,
            'from': producer_id,
            'data': json_message,
            'packet_id': sequence_id,
            'owner_idurl': None,
            'time': utime.get_sec1970(),
        })
        if _Debug:
            lg.out(_DebugLevel, 'message.push_group_message "%d" at group "%s", %d pending messages for consumer %s' % (
                sequence_id,
                group_key_id,
                len(message_queue()[consumer_id]),
                consumer_id,
            ))
    # reactor.callLater(0, do_read)  # @UndefinedVariable
    do_read()
    return True
def __init__(self, parent, callOnReceived, creatorID, packetID, ownerID, remoteID, debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug, publish_events=False, **kwargs):
    """
    Builds `file_down()` state machine.

    Parses `packetID` into its global-ID components (customer, path, version,
    file name), computes the backup ID, and initializes request bookkeeping
    fields before handing control to the base automat constructor.
    """
    self.parent = parent
    # list of callbacks fired when the file is received
    self.callOnReceived = []
    self.callOnReceived.append(callOnReceived)
    self.creatorID = creatorID
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.fileName = fileName
    self.ownerID = ownerID
    self.remoteID = remoteID
    self.requestTime = None
    self.fileReceivedTime = None
    # at least 30 seconds, scaled with the expected block transfer duration
    self.requestTimeout = max(30, 2 * int(settings.getBackupBlockSize() / settings.SendingSpeedLimit()))
    self.result = ''
    self.created = utime.get_sec1970()
    super(FileDown, self).__init__(
        name="file_down_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, fileName),
        state="AT_STARTUP",
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs
    )
def push_outgoing_message(json_message, private_message_object, remote_identity, request, result):
    """
    Queue a just-sent private message for every registered API consumer and
    schedule `pop_messages()` on the reactor to deliver them.
    """
    for consumer_id in consumers_callbacks().keys():
        pending = message_queue().setdefault(consumer_id, [])
        pending.append({
            'type': 'private_message',
            'dir': 'outgoing',
            'to': private_message_object.recipient_id(),
            'from': private_message_object.sender_id(),
            'data': json_message,
            'id': request.PacketID,
            'time': utime.get_sec1970(),
        })
        if _Debug:
            lg.out(_DebugLevel, 'message.push_outgoing_message "%s" for consumer "%s", %d pending messages' % (request.PacketID, consumer_id, len(message_queue()[consumer_id])))
    reactor.callLater(0, pop_messages)  # @UndefinedVariable
def __init__(self, fileName, packetID, remoteID, ownerID, callOnAck=None, callOnFail=None):
    """
    Track a single outgoing file packet.

    Parses `packetID` into its global-ID components, measures the local file
    size, computes a send timeout from the configured speed limit, and
    reports the 'init' stage via `PacketReport()`.
    """
    self.fileName = fileName
    try:
        self.fileSize = os.path.getsize(os.path.abspath(fileName))
    except Exception:
        # was a bare "except:" which would also swallow SystemExit and
        # KeyboardInterrupt; keep the best-effort fallback to zero size,
        # but only for real errors
        lg.exc()
        self.fileSize = 0
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, _ = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.remoteID = remoteID
    self.ownerID = ownerID
    self.callOnAck = callOnAck
    self.callOnFail = callOnFail
    self.sendTime = None
    self.ackTime = None
    # maximum 5 seconds to get an Ack
    self.sendTimeout = 10 * 2 * (max(int(self.fileSize / settings.SendingSpeedLimit()), 5) + 5)
    self.result = ''
    self.created = utime.get_sec1970()
    PacketReport('send', self.remoteID, self.packetID, 'init')
def expire(self):
    """
    Remove every DHT record whose age exceeds its own `expireSeconds`.

    The internal node-state record (keyed by the sha1 hex digest of
    b'nodeState') is never expired.
    """
    now = utime.get_sec1970()
    digest = hashlib.sha1()
    digest.update(b'nodeState')
    node_state_key = digest.hexdigest()
    stale_keys = []
    for record_key in self._dataStore.keys():
        if record_key == node_state_key:
            # skip internal bookkeeping record
            continue
        item_data = self._dataStore.getItem(record_key)
        if not item_data:
            continue
        published = item_data.get('originallyPublished')
        ttl = item_data.get('expireSeconds')
        if ttl and published and (now - published > ttl):
            stale_keys.append(record_key)
    for record_key in stale_keys:
        if _Debug:
            lg.out(_DebugLevel, 'dht_service.expire [%s] removed' % base64.b32encode(record_key))
        del self._dataStore[record_key]
def _stop_marker(self):
    """
    Decide whether mining must stop.

    Returns True when mining never really started (negative start marker),
    when the attempts budget is exhausted, or when the time budget ran out.
    Otherwise counts one more attempt and returns False.
    """
    if self.mining_started < 0:
        return True
    if self.mining_counts >= self.max_mining_counts:
        return True
    elapsed = utime.get_sec1970() - self.mining_started
    if elapsed > self.max_mining_seconds:
        return True
    # still within budget: account for this attempt and keep going
    self.mining_counts += 1
    return False
def doStartMining(self, arg):
    """
    Action method.

    Records the mining start time and launches the mining job for a fixed
    coin id; success fires `coin-mined`, failure fires `stop` and logs the
    error.
    """
    self.mining_started = utime.get_sec1970()

    def _mined(result):
        return self.automat("coin-mined", result)

    def _failed(err):
        return self.automat("stop")

    def _log_error(err):
        return lg.exc(exc_value=err)

    mining_job = self._start(arg, "1111242458feb550512fb19bb6127bd4cd8ef2cb")
    mining_job.addCallback(_mined)
    mining_job.addErrback(_failed)
    mining_job.addErrback(_log_error)
def RunOfflineChecks():
    """
    Periodic task: for every user currently OFFLINE decide whether it is
    time to send another "offline-check" ping.

    A ping is sent when no check was done yet, when the latest check is
    older than 10 minutes, or when the user showed inbox activity less
    than a minute ago (he may be back online). Always returns True.
    """
    for status_machine in list(_OnlineStatusDict.values()):
        if status_machine.state != 'OFFLINE':
            # user is online or a check is already in progress: do nothing
            continue
        never_checked = not status_machine.latest_check_time
        stale_check = (not never_checked) and (utime.get_sec1970() - status_machine.latest_check_time > 10 * 60)
        seen_recently = bool(status_machine.latest_inbox_time) and (utime.get_sec1970() - status_machine.latest_inbox_time < 60)
        if never_checked or stale_check or seen_recently:
            status_machine.automat('offline-check')
    return True
def isRecentInbox(self, *args, **kwargs):
    """
    Condition method.

    Reports True while a handshake is already running for this node, False
    when no inbox packet was ever seen, otherwise compares the age of the
    latest inbox packet against 60 seconds.
    """
    if handshaker.is_running(self.idurl.to_bin()):
        return True
    if not self.latest_inbox_time:
        return False
    # NOTE(review): this returns True when the latest inbox packet is OLDER
    # than 60 seconds, which looks inverted for a method named isRecentInbox
    # (compare RunOfflineChecks which uses "< 60" for "seen recently") -
    # confirm against the state machine transitions before changing.
    inbox_age = utime.get_sec1970() - self.latest_inbox_time
    return inbox_age > 60
def doStartMining(self, arg):
    """
    Action method.

    Records the mining start time and launches the mining job; the result
    is handled by `_on_coin_mined`, failures fire `stop` and get logged.
    """
    self.mining_started = utime.get_sec1970()

    def _failed(err):
        return self.automat('stop')

    def _log_error(err):
        return lg.exc(exc_value=err)

    mining_job = self._start(arg)
    mining_job.addCallback(self._on_coin_mined)
    mining_job.addErrback(_failed)
    mining_job.addErrback(_log_error)
def to_json(self):
    """
    Serialize this snapshot into a plain dictionary.

    The 'deleted' key is present only for deleted snapshots and carries the
    timestamp generated at serialization time.
    """
    result = {
        'name': self.model_name,
        'id': self.snap_id,
        'data': self.data,
        'created': self.created,
    }
    if self.deleted:
        # deletion moment is stamped when the snapshot is serialized
        result['deleted'] = utime.get_sec1970()
    return result
def __init__(self, model_name, snap_id=None, data=None, created=None, deleted=False):
    """
    Build a model snapshot.

    When `created` is not provided (or falsy) the current time is used.
    """
    self.model_name = model_name
    self.snap_id = snap_id
    self.data = data
    # fall back to "now" when the creation moment was not supplied
    self.created = created if created else utime.get_sec1970()
    self.deleted = deleted
def doWriteState(self, *args, **kwargs):
    """
    Action method.

    Stores queue keeper info locally to be able to start up again after
    application restart.
    """
    state_snapshot = {
        'state': self.state,
        'position': self.known_position,
        'cooperated_brokers': self.cooperated_brokers,
        'streams': self.known_streams,
        'time': utime.get_sec1970(),
    }
    write_state(customer_id=self.customer_id, broker_id=self.broker_id, json_value=state_snapshot)
def set_nickname(key, idurl):
    """
    Store a nickname record in DHT under the given key.

    The key has the form "<nickname>:<position>"; both parts go into the
    record. Returns the deferred produced by `dht_service.set_valid_data()`.
    """
    if _Debug:
        lg.args(_DebugLevel, key, idurl)
    nickname, _, pos = key.partition(':')
    record = {
        'type': 'nickname',
        'timestamp': utime.get_sec1970(),
        'idurl': idurl.to_bin(),
        'nickname': nickname,
        'position': pos,
    }
    return dht_service.set_valid_data(key=key, json_data=record, rules=get_rules('nickname'))
def __init__(self, parent, fileName, packetID, remoteID, ownerID, callOnAck=None, callOnFail=None, debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug, publish_events=False, **kwargs):
    """
    Builds `file_up()` state machine.

    Parses `packetID` into its global-ID components, measures the local file
    size, computes a send timeout from the configured speed limit, and hands
    control to the base automat constructor.
    """
    self.parent = parent
    self.fileName = fileName
    try:
        self.fileSize = os.path.getsize(os.path.abspath(fileName))
    except Exception:
        # was a bare "except:" which would also swallow SystemExit and
        # KeyboardInterrupt; keep the best-effort fallback to zero size,
        # but only for real errors
        lg.exc()
        self.fileSize = 0
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    # renamed the last tuple element: the original rebound the `fileName`
    # parameter, shadowing the full path already stored in self.fileName
    customerGlobalID, remotePath, versionName, baseFileName = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.remoteID = remoteID
    self.ownerID = ownerID
    self.callOnAck = callOnAck
    self.callOnFail = callOnFail
    self.sendTime = None
    self.ackTime = None
    # maximum 5 seconds to get an Ack
    self.sendTimeout = 10 * 2 * (max(int(self.fileSize / settings.SendingSpeedLimit()), 5) + 5)
    self.result = ''
    self.created = utime.get_sec1970()
    super(FileUp, self).__init__(
        name="file_up_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, baseFileName),
        state="AT_STARTUP",
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs
    )
def set_identity(idurl, raw_xml_data):
    """
    Publish an identity XML document in DHT, keyed by its IDURL.

    Returns the deferred produced by `dht_service.set_valid_data()`.
    """
    if _Debug:
        lg.args(_DebugLevel, idurl)
    record = {
        'type': 'identity',
        'timestamp': utime.get_sec1970(),
        'idurl': idurl,
        'identity': raw_xml_data,
    }
    return dht_service.set_valid_data(key=idurl, json_data=record, rules=get_rules('identity'))
def set_relation(key, idurl, data, prefix, index, expire=60 * 5):
    """
    Store a "relation" record in DHT under the given key.

    The record lifetime used to be hard-coded to 5 minutes; it is now
    exposed as the `expire` keyword (default unchanged, so existing
    callers behave exactly as before), matching the signature of the
    other `set_relation()` variant in the project.

    Returns the deferred produced by `dht_service.set_valid_data()`.
    """
    if _Debug:
        lg.args(_DebugLevel, key, idurl, prefix, index)
    return dht_service.set_valid_data(
        key=key,
        json_data={
            'type': 'relation',
            'timestamp': utime.get_sec1970(),
            'idurl': idurl,
            'index': index,
            'prefix': prefix,
            'data': data,
        },
        rules=get_rules('relation'),
        expire=expire,
    )
def __init__(self, producer_id, queue_id, json_data, created=None):
    """
    Represent one message pushed into a p2p queue.

    Collects every known consumer subscribed to `queue_id`; a warning is
    logged (in debug mode) when nobody will receive this message.
    """
    self.message_id = make_message_id()
    self.producer_id = producer_id
    self.queue_id = queue_id
    # fall back to "now" when the creation moment was not supplied
    self.created = created if created else utime.get_sec1970()
    self.payload = json_data
    self.state = 'CREATED'
    self.notifications = {}
    self.success_notifications = 0
    self.failed_notifications = 0
    self.consumers = [one_consumer_id for one_consumer_id in consumer() if queue_id in consumer(one_consumer_id).queues]
    if not self.consumers:
        if _Debug:
            lg.warn('message will have no consumers')
def _do_send_message_to_broker(self, json_payload=None, packet_id=None):
    """
    Encrypt `json_payload` as a group message and deliver it to the
    active message broker.

    Raises a generic Exception when the payload cannot be encrypted;
    delivery failures are only logged (in debug mode) via the errback.
    """
    if packet_id is None:
        packet_id = packetid.UniqueID()
    if _Debug:
        lg.args(_DebugLevel, json_payload=json_payload, packet_id=packet_id)
    raw_payload = serialization.DictToBytes(
        json_payload,
        pack_types=True,
        encoding='utf-8',
    )
    try:
        private_message_object = message.GroupMessage(
            recipient=self.group_key_id,
            sender=self.producer_id,
        )
        private_message_object.encrypt(raw_payload)
    except Exception:
        # was a bare "except:" which would also swallow SystemExit and
        # KeyboardInterrupt; log and re-raise as an encryption failure
        lg.exc()
        raise Exception('message encryption failed')
    encrypted_payload = private_message_object.serialize()
    d = message.send_message(
        json_data={
            'msg_type': 'queue_message',
            'action': 'produce',
            'created': utime.get_sec1970(),
            'payload': encrypted_payload,
            'queue_id': self.active_queue_id,
            'producer_id': self.producer_id,
        },
        recipient_global_id=self.active_broker_id,
        packet_id=packetid.MakeQueueMessagePacketID(self.active_queue_id, packet_id),
        message_ack_timeout=config.conf().getInt('services/private-groups/message-ack-timeout'),
        skip_handshake=True,
        fire_callbacks=False,
    )
    if _Debug:
        d.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='message_producer._do_send_message_to_broker')
def push_incoming_message(request, private_message_object, decrypted_message_body):
    """
    Queue a just-received private message for every registered API consumer
    and schedule `pop_messages()` on the reactor to deliver them.
    """
    for consumer_id in consumers_callbacks().keys():
        pending = message_queue().setdefault(consumer_id, [])
        pending.append({
            'type': 'private_message',
            'dir': 'incoming',
            'to': private_message_object.recipient_id(),
            'from': private_message_object.sender_id(),
            'body': decrypted_message_body,
            'id': request.PacketID,
            'time': utime.get_sec1970(),
        })
        if _Debug:
            lg.out(_DebugLevel, 'message.push_incoming_message "%s" for consumer "%s", %d pending messages' % (
                request.PacketID, consumer_id, len(message_queue()[consumer_id])))
    reactor.callLater(0, pop_messages)
def push_outgoing_message(json_message, private_message_object, remote_identity, request, result):
    """
    Queue a just-sent message for every registered API consumer and run
    `do_read()` to deliver pending messages.

    The message type is derived from the packet id prefix: "queue_" marks a
    queue message, "qreplica_" a queue message replica, anything else a
    plain private message. Always returns False.
    """
    # the message type only depends on the packet id, compute it once
    if request.PacketID.startswith('queue_'):
        msg_type = 'queue_message'
    elif request.PacketID.startswith('qreplica_'):
        msg_type = 'queue_message_replica'
    else:
        msg_type = 'private_message'
    for consumer_id in consumers_callbacks().keys():
        pending = message_queue().setdefault(consumer_id, [])
        pending.append({
            'type': msg_type,
            'dir': 'outgoing',
            'to': private_message_object.recipient_id(),
            'from': private_message_object.sender_id(),
            'data': json_message,
            'packet_id': request.PacketID,
            'owner_idurl': request.OwnerID,
            'time': utime.get_sec1970(),
        })
        if _Debug:
            lg.out(_DebugLevel, 'message.push_outgoing_message "%s" for consumer "%s", %d pending messages for consumer %r' % (
                request.PacketID,
                consumer_id,
                len(message_queue()[consumer_id]),
                consumer_id,
            ))
    # reactor.callLater(0, do_read)  # @UndefinedVariable
    do_read()
    return False
def push_message(direction, msg_type, recipient_id, sender_id, packet_id, owner_idurl, json_message, run_consumers=True):
    """
    Queue one message for every registered API consumer callback.

    When `run_consumers` is False the message is only queued and 0 is
    returned; otherwise `do_read()` runs immediately and the return value
    tells whether at least one message was consumed.
    """
    for consumers_callback_id in consumers_callbacks().keys():
        pending = message_queue().setdefault(consumers_callback_id, [])
        pending.append({
            'type': msg_type,
            'dir': direction,
            'to': recipient_id,
            'from': sender_id,
            'data': json_message,
            'packet_id': packet_id,
            'owner_idurl': owner_idurl,
            'time': utime.get_sec1970(),
        })
        if _Debug:
            lg.args(_DebugLevel, dir=direction, msg_type=msg_type, to_id=recipient_id, from_id=sender_id, cb=consumers_callback_id, pending=len(message_queue()[consumers_callback_id]))
    if not run_consumers:
        return 0
    total_consumed = do_read()
    return total_consumed > 0
def expire(self):
    """
    Remove every DHT record whose age exceeds its own `expireSeconds`.

    The internal b'nodeState' record is never expired. Also dumps the DHT
    counters when the debug level permits.
    """
    now = utime.get_sec1970()
    stale_keys = []
    for record_key in self._dataStore.keys():
        if record_key == b'nodeState':
            # skip internal bookkeeping record
            continue
        item_data = self._dataStore.getItem(record_key)
        if not item_data:
            continue
        published = item_data.get('originallyPublished')
        ttl = item_data.get('expireSeconds')
        if ttl and published and (now - published > ttl):
            stale_keys.append(record_key)
    for record_key in stale_keys:
        if _Debug:
            lg.out(_DebugLevel, 'dht_service.expire [%s] removed' % base64.b32encode(record_key))
        del self._dataStore[record_key]
    if _DebugLevel <= 10:
        lg.out(_DebugLevel, 'DHT counters last %d sec: %s' % (int(KEY_EXPIRE_MIN_SECONDS / 2), drop_counters()))
def __init__(self, callOnReceived, creatorID, packetID, ownerID, remoteID):
    """
    Track a single outgoing Retrieve request for a remote file.

    Parses `packetID` into its global-ID components, computes the backup ID
    and the request timeout, and reports the 'init' stage via
    `PacketReport()`.
    """
    self.callOnReceived = [callOnReceived, ]
    self.creatorID = creatorID
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.fileName = fileName
    self.ownerID = ownerID
    self.remoteID = remoteID
    self.requestTime = None
    self.fileReceivedTime = None
    # at least 30 seconds, scaled with the expected block transfer duration
    self.requestTimeout = max(30, 2 * int(settings.getBackupBlockSize() / settings.SendingSpeedLimit()))
    self.result = ''
    self.created = utime.get_sec1970()
    PacketReport('request', self.remoteID, self.packetID, 'init')
def set_relation(key, idurl, data, prefix, index, expire=60 * 60):
    """
    Store a "relation" record in DHT under the given key.

    Returns the deferred produced by `dht_service.set_valid_data()`.
    """
    # TODO: set_relation() is OBSOLETE...
    # because of performance reasonse it is better to maintain only one DHT record for each relation exclusively
    # need to use another solution here instead of storing multiple records...
    # check out family_memeber()
    if _Debug:
        lg.args(_DebugLevel, key, idurl, prefix, index)
    record = {
        'type': 'relation',
        'timestamp': utime.get_sec1970(),
        'idurl': idurl,
        'index': index,
        'prefix': prefix,
        'data': data,
    }
    return dht_service.set_valid_data(key=key, json_data=record, rules=get_rules('relation'), expire=expire)
def set_suppliers(customer_idurl, ecc_map, suppliers_list, revision=None, publisher_idurl=None, expire=60 * 60):
    """
    Publish the customer's suppliers list as a validated DHT record.

    Values are stored as-is, without IDURL-to-text normalization. Returns
    the deferred produced by `dht_service.set_valid_data()`.
    """
    record = {
        'type': 'suppliers',
        'timestamp': utime.get_sec1970(),
        'revision': revision,
        'publisher_idurl': publisher_idurl,
        'customer_idurl': customer_idurl,
        'ecc_map': ecc_map,
        'suppliers': suppliers_list,
    }
    dht_key = dht_service.make_key(key=strng.to_text(customer_idurl), prefix='suppliers')
    return dht_service.set_valid_data(
        key=dht_key,
        json_data=record,
        rules=get_rules('suppliers'),
        expire=expire,
        collect_results=True,
    )