def get_contact_identity(idurl):
    """
    The Main Method Here - return identity object for given ID or None if not found.

    Only valid contacts for packets will be signed by local identity,
    suppliers, customers.
    """
    if idurl is None:
        return None
    idurl = strng.to_bin(idurl.strip())
    # my own identity is served from memory, not from the cache
    if idurl == my_id.getLocalID():
        return my_id.getLocalIdentity()
    # suppliers, customers and correspondents are all answered from the cache
    if is_supplier(idurl) or is_customer(idurl) or is_correspondent(idurl):
        return identitycache.FromCache(idurl)
    if identitycache.HasKey(idurl):
        # lg.warn("who is %s ?" % nameurl.GetName(idurl))
        return identitycache.FromCache(idurl)
    lg.warn("%s is NOT FOUND IN CACHE" % idurl)
    # TODO:
    # this is not correct:
    # need to check if other contacts is fine - if internet is turned off we can get lots fails ...
    return None
def AppendBinaryFile(filename, data, mode='a'):
    """
    Append `data` to the file at `filename`, creating the file if needed.

    Same as WriteBinaryFile but do not erase previous data in the file.
    When `mode` contains "b" the data is converted to bytes first.

    :return: True on success, False on any error (the error is logged)

    TODO: this is not atomic right now
    """
    # BUGFIX: previous version used a bare open()/close() pair - if open()
    # itself failed, "f" was unbound and the cleanup close() raised again.
    # "with" guarantees the handle is closed on every path.
    try:
        with open(filename, mode) as f:
            if 'b' in mode:
                f.write(strng.to_bin(data))
            else:
                f.write(data)
            f.flush()
            # push the data to disk, not only into OS buffers
            os.fsync(f.fileno())
    except:
        lg.exc()
        return False
    return True
def doStart(self, arg):
    """
    Action method: build per-protocol connection options and ask the
    transport interface to connect.
    """
    options = {
        'idurl': my_id.getLocalID(),
    }
    # id_contact stays empty: deriving it from my identity contacts is
    # currently disabled (see commented block below), so default_host wins
    id_contact = ''
    default_host = ''
    # ident = my_id.getLocalIdentity()
    # if ident:
    #     id_contact = ident.getContactsByProto().get(self.proto, '')
    # if id_contact:
    #     assert id_contact.startswith(self.proto + '://')
    #     id_contact = id_contact.lstrip(self.proto + '://')
    if self.proto == 'tcp':
        if not id_contact:
            # "external_ip:port" as bytes
            default_host = strng.to_bin(misc.readExternalIP()) + b':' + strng.to_bin(str(settings.getTCPPort()))
        options['host'] = id_contact or default_host
        options['tcp_port'] = settings.getTCPPort()
    elif self.proto == 'udp':
        if not id_contact:
            # "username@hostname" as bytes
            default_host = strng.to_bin(nameurl.GetName(my_id.getLocalID())) + b'@' + strng.to_bin(platform.node())
        options['host'] = id_contact or default_host
        options['dht_port'] = settings.getDHTPort()
        options['udp_port'] = settings.getUDPPort()
    elif self.proto == 'proxy':
        # proxy transport needs no extra options here
        pass
    elif self.proto == 'http':
        if not id_contact:
            default_host = strng.to_bin(misc.readExternalIP()) + b':' + strng.to_bin(str(settings.getHTTPPort()))
        options['host'] = id_contact or default_host
        options['http_port'] = settings.getHTTPPort()
    if _Debug:
        lg.out(8, 'network_transport.doStart connecting %s transport : %s' % (self.proto.upper(), options))
    self.interface.connect(options)
def doStart(self, *args, **kwargs):
    """
    Action method.

    Prepare protocol-specific connection options and start the transport
    by calling the interface "connect" method.
    """
    opts = {'idurl': my_id.getLocalID()}
    contact_from_identity = ''
    fallback_host = ''
    if self.proto == 'tcp':
        if not contact_from_identity:
            fallback_host = b':'.join((
                strng.to_bin(misc.readExternalIP()),
                strng.to_bin(str(settings.getTCPPort())),
            ))
        opts['host'] = contact_from_identity or fallback_host
        opts['tcp_port'] = settings.getTCPPort()
    elif self.proto == 'udp':
        if not contact_from_identity:
            fallback_host = b'@'.join((
                strng.to_bin(nameurl.GetName(my_id.getLocalID())),
                strng.to_bin(platform.node()),
            ))
        opts['host'] = contact_from_identity or fallback_host
        opts['dht_port'] = settings.getDHTPort()
        opts['udp_port'] = settings.getUDPPort()
    elif self.proto == 'proxy':
        pass
    elif self.proto == 'http':
        if not contact_from_identity:
            fallback_host = b':'.join((
                strng.to_bin(misc.readExternalIP()),
                strng.to_bin(str(settings.getHTTPPort())),
            ))
        opts['host'] = contact_from_identity or fallback_host
        opts['http_port'] = settings.getHTTPPort()
    if _Debug:
        lg.out(8, 'network_transport.doStart connecting %s transport : %s' % (self.proto.upper(), opts))
    self.interface.connect(opts)
def dumps(obj, indent=None, separators=None, sort_keys=None,
          ensure_ascii=False, encoding='utf-8', keys_to_text=False,
          values_to_text=False, empty_result='{}', **kw):
    """
    Calls `json.dumps()` with parameters.

    Always translates every byte string json value into text using
    `encoding` (via the `default=` hook).

    :param keys_to_text: first convert all dictionary keys to text
    :param values_to_text: first convert all dictionary values to text
    :param empty_result: returned when `obj` is None or an empty string
    :raises Exception: re-raises whatever `json.dumps()` raised; when
        `_Debug` is on, the failing object repr is dumped to a temp file
    """
    if obj is None or obj == '' or obj == b'':
        return empty_result
    enc_errors = kw.pop('errors', 'strict')

    def _to_text(v):
        # json.dumps() "default" hook: decode bytes, coerce anything else to text
        if strng.is_bin(v):
            v = v.decode(encoding, errors=enc_errors)
        if not strng.is_text(v):
            v = strng.to_text(v)
        return v

    if keys_to_text:
        obj = dict_keys_to_text(obj, encoding=encoding, errors=enc_errors)
    if values_to_text:
        obj = dict_values_to_text(obj, encoding=encoding, errors=enc_errors)
    try:
        if sys.version_info[0] < 3:
            # Python 2 json.dumps() still accepts an "encoding" argument
            return json.dumps(obj=obj, indent=indent, separators=separators,
                              sort_keys=sort_keys, ensure_ascii=ensure_ascii,
                              default=_to_text, encoding=encoding, **kw)
        return json.dumps(obj=obj, indent=indent, separators=separators,
                          sort_keys=sort_keys, ensure_ascii=ensure_ascii,
                          default=_to_text, **kw)
    except Exception as exc:
        if _Debug:
            import os
            import tempfile
            fd, _ = tempfile.mkstemp(suffix='err', prefix='jsn_dumps_', text=True)
            try:
                # BUGFIX: os.write() requires bytes - repr() returns text on
                # Python 3, so the previous direct write always failed here
                os.write(fd, strng.to_bin(repr(obj)))
            except:
                try:
                    os.write(fd, strng.to_bin(repr(type(obj))))
                except:
                    os.write(fd, b'failed to serialize object')
            os.close(fd)
        raise exc
def is_customer(idurl):
    """
    Return True if given ID is found in customers list.
    """
    normalized = strng.to_bin(idurl.strip())
    return normalized in customers()
def sha256(inp, hexdigest=False):
    """
    Return the SHA-256 digest of `inp` (converted to bytes first).

    With `hexdigest=True` the digest is returned as a hex string instead
    of raw bytes.
    """
    digest = SHA256.new(strng.to_bin(inp))
    return digest.hexdigest() if hexdigest else digest.digest()
def doSendMyIdentity(self, *args, **kwargs):
    """
    Action method: send my own Identity packet to the remote node.

    Builds a signed `Identity` packet carrying my serialized identity (or
    `self.fake_identity` when set) and pushes it either directly via
    `packet_out.create()` (when `self.skip_outbox` is set) or through
    `gateway.outbox()`. The "ack-received", "fail-received" or
    "ack-timeout" events are fired back into this state machine from the
    response callbacks.

    :raises Exception: when the local identity object is not valid
    """
    global _KnownChannels
    self.ping_attempts += 1
    if self.fake_identity:
        identity_object = self.fake_identity
    else:
        identity_object = my_id.getLocalIdentity()
    if not identity_object.Valid():
        raise Exception('can not use invalid identity for ping')
    # PacketID optionally includes the known channel counter
    if self.channel_counter:
        packet_id = '%s:%d:%d:%s' % (self.channel, _KnownChannels[self.channel], self.ping_attempts, packetid.UniqueID())
    else:
        packet_id = '%s:%d:%s' % (self.channel, self.ping_attempts, packetid.UniqueID())
    ping_packet = signed.Packet(
        Command=commands.Identity(),
        OwnerID=my_id.getLocalID(),
        CreatorID=my_id.getLocalID(),
        PacketID=packet_id,
        Payload=strng.to_bin(identity_object.serialize()),
        RemoteID=self.remote_idurl,
    )
    # both sending paths share identical callbacks - build them only once
    callbacks = {
        commands.Ack(): lambda response, info: self.automat('ack-received', response=response, info=info),
        commands.Fail(): lambda response, info: self.automat('fail-received', response=response, info=info),
        None: lambda pkt_out: self.automat('ack-timeout', pkt_out),
    }
    if self.skip_outbox:
        packet_out.create(
            outpacket=ping_packet,
            wide=True,
            response_timeout=self.ack_timeout,
            callbacks=callbacks,
            keep_alive=self.keep_alive,
        )
    else:
        gateway.outbox(
            outpacket=ping_packet,
            wide=True,
            response_timeout=self.ack_timeout,
            callbacks=callbacks,
            keep_alive=self.keep_alive,
        )
    if _Debug:
        lg.args(_DebugLevel, packet_id=packet_id, remote_idurl=self.remote_idurl, ping_attempts=self.ping_attempts)
def buildDefaultIdentity(name='', ip='', idurls=None):
    """
    Use some local settings and config files to create some new identity.

    Nice to provide a user name or it will have a form like:
    [ip_address]_[date].

    :param idurls: optional list of identity sources (IDURLs); when empty,
        a single local "http://127.0.0.1/<name>.xml" source is generated
    """
    if not ip:
        ip = misc.readExternalIP()
    if not name:
        name = ip.replace('.', '-') + '_' + time.strftime('%M%S')
    lg.out(4, 'my_id.buildDefaultIdentity: %s %s' % (name, ip))
    # create a new identity object
    # it is stored in memory and another copy on disk drive
    ident = identity.identity(xmlsrc=identity.default_identity_src)
    # this is my IDURL address
    # you can have many IDURL locations for same identity
    # just need to keep all them synchronized
    # this is identity propagate procedure, see p2p/propagate.py
    # BUGFIX: previous signature used a mutable default argument
    # (idurls=[]) and mutated it below, so generated sources accumulated
    # across repeated calls
    if idurls is None:
        idurls = []
    if len(idurls) == 0:
        idurls.append(b'http://127.0.0.1/%s.xml' % strng.to_bin(name.lower()))
    for idurl in idurls:
        ident.sources.append(strng.to_bin(idurl.strip()))
    # create a full list of needed transport methods
    # to be able to accept incoming traffic from other nodes
    new_contacts, new_order = buildProtoContacts(ident)
    if len(new_contacts) == 0:
        if settings.enableTCP() and settings.enableTCPreceiving():
            new_contacts['tcp'] = b'tcp://' + strng.to_bin(ip) + b':' + strng.to_bin(str(settings.getTCPPort()))
            new_order.append('tcp')
        if settings.enableUDP() and settings.enableUDPreceiving():
            _, servername, _, _ = nameurl.UrlParse(ident.sources[0])
            new_contacts['udp'] = b'udp://' + strng.to_bin(name.lower()) + b'@' + strng.to_bin(servername)
            new_order.append('udp')
        if settings.enableHTTP() and settings.enableHTTPreceiving():
            new_contacts['http'] = b'http://' + strng.to_bin(ip) + b':' + strng.to_bin(str(settings.getHTTPPort()))
            new_order.append('http')
    # erase current contacts from my identity
    ident.clearContacts()
    # add contacts data to the local identity
    for proto in new_order:
        contact = new_contacts.get(proto, None)
        if contact is None:
            lg.warn('proto %s was not found in contacts' % proto)
            continue
        ident.setProtoContact(proto, contact)
    # set other info
    ident.certificates = []
    ident.setDate(time.strftime('%b %d, %Y'))
    ident.setPostage(1)
    ident.setRevision(0)
    ident.setVersion('')  # TODO: put latest git commit hash here
    # put my public key in my identity
    ident.setPublicKey(key.MyPublicKey())
    # generate signature
    ident.sign()
    # validate new identity
    if not ident.Valid():
        lg.warn('generated identity is not valid !!!')
    return ident
def UnpackListFiles(payload, method):
    """
    Reverse of PackListFiles(): decompress the payload when the
    "Compressed" method was used, otherwise return it unchanged.
    """
    if method == "Compressed":
        return strng.to_text(zlib.decompress(strng.to_bin(payload)))
    # "Text" and any unknown method: payload is returned as-is
    return payload
def _do_forward_inbox_packet(self, *args, **kwargs):
    """
    Wrap an inbox packet into an encrypted "Relay" packet and push it to
    the node standing behind my proxy (the routed receiver).

    args[0] is expected to be a (receiver_idurl, newpacket, info) tuple.
    """
    # encrypt with proxy_receiver()'s key and sent to man behind my proxy
    receiver_idurl, newpacket, info = args[0]
    route_info = self.routes.get(receiver_idurl, None)
    if not route_info:
        lg.warn('route with %s not found for inbox packet: %s' % (receiver_idurl, newpacket))
        return
    hosts = route_info['address']
    if len(hosts) == 0:
        # fall back to the identity contacts stored with the route
        lg.warn('route with %s do not have actual info about the host, use identity contacts instead' % receiver_idurl)
        hosts = route_info['contacts']
    if len(hosts) == 0:
        lg.warn('has no known contacts for route with %s' % receiver_idurl)
        return
    if len(hosts) > 1:
        lg.warn('found more then one channel with receiver %s : %r' % (
            receiver_idurl,
            hosts,
        ))
    # only the first known (proto, host) pair is actually used
    receiver_proto, receiver_host = strng.to_bin(hosts[0][0]), strng.to_bin(hosts[0][1])
    publickey = route_info['publickey']
    # seal the original packet with a fresh session key,
    # encrypted for the receiver's public key
    block = encrypted.Block(
        CreatorID=my_id.getLocalID(),
        BackupID='routed incoming data',
        BlockNumber=0,
        SessionKey=key.NewSessionKey(),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=newpacket.Serialize(),
        EncryptKey=lambda inp: key.EncryptOpenSSHPublicKey(publickey, inp),
    )
    raw_data = block.Serialize()
    routed_packet = signed.Packet(
        commands.Relay(),
        newpacket.OwnerID,
        my_id.getLocalID(),
        newpacket.PacketID,
        raw_data,
        receiver_idurl,
    )
    # send straight to the route host, bypassing normal outbox routing
    pout = packet_out.create(
        newpacket,
        wide=False,
        callbacks={},
        route={
            'packet': routed_packet,
            'proto': receiver_proto,
            'host': receiver_host,
            'remoteid': receiver_idurl,
            'description': ('Relay_%s[%s]_%s' % (newpacket.Command, newpacket.PacketID, nameurl.GetName(receiver_idurl))),
        },
    )
    if _Debug:
        lg.out(_DebugLevel, '<<<Relay-IN-OUT %s %s:%s' % (
            str(newpacket),
            info.proto,
            info.host,
        ))
        lg.out(_DebugLevel, ' sent to %s://%s with %d bytes in %s' % (receiver_proto, receiver_host, len(raw_data), pout))
    # drop references to the big payloads early
    del raw_data
    del block
    del newpacket
    del routed_packet
def key_to_hash(key):
    """
    Return the 20-byte SHA1 digest of the given key (converted to bytes).
    """
    return hashlib.sha1(strng.to_bin(key)).digest()
def ParseGlobalID(inp, detect_version=False, as_field=True, fast=True):
    """
    Split input string by parts according to different global ID formats:

    For such input (global resource path):

        "[email protected]:myfiles/animals/cat.png#F20160313043757PM"

    returns such dictionary object:

        {
            "user": "******",
            "key_alias": "group_abc",
            "key_id": "[email protected]",
            "idhost": "first-machine.com",
            "customer": "*****@*****.**",
            "idurl": b"http://first-machine.com/alice.xml",
            "path": "myfiles/animals/cat.png",
            "version": "F20160313043757PM",
        }

    For such input (global path ID) with `detect_version=True`:

        "[email protected]:1/2/3/F20160313043757PM/4-5-Parity"

    returns such dictionary object:

        {
            "user": "******",
            "key_alias": "group_abc",
            "key_id": "[email protected]",
            "idhost": "first-machine.com",
            "customer": "*****@*****.**",
            "idurl": b"http://first-machine.com/alice.xml",
            "path": "1/2/3/F20160313043757PM/4-5-Parity",
            "version": "F20160313043757PM",
        }

    :param detect_version: also extract the version name from a path ID
    :param as_field: wrap the resulting "idurl" into an id_url field object
    :param fast: split key_alias/user with rpartition('$') instead of regex
    """
    result = {
        "user": "",
        "key_alias": "",
        "key_id": "",
        "idhost": "",
        "customer": "",
        "idurl": b'',
        "path": "",
        "version": "",
    }
    if not inp:
        if as_field:
            from userid import id_url
            result['idurl'] = id_url.field(result['idurl'])
        return result
    inp = strng.to_text(inp)
    if inp.count('&') == 2:
        # this is GLOBAL_ID_QUEUE_ID format : just need to get rid of the last supplier_id part and
        # translate it into GLOBAL_ID_KEY_USER format
        inp, _, _ = inp.strip().rpartition('&')
        inp = inp.replace('&', '$')
    # split "user-part:path-part" on the last colon
    if inp.count(':'):
        user, _, path = inp.strip().rpartition(':')
    else:
        if inp.count('@'):
            # no path - the whole input is the user part
            user = inp
            path = ''
        else:
            # no user - the whole input is the path part
            user = ''
            path = inp
    if user:
        user_and_key, _, idhost = user.strip().rpartition('@')
        if not user_and_key or not idhost:
            return result
        try:
            if fast:
                # fast path: "key_alias$user" split on the last "$"
                _key_alias, _, _user = user_and_key.rpartition('$')
                result['key_alias'] = _key_alias
                result['user'] = _user
            else:
                # slow path: try both key/user regex orderings
                user_key = re.match(_REGEX_GLOBAL_ID_KEY_USER, user_and_key)
                if not user_key:
                    user_key = re.match(_REGEX_GLOBAL_ID_USER_KEY, user_and_key)
                if user_key:
                    result['user'] = user_key.group('user')
                    result['key_alias'] = user_key.group('key_alias')
                else:
                    result['user'] = user_and_key
        except:
            return result
        result['idhost'] = idhost
        # "host_port" notation is translated back to "host:port"
        if result['idhost'].count('_'):
            _pos = result['idhost'].rfind('_')
            port = result['idhost'][_pos + 1:]
            try:
                port = int(port)
            except:
                port = -1
            if port >= 0:
                result['idhost'] = "%s:%d" % (result['idhost'][:_pos], port)
        if result['user'] and result['idhost']:
            result['idurl'] = strng.to_bin('http://{}/{}.xml'.format(result['idhost'], result['user']))
            result['customer'] = '{}@{}'.format(result['user'], result['idhost'].replace(':', '_'))
    if path:
        if path.count('#'):
            # explicit "#version" suffix wins
            path, _, version = path.rpartition('#')
            result['version'] = version
        result['path'] = path
        if detect_version:
            # try to recognize the version name from the path ID itself
            try:
                from lib import packetid
                backupID, _, fileName = path.rpartition('/')
                if packetid.IsPacketNameCorrect(fileName):
                    _, _, versionName = backupID.rpartition('/')
                    result['version'] = versionName
            except:
                pass
    if not result['key_alias']:
        result['key_alias'] = 'master'
    if result['customer']:
        result['key_id'] = MakeGlobalKeyID(result['key_alias'], result['customer'])
    if as_field:
        from userid import id_url
        result['idurl'] = id_url.field(result['idurl'])
    return result
def doSubstituteSupplier(self, *args, **kwargs):
    """
    Action method: place a newly hired supplier (args[0]) into the
    suppliers list, either at the known family position or at the first
    empty / dismissed spot, then persist and broadcast the change via a
    "supplier-modified" event.
    """
    new_idurl = strng.to_bin(args[0])
    family_position = kwargs.get('family_position')
    current_suppliers = list(contactsdb.suppliers())
    old_idurl = None
    if family_position in self.hire_list:
        self.hire_list.remove(family_position)
        lg.info('found position on which new supplier suppose to be hired: %d' % family_position)
    else:
        lg.warn('did not found position for new supplier to be hired on')
    if new_idurl in current_suppliers:
        raise Exception('%s is already supplier' % new_idurl)
    # NOTE(review): "if not family_position" also triggers when the
    # position is 0 - confirm a zero family position is never passed here
    if not family_position:
        lg.warn('unknown family_position from supplier results, will pick first empty spot')
        position = -1
        old_idurl = None
        for i in range(len(current_suppliers)):
            if not current_suppliers[i].strip():
                # first empty spot wins
                position = i
                break
            if current_suppliers[i] in self.dismiss_list:
                # self.dismiss_list.remove(current_suppliers[i])
                # otherwise replace the first supplier marked for dismissal
                position = i
                old_idurl = current_suppliers[i]
                break
        family_position = position
    lg.out(10, 'fire_hire.doSubstituteSupplier family_position=%d' % family_position)
    contactsdb.add_supplier(idurl=new_idurl, position=family_position)
    contactsdb.save_suppliers()
    misc.writeSupplierData(
        new_idurl,
        'connected',
        time.strftime('%d-%m-%Y %H:%M:%S'),
        my_id.getLocalID(),
    )
    from main import control
    control.on_suppliers_changed(current_suppliers)
    if family_position < 0:
        # no spot was found: supplier was appended as a brand new one
        lg.out(2, '!!!!!!!!!!! ADDED NEW SUPPLIER : %s' % new_idurl)
        events.send('supplier-modified', dict(
            new_idurl=new_idurl,
            old_idurl=None,
            position=family_position,
            ecc_map=eccmap.Current().name,
            family_snapshot=contactsdb.suppliers(),
        ))
    else:
        if old_idurl:
            lg.out(2, '!!!!!!!!!!! SUBSTITUTE EXISTING SUPPLIER %d : %s->%s' % (family_position, old_idurl, new_idurl))
            events.send('supplier-modified', dict(
                new_idurl=new_idurl,
                old_idurl=old_idurl,
                position=family_position,
                ecc_map=eccmap.Current().name,
                family_snapshot=contactsdb.suppliers(),
            ))
        else:
            lg.out(2, '!!!!!!!!!!! REPLACE EMPTY SUPPLIER %d : %s' % (family_position, new_idurl))
            events.send('supplier-modified', dict(
                new_idurl=new_idurl,
                old_idurl=None,
                position=family_position,
                ecc_map=eccmap.Current().name,
                family_snapshot=contactsdb.suppliers(),
            ))
    self.restart_interval = 1.0
def transform_key(self, key):
    """
    Map a key to its 16-byte MD5 digest (the key is converted to bytes first).
    """
    raw_key = strng.to_bin(key)
    return md5(raw_key).digest()
def get_customer_meta_info(customer_idurl):
    """
    Return stored meta info dict for the given customer IDURL, or an empty
    dict when that customer is unknown.
    """
    global _CustomersMetaInfo
    normalized_idurl = strng.to_bin(customer_idurl.strip())
    return _CustomersMetaInfo.get(normalized_idurl, {})
def is_correspondent(idurl):
    """
    Return True if given ID is found in correspondents list.
    """
    normalized = strng.to_bin(idurl.strip())
    return normalized in correspondents_ids()
def is_supplier(idurl, customer_idurl=None):
    """
    Return True if given ID is found in suppliers list.

    A falsy `idurl` is returned unchanged (it evaluates False), matching
    the short-circuit behavior of the original `and` expression.
    """
    if not idurl:
        return idurl
    return strng.to_bin(idurl.strip()) in suppliers(customer_idurl=customer_idurl)
def validate_before_store(key, value, originalPublisherID, age, expireSeconds, **kwargs):
    """
    DHT store-validation hook: decide whether `value` may be written under
    `key`.

    The new value must be valid JSON with a "type" field; when a previous
    record exists it must keep the same type and carry a non-decreasing
    "revision". For "suppliers" records with an unchanged revision the
    ecc_map and suppliers list must also stay identical.

    :return: True when the store operation is allowed
    :raises ValueError: when the new value violates any of the rules above
    """
    try:
        json_new_value = json.loads(value)
    except:
        # not a json data to be written - this is not valid
        lg.exc()
        raise ValueError('input data is not a json value')
    if _Debug:
        lg.out(_DebugLevel, 'dht_service.validate_before_store key=[%s] json=%r' % (
            base64.b64encode(key),
            json_new_value,
        ))
    new_record_type = json_new_value.get('type')
    if not new_record_type:
        if _Debug:
            lg.out(_DebugLevel, ' new json data do not have "type" field present, store operation FAILED')
        raise ValueError('input data do not have "type" field present')
    if key not in node()._dataStore:
        # nothing stored yet under this key - anything valid may be written
        if _Debug:
            lg.out(_DebugLevel, ' previous value not exists yet, store OK')
        return True
    prev_value = node()._dataStore[key]
    try:
        json_prev_value = json.loads(prev_value)
    except:
        # non-json garbage in the DHT may always be overwritten
        if _Debug:
            lg.out(_DebugLevel, ' current value in DHT is not a json data, will be overwritten, store OK')
        return True
    prev_record_type = json_prev_value.get('type')
    if prev_record_type and prev_record_type != new_record_type:
        if _Debug:
            lg.out(_DebugLevel, ' new json data type did not match to existing record type, store operation FAILED')
        raise ValueError('new json data type do not match to existing record type')
    # TODO: need to include "key" field into DHT record and validate it as well
    # new_record_key = json_new_value.get('key')
    # if not new_record_key:
    #     if _Debug:
    #         lg.out(_DebugLevel, '    new json data do not have "key" field present, store operation FAILED')
    #     return False
    # if new_record_key != key:
    #     if _Debug:
    #         lg.out(_DebugLevel, '    new json data do not have "key" field set properly, store operation FAILED')
    #     return False
    # prev_record_key = json_prev_value.get('key')
    # if prev_record_key and prev_record_key != new_record_key:
    #     if _Debug:
    #         lg.out(_DebugLevel, '    new json data "key" field do not match to existing record "key", store operation FAILED')
    #     return False
    # missing/broken revision fields are normalized to -1
    try:
        prev_revision = int(json_prev_value['revision'])
    except:
        prev_revision = -1
    try:
        new_revision = int(json_new_value['revision'])
    except:
        new_revision = -1
    if prev_revision >= 0:
        if new_revision < 0:
            if _Debug:
                lg.out(_DebugLevel, ' new json data must have a revision, store operation FAILED')
            raise ValueError('new json data must have a revision')
        if new_revision < prev_revision:
            if _Debug:
                lg.out(_DebugLevel, ' new json data must increment revision number, store operation FAILED')
            raise ValueError('new json data must increment revision number')
        if new_revision == prev_revision:
            # same revision: a "suppliers" record must be byte-for-byte stable
            if prev_record_type == 'suppliers':
                prev_ecc_map = json_prev_value.get('ecc_map')
                new_ecc_map = json_new_value.get('ecc_map')
                if prev_ecc_map and new_ecc_map != prev_ecc_map:
                    if _Debug:
                        lg.out(_DebugLevel, ' new json data have same revision but different ecc_map, store operation FAILED')
                    raise ValueError('new json data have same revision but different ecc_map')
                prev_suppliers = [
                    strng.to_bin(idurl.strip()) for idurl in json_prev_value.get('suppliers', [])
                ]
                new_suppliers = [
                    strng.to_bin(idurl.strip()) for idurl in json_new_value.get('suppliers', [])
                ]
                if prev_suppliers != new_suppliers:
                    if _Debug:
                        lg.out(_DebugLevel, ' new json data have same revision but different suppliers list, store operation FAILED')
                    raise ValueError('new json data have same revision but different suppliers list')
    if _Debug:
        lg.out(_DebugLevel, ' new json data is valid and matching existing DHT record, store OK')
    return True
def _on_incoming_contacts_packet(self, newpacket, info):
    """
    Handle an incoming "contacts" packet addressed to the family_member
    service.

    The packet payload is a serialized dict; only the "family_member"
    contacts space is accepted, and the two known payload types
    ("suppliers_list" and "supplier_position") are forwarded into the
    matching family_member() state machine as a "contacts-received" event.

    :return: True when the packet was dispatched, False otherwise
    """
    from logs import lg
    from lib import serialization
    from lib import strng
    from supplier import family_member
    from userid import my_id
    try:
        json_payload = serialization.BytesToDict(newpacket.Payload, keys_to_text=True)
        contacts_type = strng.to_text(json_payload['type'])
        contacts_space = strng.to_text(json_payload['space'])
    except:
        lg.exc()
        return False
    if contacts_space != 'family_member':
        # not for this service
        return False
    if contacts_type == 'suppliers_list':
        try:
            customer_idurl = strng.to_bin(json_payload['customer_idurl'])
            ecc_map = strng.to_text(json_payload['customer_ecc_map'])
            suppliers_list = list(map(strng.to_bin, json_payload['suppliers_list']))
            transaction_revision = json_payload.get('transaction_revision')
        except:
            lg.exc()
            return False
        if customer_idurl == my_id.getLocalIDURL():
            # my own family is managed locally, not via incoming packets
            lg.warn('received contacts for my own customer family')
            return False
        fm = family_member.by_customer_idurl(customer_idurl)
        if not fm:
            lg.warn('family_member() instance not found for incoming %s from %s for customer %r' % (
                newpacket,
                info,
                customer_idurl,
            ))
            return False
        fm.automat('contacts-received', {
            'type': contacts_type,
            'packet': newpacket,
            'customer_idurl': customer_idurl,
            'customer_ecc_map': ecc_map,
            'suppliers_list': suppliers_list,
            'transaction_revision': transaction_revision,
        })
        return True
    elif contacts_type == 'supplier_position':
        try:
            customer_idurl = strng.to_bin(json_payload['customer_idurl'])
            ecc_map = strng.to_text(json_payload['customer_ecc_map'])
            supplier_idurl = strng.to_bin(json_payload['supplier_idurl'])
            supplier_position = json_payload['supplier_position']
            family_snapshot = json_payload.get('family_snapshot')
        except:
            lg.exc()
            return False
        if customer_idurl == my_id.getLocalIDURL():
            lg.warn('received contacts for my own customer family')
            return False
        fm = family_member.by_customer_idurl(customer_idurl)
        if not fm:
            lg.warn('family_member() instance not found for incoming %s from %s for customer %r' % (
                newpacket,
                info,
                customer_idurl,
            ))
            return False
        fm.automat('contacts-received', {
            'type': contacts_type,
            'packet': newpacket,
            'customer_idurl': customer_idurl,
            'customer_ecc_map': ecc_map,
            'supplier_idurl': supplier_idurl,
            'supplier_position': supplier_position,
            'family_snapshot': family_snapshot,
        })
        return True
    # unknown contacts_type
    return False
def PackListFiles(plaintext, method):
    """
    Serialize a list-files payload with the given method: "Text" keeps the
    plaintext unchanged, "Compressed" applies zlib compression to its byte
    form. Any other method yields an empty string.
    """
    if method == "Text":
        return plaintext
    if method == "Compressed":
        return zlib.compress(strng.to_bin(plaintext))
    return ''
def SendToIDs(idlist, wide=False, ack_handler=None, timeout_handler=None, response_timeout=20):
    """
    Same, but send to many IDs and also check previous packets to not re-send.

    Sends my own signed Identity packet to every IDURL in `idlist`,
    skipping nodes that already have several Identity packets waiting in
    the outgoing queue.

    :return: number of packets actually queued for sending
    """
    global _PropagateCounter
    if _Debug:
        lg.out(_DebugLevel, "propagate.SendToIDs to %d users, wide=%s" % (len(idlist), wide))
    if ack_handler is None:
        ack_handler = HandleAck
    if timeout_handler is None:
        timeout_handler = HandleTimeOut
    LocalIdentity = my_id.getLocalIdentity()
    Payload = strng.to_bin(LocalIdentity.serialize())
    alreadysent = set()
    totalsent = 0
    inqueue = {}
    found_previous_packets = 0
    # count Identity packets already queued per remote node
    for pkt_out in packet_out.queue():
        if id_url.is_in(pkt_out.remote_idurl, idlist, as_field=False):
            if pkt_out.description.count('Identity'):
                if pkt_out.remote_idurl not in inqueue:
                    inqueue[pkt_out.remote_idurl] = 0
                inqueue[pkt_out.remote_idurl] += 1
                found_previous_packets += 1
    for contact in idlist:
        if not contact:
            continue
        if contact in alreadysent:
            # just want to send once even if both customer and supplier
            continue
        if contact in inqueue and inqueue[contact] > 2:
            # now only 2 protocols is working: tcp and udp
            if _Debug:
                lg.out(_DebugLevel, ' skip sending [Identity] to %s, packet already in the queue' % contact)
            continue
        p = signed.Packet(
            Command=commands.Identity(),
            OwnerID=my_id.getLocalID(),
            CreatorID=my_id.getLocalID(),
            PacketID=('propagate:%d:%s' % (_PropagateCounter, packetid.UniqueID())),
            Payload=Payload,
            RemoteID=contact,
        )
        _PropagateCounter += 1
        if _Debug:
            lg.out(_DebugLevel, " sending [Identity] to %s" % nameurl.GetName(contact))
        gateway.outbox(p, wide, response_timeout=response_timeout, callbacks={
            commands.Ack(): ack_handler,
            commands.Fail(): ack_handler,
            None: timeout_handler,
        })
        if wide:
            # this is a ping packet - need to clear old info
            p2p_stats.ErasePeerProtosStates(contact)
            p2p_stats.EraseMyProtosStates(contact)
        alreadysent.add(contact)
        totalsent += 1
    del alreadysent
    return totalsent
def isCurrentInterfaceActive(self, *args, **kwargs):
    """
    Condition method: check that my local IP or my external IP is present
    in the given collection of active addresses (args[0]).
    """
    # I am not sure about external IP,
    # because if you have a white IP it should be the same with your local IP
    if strng.to_bin(misc.readLocalIP()) in args[0]:
        return True
    return strng.to_bin(misc.readExternalIP()) in args[0]
def SendToIDs(idlist, wide=False, ack_handler=None, timeout_handler=None, response_timeout=20):
    """
    Same, but send to many IDs and also check previous packets to not re-send.

    Legacy variant: sends my own signed Identity packet to every IDURL in
    `idlist`, skipping nodes with several Identity packets already queued.

    :return: number of packets actually queued for sending
    """
    lg.out(8, "propagate.SendToIDs to %d users, wide=%s" % (len(idlist), wide))
    if ack_handler is None:
        ack_handler = HandleAck
    if timeout_handler is None:
        timeout_handler = HandleTimeOut
    # MyID = my_id.getLocalID()
    # PacketID = MyID
    LocalIdentity = my_id.getLocalIdentity()
    Payload = strng.to_bin(LocalIdentity.serialize())
    # Hash = key.Hash(Payload)
    alreadysent = set()
    totalsent = 0
    inqueue = {}
    found_previous_packets = 0
    # count Identity packets already queued per remote node
    for pkt_out in packet_out.queue():
        if pkt_out.remote_idurl in idlist:
            if pkt_out.description.count('Identity'):
                if pkt_out.remote_idurl not in inqueue:
                    inqueue[pkt_out.remote_idurl] = 0
                inqueue[pkt_out.remote_idurl] += 1
                found_previous_packets += 1
    for contact in idlist:
        if not contact:
            continue
        if contact in alreadysent:
            # just want to send once even if both customer and supplier
            continue
        if contact in inqueue and inqueue[contact] > 2:
            # now only 2 protocols is working: tcp and udp
            lg.out(8, ' skip sending [Identity] to %s, packet already in the queue' % contact)
            continue
        # found_previous_packets = 0
        # for transfer_id in gate.transfers_out_by_idurl().get(contact, []):
        #     ti = gate.transfers_out().get(transfer_id, None)
        #     if ti and ti.description.count('Identity'):
        #         found_previous_packets += 1
        #         break
        # if found_previous_packets >= 3:
        #     lg.out(8, '        skip sending to %s' % contact)
        #     continue
        p = signed.Packet(
            commands.Identity(),
            my_id.getLocalID(),  # MyID,
            my_id.getLocalID(),  # MyID,
            commands.Identity(),  # 'Identity',  # my_id.getLocalID(),  # PacketID,
            Payload,
            contact,
        )
        lg.out(8, " sending [Identity] to %s" % nameurl.GetName(contact))
        # callback.register_interest(AckHandler, signed.RemoteID, signed.PacketID)
        gateway.outbox(p, wide, response_timeout=response_timeout, callbacks={
            commands.Ack(): ack_handler,
            commands.Fail(): ack_handler,
            None: timeout_handler,
        })
        if wide:
            # this is a ping packet - need to clear old info
            p2p_stats.ErasePeerProtosStates(contact)
            p2p_stats.EraseMyProtosStates(contact)
        alreadysent.add(contact)
        totalsent += 1
    del alreadysent
    return totalsent
def _push(self):
    """
    Pick a transport and hand the outgoing file to the gateway.

    Order of preference: explicit route, then (wide mode) all contacts,
    then tcp via known local IP, tcp, udp, http, proxy - each only when
    enabled, installed and known to be working - and finally the first
    usable contact method. Fires "items-sent" or "nothing-to-send".
    """
    from transport import gateway
    if self.route:
        # if this packet is routed - send directly to route host
        gateway.send_file(
            strng.to_bin(self.route['remoteid']),
            strng.to_text(self.route['proto']),
            strng.to_bin(self.route['host']),
            self.filename,
            self.description,
            self,
        )
        self.items.append(WorkItem(strng.to_text(self.route['proto']), strng.to_bin(self.route['host']), self.filesize))
        self.automat('items-sent')
        return
    # get info about his local IP
    localIP = identitycache.GetLocalIP(self.remote_idurl)
    workitem_sent = False
    if self.wide:
        # send to all his contacts
        for contactmethod in self.remote_identity.getContacts():
            proto, host = nameurl.IdContactSplit(contactmethod)
            if host.strip() and \
                    settings.transportIsEnabled(proto) and \
                    settings.transportSendingIsEnabled(proto) and \
                    gateway.can_send(proto) and \
                    gateway.is_installed(proto):
                if proto == 'tcp' and localIP:
                    # prefer his known local IP over the published tcp host
                    host = localIP
                gateway.send_file(
                    strng.to_bin(self.remote_idurl),
                    strng.to_text(proto),
                    strng.to_bin(host),
                    self.filename,
                    self.description,
                    self,
                )
                self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
                workitem_sent = True
        if not workitem_sent:
            self.automat('nothing-to-send')
            lg.warn('(wide) no supported protocols with %s' % self.remote_idurl)
        else:
            self.automat('items-sent')
        return
    # send to one of his contacts,
    # now need to decide which transport to use
    # let's prepare his contacts first
    byproto = self.remote_identity.getContactsByProto()
    tcp_contact = None
    if settings.enableTCP() and settings.enableTCPsending():
        tcp_contact = byproto.get('tcp', None)
    udp_contact = None
    if settings.enableUDP() and settings.enableUDPsending():
        udp_contact = byproto.get('udp', None)
    http_contact = None
    if settings.enableHTTP() and settings.enableHTTPsending():
        http_contact = byproto.get('http', None)
    proxy_contact = None
    if settings.enablePROXY() and settings.enablePROXYsending():
        proxy_contact = byproto.get('proxy', None)
    working_protos = p2p_stats.peers_protos().get(self.remote_idurl, set())
    # tcp seems to be the most stable proto
    # now let's check if we know his local IP and
    # he enabled tcp in his settings to be able to receive packets from others
    # try to send to his local IP first, not external
    if tcp_contact and localIP:
        # NOTE(review): "proto" has not been assigned yet at this point, so
        # can_send(proto) reads an unbound name here - looks like 'tcp' was
        # intended; confirm before relying on this branch
        if gateway.is_installed('tcp') and gateway.can_send(proto):
            proto, host, port, fn = nameurl.UrlParse(tcp_contact)
            if port:
                host = localIP + ':' + str(port)
            gateway.send_file(strng.to_bin(self.remote_idurl), strng.to_text(proto), strng.to_bin(host), self.filename, self.description, self)
            self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
            self.automat('items-sent')
            return
    # tcp is the best proto - if it is working - this is the best case!!!
    if tcp_contact and 'tcp' in working_protos:
        proto, host, port, fn = nameurl.UrlParse(tcp_contact)
        if host.strip() and gateway.is_installed(proto) and gateway.can_send(proto):
            if port:
                host = host + ':' + str(port)
            # NOTE(review): unlike every other send_file() call in this
            # method, this one does not pass "self" as the final argument -
            # confirm whether that is intentional
            gateway.send_file(strng.to_bin(self.remote_idurl), strng.to_text(proto), strng.to_bin(host), self.filename, self.description)
            self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
            self.automat('items-sent')
            return
    # udp contact
    if udp_contact and 'udp' in working_protos:
        proto, host = nameurl.IdContactSplit(udp_contact)
        if host.strip() and gateway.is_installed('udp') and gateway.can_send(proto):
            gateway.send_file(strng.to_bin(self.remote_idurl), strng.to_text(proto), strng.to_bin(host), self.filename, self.description, self)
            self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
            self.automat('items-sent')
            return
    # http contact
    if http_contact and 'http' in working_protos:
        proto, host, port, _ = nameurl.UrlParse(http_contact)
        if host.strip() and gateway.is_installed(proto) and gateway.can_send(proto):
            if port:
                host = host + ':' + str(port)
            gateway.send_file(strng.to_bin(self.remote_idurl), strng.to_text(proto), strng.to_bin(host), self.filename, self.description, self)
            self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
            self.automat('items-sent')
            return
    # proxy contact - he may use other node to receive and send packets
    if proxy_contact and 'proxy' in working_protos:
        proto, host = nameurl.IdContactSplit(proxy_contact)
        if host.strip() and gateway.is_installed('proxy') and gateway.can_send(proto):
            gateway.send_file(strng.to_bin(self.remote_idurl), strng.to_text(proto), strng.to_bin(host), self.filename, self.description, self)
            self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
            self.automat('items-sent')
            return
    # finally use the first proto we supported if we can not find the best preferable method
    for contactmethod in self.remote_identity.getContacts():
        proto, host, port, fn = nameurl.UrlParse(contactmethod)
        if port:
            host = host + ':' + str(port)
        # if method exist but empty - don't use it
        if host.strip():
            # try sending with tcp even if it is switched off in the settings
            if gateway.is_installed(proto) and gateway.can_send(proto):
                if settings.enableTransport(proto) and settings.transportSendingIsEnabled(proto):
                    gateway.send_file(strng.to_bin(self.remote_idurl), strng.to_text(proto), strng.to_bin(host), self.filename, self.description, self)
                    self.items.append(WorkItem(strng.to_text(proto), strng.to_bin(host), self.filesize))
                    self.automat('items-sent')
                    return
    self.automat('nothing-to-send')
    lg.warn('no supported protocols with %s : %s %s %s, byproto:%s' % (self.remote_idurl, tcp_contact, udp_contact, working_protos, str(byproto)))
def doStartListening(self, *args, **kwargs):
    """
    Action method.

    Start receiving traffic via the proxy router: remember which
    router (proto, host) we are attached to, persist that choice in the
    program config, store my "original" identity once, register the
    inbox callback and online-status listeners for the router, and bind
    to the already-active transport session with the router.
    """
    try:
        # normally the event args carry the fresh (response, info) pair
        # from the router - take proto/host from there
        _, info = args[0]
        self.router_proto_host = (info.proto, info.host)
    except:
        try:
            # fall back to the router endpoint persisted in the config
            # TODO: move that setting to separate file
            s = config.conf().getString(
                'services/proxy-transport/current-router').strip()
            _, router_proto, router_host = s.split(' ')
            self.router_proto_host = (
                router_proto,
                strng.to_bin(router_host),
            )
        except:
            lg.exc()
    # NOTE(review): if args[0] failed to unpack, `info` stays unbound and the
    # gateway.find_active_session(info.proto, ...) call below would raise
    # NameError - presumably the event always carries it; confirm with callers.
    self.router_identity = identitycache.FromCache(self.router_idurl)
    # persist the selected router so it survives a restart
    config.conf().setString(
        'services/proxy-transport/current-router', '%s %s %s' % (
            strng.to_text(self.router_idurl),
            strng.to_text(self.router_proto_host[0]),
            strng.to_text(self.router_proto_host[1]),
        ))
    current_identity = my_id.getLocalIdentity().serialize(as_text=True)
    previous_identity = ReadMyOriginalIdentitySource()
    if previous_identity:
        # only the very first identity is kept as "original" - never overwrite
        lg.warn('my original identity is not empty, SKIP overwriting')
        if _Debug:
            lg.out(
                _DebugLevel,
                '\nPREVIOUS ORIGINAL IDENTITY:\n%s\n' % current_identity)
    else:
        WriteMyOriginalIdentitySource(current_identity)
        lg.warn('current identity was stored as my-original-identity')
    self.request_service_packet_id = []
    # make sure incoming packets reach us before any other handler
    callback.insert_inbox_callback(0, self._on_inbox_packet_received)
    if online_status.isKnown(self.router_idurl):
        online_status.add_online_status_listener_callback(
            idurl=self.router_idurl,
            callback_method=self._on_router_contact_status_connected,
            newstate='CONNECTED',
        )
        online_status.add_online_status_listener_callback(
            idurl=self.router_idurl,
            callback_method=self._on_router_contact_status_offline,
            newstate='OFFLINE',
        )
    # contact_status.A(self.router_idurl).addStateChangedCallback(
    #     self._on_router_contact_status_connected, newstate='CONNECTED')
    # contact_status.A(self.router_idurl).addStateChangedCallback(
    #     self._on_router_contact_status_offline, newstate='OFFLINE')
    # attach to the live transport session with the router, so we get
    # notified if that session disconnects
    active_router_sessions = gateway.find_active_session(
        info.proto, info.host)
    if active_router_sessions:
        self.router_connection_info = {
            'id': active_router_sessions[0].id,
            'index': active_router_sessions[0].index,
            'proto': info.proto,
            'host': info.host,
            'idurl': self.router_idurl,
            'global_id': global_id.UrlToGlobalID(self.router_idurl),
        }
        active_router_session_machine = automat.objects().get(
            self.router_connection_info['index'], None)
        if active_router_session_machine is not None:
            active_router_session_machine.addStateChangedCallback(
                self._on_router_session_disconnected, oldstate='CONNECTED')
            lg.info(
                'connected to proxy router and set active session: %s' %
                self.router_connection_info)
        else:
            lg.err('not found proxy router session state machine: %s' %
                   self.router_connection_info['index'])
    else:
        lg.err(
            'active connection with proxy router at %s:%s was not found' % (
                info.proto,
                info.host,
            ))
    if _Debug:
        lg.out(
            _DebugLevel,
            'proxy_receiver.doStartListening !!!!!!! router: %s at %s://%s' %
            (self.router_idurl, self.router_proto_host[0],
             self.router_proto_host[1]))
def ping(
    idurl,
    ack_timeout=15,
    cache_timeout=5,
    cache_retries=2,
    ping_retries=2,
    force_cache=False,
    skip_outbox=False,
    keep_alive=True,
    fake_identity=None,
    channel='identity',
    channel_counter=True,
):
    """
    Start a peer-to-peer handshake ("ping") with the node behind `idurl`
    and return a `Deferred` that fires with the result.

    First the remote identity file is read from the `idurl` location,
    then my own identity is sent to the remote node and an Ack() is
    awaited. A timeout or any other failure makes the returned
    `Deferred` errback.

    If a handshake with the same node is already running, no second
    state machine is created - the new `Deferred` is simply attached to
    the running one and fires together with it.
    """
    global _RunningHandshakers
    remote_idurl = strng.to_bin(idurl)
    if not remote_idurl:
        raise Exception('empty idurl provided')
    ret = Deferred()
    if remote_idurl in _RunningHandshakers:
        # a handshake with that node is already in progress: piggy-back on it
        _RunningHandshakers[remote_idurl]['results'].append(ret)
        if _Debug:
            lg.args(
                _DebugLevel,
                already_opened=True,
                idurl=remote_idurl,
                channel=channel,
                skip_outbox=skip_outbox,
            )
        return ret
    _RunningHandshakers[remote_idurl] = {
        'instance': None,
        'results': [ret, ],
    }
    if _Debug:
        lg.args(
            _DebugLevel,
            already_opened=False,
            idurl=remote_idurl,
            channel=channel,
            skip_outbox=skip_outbox,
        )
    # spin up a fresh handshake state machine for that node
    handshaker = Handshaker(
        remote_idurl=remote_idurl,
        ack_timeout=ack_timeout,
        cache_timeout=cache_timeout,
        cache_retries=cache_retries,
        ping_retries=ping_retries,
        skip_outbox=skip_outbox,
        keep_alive=keep_alive,
        fake_identity=fake_identity,
        channel=channel,
        channel_counter=channel_counter,
        debug_level=_DebugLevel,
        log_events=_Debug,
        log_transitions=_Debug,
    )
    _RunningHandshakers[remote_idurl]['instance'] = handshaker
    handshaker.automat('cache-and-ping' if force_cache else 'ping')
    return ret
def rebuildLocalIdentity(identity_object=None, skip_transports=None, new_sources=None, revision_up=False, new_revision=None, save_identity=True):
    """
    If some transports was enabled or disabled we want to update identity contacts.
    Just empty all of the contacts and create it again in the same order.
    Also increase revision number by one - others may keep track of my modifications.

    :param identity_object: modify this identity instead of a copy of the local one
    :param skip_transports: list of transport protos to leave out of the contacts
    :param new_sources: if set, rotate my identity sources to this list
    :param revision_up: force a revision bump even if nothing changed
    :param new_revision: explicit revision number to set instead of current + 1
    :param save_identity: when True, store the modified identity on success
    :return: True if the identity was modified, False on error
    """
    # FIX: the default used to be a shared mutable list (skip_transports=[]);
    # use a None sentinel and build a fresh list per call instead
    if skip_transports is None:
        skip_transports = []
    # remember the current identity - full XML source code
    current_identity_xmlsrc = getLocalIdentity().serialize()
    if _Debug:
        lg.out(_DebugLevel, 'my_id.rebuildLocalIdentity current identity is %d bytes long new_revision=%r' % (
            len(current_identity_xmlsrc), new_revision,))
    # getting a copy of local identity to be modified or another object to be used
    lid = identity_object or identity.identity(xmlsrc=current_identity_xmlsrc)
    # create a full list of needed transport methods
    # to be able to accept incoming traffic from other nodes
    new_contacts, new_order = buildProtoContacts(lid, skip_transports=skip_transports)
    # erase current contacts from my identity
    lid.clearContacts()
    # add contacts data to the local identity
    lid.setContactsFromDict(new_contacts, new_order)
    # if I need to rotate my sources do it now
    if new_sources:
        lid.setSources(new_sources)
    # update software version number
    # TODO: need to read GIT commit hash here instead of version
    vernum = strng.to_bin(bpio.ReadTextFile(settings.VersionNumberFile())).strip()
    # repo, _ = misc.ReadRepoLocation()
    repo = 'sources'
    lid.setVersion((vernum + b' ' + strng.to_bin(repo.strip()) + b' ' + strng.to_bin(bpio.osinfo().strip()).strip()))
    # generate signature with changed content
    lid.sign()
    new_xmlsrc = lid.serialize()
    changed = False
    if new_xmlsrc != current_identity_xmlsrc or revision_up or new_revision:
        if not new_revision:
            new_revision = lid.getRevisionValue() + 1
        try:
            lid.setRevision(new_revision)
        except:
            lg.exc()
            return False
        # generate signature again because revision were changed !!!
        lid.sign()
        lg.info('incremented my identity revision: %d' % lid.getRevisionValue())
        changed = True
    else:
        # no modifications in my identity - cool !!!
        if _Debug:
            lg.out(_DebugLevel, '    same revision: %d' % lid.getRevisionValue())
    if _Debug:
        lg.out(_DebugLevel, '    version: %r' % lid.version)
        lg.out(_DebugLevel, '    contacts: %r' % lid.contacts)
        lg.out(_DebugLevel, '    sources: %r' % lid.getSources(as_originals=True))
    if changed:
        if save_identity:
            # finally saving modified local identity
            if _Debug:
                lg.out(_DebugLevel, '    SAVING new identity #%s' % lid.revision)
            # remember the new identity
            setLocalIdentity(lid)
            saveLocalIdentity()
            # NOW TEST IT!
            # forgetLocalIdentity()
            # loadLocalIdentity()
            if _Debug:
                lg.out(_DebugLevel, '    LOCAL IDENTITY CORRECT: %r' % getLocalIdentity().isCorrect())
                lg.out(_DebugLevel, '    LOCAL IDENTITY VALID: %r' % getLocalIdentity().Valid())
    lg.info('my identity HAS %sBEEN changed' % (('' if changed else 'NOT ')))
    if _Debug:
        lg.out(_DebugLevel, '\n' + strng.to_text(lid.serialize()) + '\n')
    return changed
def rebuildLocalIdentity(identity_object=None, skip_transports=None, revision_up=False, save_identity=True):
    """
    If some transports was enabled or disabled we want to update identity contacts.
    Just empty all of the contacts and create it again in the same order.
    Also increase revision number by one - others may keep track of my modifications.

    :param identity_object: modify this identity instead of the local one
    :param skip_transports: list of transport protos to leave out of the contacts
    :param revision_up: force a revision bump even if nothing changed
    :param save_identity: when True, store the modified identity on success
    :return: True if the identity was modified, False on error
    """
    # FIX: the default used to be a shared mutable list (skip_transports=[]);
    # use a None sentinel and build a fresh list per call instead
    if skip_transports is None:
        skip_transports = []
    # getting current copy of local identity
    lid = identity_object or getLocalIdentity()
    # remember the current identity - full XML source code
    current_identity_xmlsrc = lid.serialize()
    lg.out(
        4, 'my_id.rebuildLocalIdentity current identity is %d bytes long' %
        len(current_identity_xmlsrc))
    # create a full list of needed transport methods
    # to be able to accept incoming traffic from other nodes
    new_contacts, new_order = buildProtoContacts(
        lid, skip_transports=skip_transports)
    # erase current contacts from my identity
    lid.clearContacts()
    # add contacts data to the local identity
    lid.setContactsFromDict(new_contacts, new_order)
    # update software version number
    vernum = strng.to_bin(bpio.ReadTextFile(
        settings.VersionNumberFile())).strip()
    repo, _ = misc.ReadRepoLocation()
    lid.setVersion((vernum + b' ' + strng.to_bin(repo.strip()) + b' ' +
                    strng.to_bin(bpio.osinfo().strip()).strip()))
    # generate signature with changed content
    lid.sign()
    new_xmlsrc = lid.serialize()
    changed = False
    if new_xmlsrc != current_identity_xmlsrc or revision_up:
        try:
            lid.setRevision(int(strng.to_text(lid.revision)) + 1)
        except:
            lg.exc()
            return False
        # generate signature again because revision were changed !!!
        lid.sign()
        lg.out(4, '    incremented revision: %s' % lid.revision)
        changed = True
        # remember the new identity
        if save_identity:
            setLocalIdentity(lid)
    else:
        # no modifications in my identity - cool !!!
        lg.out(4, '    same revision: %r' % lid.revision)
    lg.out(4, '    version: %r' % lid.version)
    lg.out(4, '    contacts: %r' % lid.contacts)
    lg.out(4, '    sources: %r' % lid.sources)
    if changed:
        lg.out(4, '    SAVING new identity #%s' % lid.revision)
        # finally saving modified local identity
        if save_identity:
            saveLocalIdentity()
            # NOW TEST IT!
            forgetLocalIdentity()
            loadLocalIdentity()
            lg.out(
                4, '    LOCAL IDENTITY CORRECT: %r' %
                getLocalIdentity().isCorrect())
            lg.out(4, '    LOCAL IDENTITY VALID: %r' % getLocalIdentity().Valid())
    lg.info('my identity HAS %sBEEN changed' % (('' if changed else 'NOT ')))
    lg.out(4, '\n' + strng.to_text(lid.serialize()) + '\n')
    return changed
def md5(inp, hexdigest=False):
    """
    Compute the MD5 digest of `inp`.

    The input is normalized to bytes first. Returns the raw 16-byte
    digest by default, or its hex string form when `hexdigest` is True.
    """
    hasher = MD5.new(strng.to_bin(inp))
    return hasher.hexdigest() if hexdigest else hasher.digest()