def do_sub(self, ch):
    check.check_is_text(ch)  # Relevance: @R50.

    if self.subs.has_key(ch) or self.ads.has_key(ch):
        raise error.Error(_("erroneous subscription"))

    self.ads[ch] = {'type': 'channel exists', 'name': ch, 'keywords': []}

    #me = self.app.name_server.get_info()
    #self.subs[ch] = {'channel': ch}
    #for field in ('type', 'key', 'name'):
    #    self.subs[ch][field] = me[field]
    #del self.subs[ch]['channel']

    self.subs[ch] = lambda self=self: self.app.name_server.get_info()
    # - has to be a distinct object to allow unpublish

    self.node.publish(hash.hash_of('channel exists'),
                      self.ads[ch], settings.channel_redundancy)
    self.node.publish(hash.hash_of('channel subscribe ' + ch),
                      self.subs[ch], settings.channel_redundancy)
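# Note on the lambda above: node.unpublish removes entries by object
# identity (see its uses below, e.g. unpublish(dir.info)), so each
# subscription needs its own distinct callable. A minimal sketch of the
# matching unsubscribe under that assumption (the name do_unsub_sketch
# is hypothetical, not the repo's actual method):
def do_unsub_sketch(self, ch):
    if not self.subs.has_key(ch):
        raise error.Error(_("not subscribed"))
    self.node.unpublish(self.ads[ch])   # withdraw the 'channel exists' advert
    self.node.unpublish(self.subs[ch])  # withdraw this peer's subscription record
    del self.ads[ch]
    del self.subs[ch]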
def verify(self, str, sig):
    self.lock.acquire()
    try:
        if not crypto.RSA.construct(self.info['key']).verify(hash.hash_of(str), sig):
            raise error.Error(_('Bad signature.'))
    finally:
        self.lock.release()
def name_server_watch_poller_thread(self):
    # Every 10 minutes, retrieve and update the list of people watching me.
    while self.running:
        pipe = self.node.retrieve(hash.hash_of('watch ' + self.public_key_name),
                                  settings.identity_redundancy)
        watcher = {'type': 'watch', 'whom': self.public_key_name}
        try:
            while not pipe.finished():
                for item in pipe.read_all():
                    if item[1] == watcher:
                        if item[0] not in self.watchers:
                            utility.start_thread(
                                name_server_add_watcher_thread(self, item[0]))
                if not self.running:
                    return
                # Ugh yuck bleah
                yield 'sleep', 2
        except:
            pass
        pipe.stop()
        yield 'sleep', 10*60
def check_and_demangle_item(item):
    """Return the demangled item if it is a valid mangled item, otherwise
    throw an exception (not necessarily of class error.Error).
    Ensures: either an exception is thrown or ret is a dictionary whose
    keys include 'type'."""
    # Note: not sure if this ought to be here...
    import crypto.RSA
    import hash
    import safe_pickle

    item_type = item['type']
    if item_type == 'identity offline demangled':
        raise error.Error('impostor')
    elif item_type == 'identity offline':
        demangle = safe_pickle.loads(item['package'])
        if not crypto.RSA.construct(demangle['key']).verify(
                hash.hash_of(item['package']), item['signature']):
            raise error.Error('bad signature')
        demangle['type'] = 'identity offline demangled'
        return demangle
    else:
        return item
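# Usage sketch: items read back from node.retrieve may be mangled
# 'identity offline' packages, so callers demangle-and-verify before
# trusting them (find_task below does exactly this). The helper name
# here is hypothetical:
def demangled_items_sketch(pairs):
    good = [ ]
    for link, item in pairs:
        try:
            good.append(check_and_demangle_item(item))
        except:
            pass  # impostor or bad signature: drop silently
    return good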
def __init__(self, app, node, random_func):
    utility.Task_manager.__init__(self)

    self.app = app
    self.node = node

    self.key = utility.get_config("private_key", None)
    if self.key == None:
        random = random_func()
        self.key = crypto.RSA.deconstruct(crypto.RSA.generate(1024, random.randfunc))
        random.finish()
        utility.set_config("private_key", self.key)
    self.key = crypto.RSA.construct(self.key)
    self.public_key = crypto.RSA.deconstruct(self.key.publickey())
    self.public_key_name = key_name(self.public_key)
    self.public_key_name_offline = hash.hash_of('identity-offline ' + key_name(self.public_key))

    self.name = None
    self.info = { }
    self.status = { }  # e.g. chat status stored under 'chat' key

    # For searching for all people
    self.service_name = hash.hash_of("service identity")

    self.acquaintances = { }
    self.nicknames = { }
    self.watchers = [ ]  # the people who are watching me
    self.watch_callbacks = [ ]
    self.start_time = time.time()

    self.get_info_func = lambda self=self: self.app.name_server.get_info()
    # - has to be a distinct object to allow unpublish

    self.aborted = 0
    self.disconnect_threads = 0

    # Proof of @R.I22: @E25
    self.check_invar()
def publish_auction(self, title, category, description, initial_price=0):
    info = {
        'type'       : 'auction',
        'category'   : category,
        'title'      : title,
        'seller'     : self.app.node.name,
        'description': description
    }
    name = hash.hash_of(safe_pickle.dumps(info))
    self.auctions[name] = info
    self.auctions_prices[name] = initial_price

    keywords = string.split(string.lower(title))
    for item in keywords:
        self.node.publish(hash.hash_of('auction-name ' + item),
                          info, settings.identity_redundancy)
    self.node.publish(hash.hash_of('auction-category ' + category),
                      info, settings.identity_redundancy)
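# The retrieval side hashes the same prefixed keyword strings. A hedged
# search sketch in the thread-generator style used elsewhere in this
# file (the function name is hypothetical):
def find_auctions_thread(node, word, results):
    pipe = node.retrieve(hash.hash_of('auction-name ' + string.lower(word)),
                         settings.identity_redundancy)
    while not pipe.finished():
        for link, item in pipe.read_all():
            if item not in results:
                results.append(item)
        yield 'sleep', 0.5
    pipe.stop()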
def post_wodge(self, topics, subject, text, distance, anonymous, in_reply_to=None):
    if topics == [ ]:
        raise error.Error(_('Please specify some topics.'))
    if self.app.config['human-name'] == '' and not anonymous:
        raise error.Error(_('Please specify your name using the "Network/Configure..." menu item.'))
    if subject == '':
        raise error.Error(_('Please give a subject for your item.'))
    if text == '':
        raise error.Error(_('Please write some text for your item.'))

    wodge_info = {
        'topics'    : topics,
        'subject'   : subject,
        'text'      : text,
        'post-time' : long(time.time())
    }

    if anonymous:
        wodge_info['name'] = ''
        wodge_info['human-name'] = ''
    else:
        wodge_info['name'] = self.app.name_server.public_key_name
        wodge_info['human-name'] = self.app.config['human-name']

    if in_reply_to:
        wodge_info['in-reply-to'] = in_reply_to.signature

    wodge_str = safe_pickle.dumps(wodge_info)
    if anonymous:
        wodge_sig = hash.hash_of(wodge_str)
    else:
        wodge_sig = self.app.name_server.sign(wodge_str)

    wodge = Wodge({
        'wodge'           : wodge_info,
        'string'          : wodge_str,
        'signature'       : wodge_sig,
        'initial_time'    : time.time(),
        'initial_distance': distance,
        # Fixme: make unit_decay_time an option
        'unit_decay_time' : 60*60*24*7,
        'opinions'        : { },
        'collapsed'       : 0})
    # distance = post_distance + (time - post_time)/unit_decay_time

    self.insert_wodge(wodge)
    self.save()
def handle(self, request, address, call_id):
    check.check_matches(request, (types.StringType,))
    check.check_is_af_inet_address(address)

    # Idempotence is a bugger.
    if request[0] == 'data cache store' or \
       request[0] == 'data cache store multi':
        self.lock.acquire()
        try:
            if self.cached_items.has_key(request[1]):
                if self.cached_items[request[1]].call == (address, call_id):
                    return None
                return Error('already storing')

            if request[0] == 'data cache store':
                names = [ request[2] ]
            else:
                names = request[2]
            if len(names) > settings.max_cache_names:
                return Error('too many names')

            item = Cache_item(names, request[3], request[4], (address, call_id))
            self.cached_items[request[1]] = item

            # Here is the actual publish: redundancy is settings.cache_redundancy.
            for name in names:
                self.node.publish(name, request[3], settings.cache_redundancy)
        finally:
            self.lock.release()
        return None

    if request[0] == 'data cache remove':
        self.lock.acquire()
        try:
            lock = hash.hash_of(request[1])
            if self.cached_items.has_key(lock):
                self.node.unpublish(self.cached_items[lock].data)
                del self.cached_items[lock]
        finally:
            self.lock.release()
        return None
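# Request shapes accepted by handle, reconstructed from the accesses above:
#   ('data cache store',       lock, name,  data, expiry)
#   ('data cache store multi', lock, names, data, expiry)
#   ('data cache remove',      key)    where hash.hash_of(key) == lock
# Removal requires the hash preimage of the stored index, so only the
# publisher (who chose key) can remove an item; compare the matching
# 'data cache remove' calls in retrieve_cached_messages_thread and
# startup_thread. A minimal client-side sketch (hypothetical name):
def remove_cached_item_thread(node, address, key):
    ticket, template, wait = node.call(address, ('data cache remove', key))
    if wait:
        yield 'call', (node, ticket)
    node.get_reply(ticket, template)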
def start_watching(self, node):
    if not self.running:
        return
    if not self.watching:
        self.watcher = {'type': 'watch', 'whom': self.name}
        name = self.name
        watcher = self.watcher
        self.watching = 1
        node.publish(hash.hash_of('watch ' + name), watcher,
                     settings.identity_redundancy)
        if not self.online:
            self.watched = 0
            utility.start_thread(acquaintance_watch_thread(self, node))
        else:
            self.watched = 1
def __init__(self, node):
    utility.Task_manager.__init__(self)

    self.node = node
    self.start_time = time.time()  # for up_time

    self.cache_pool = [ ]  # list of known caches
    self.cache_pool_last_refresh = 0.0
    self.cache_pool_original_size = 0
    self.cache_pool_lock = threading.RLock()

    self.data = {
        'type'            : 'service data cache',
        'up time'         : 0,
        'max expiry time' : 30*24*60*60
    }
    self.name = hash.hash_of('service data cache')
    self.queued_items = [ ]
    self.cached_items = { }
def retrieve_cached_messages_thread(self, on_complete):
    pipe = self.node.retrieve(
        hash.hash_of('offline message ' + self.app.name_server.public_key_name),
        settings.cache_redundancy)
    # Loose proof of @R50: @I22, @E22.

    pipe_reads = [ ]
    if not pipe.finished():
        while 1:
            pipe_reads.extend(pipe.read_all())
            if pipe.finished():
                break
            yield 'sleep', 1
    pipe.stop()
    # pjm 2002-08-05: I've changed the above to sleep only if one
    # read_all call isn't enough. However, I don't know why sleep is
    # wanted in the first place, or whether a different duration might
    # be better. (Python sleep allows its argument to be fractional,
    # implemented in terms of select.)

    unique_messages = [ ]
    for item in pipe_reads:
        # I think we're guaranteed that item matches ('af_inet_address', 'any').
        if not check.matches(item[1], {'type' : types.StringType,
                                       'crypt': ('any', 'any')}):
            # print bad peer
            pass
        elif item[1] not in unique_messages:
            unique_messages.append(item[1])

    message_list = [ ]
    for raw_msg in unique_messages:
        if type(raw_msg) == type({}) \
           and raw_msg.get('type','') == 'offline message' \
           and raw_msg.has_key('crypt'):
            try:
                decrypt = self.app.name_server.decrypt(raw_msg['crypt'])
                if not check.matches(decrypt, ('text', (types.StringType,), types.LongType)):
                    raise error.Error('bad decrypted reply')

                # Remove from caches
                for thing in pipe_reads:
                    if thing[1] == raw_msg:
                        try:
                            ticket, template, wait = self.node.call(
                                thing[0], ('data cache remove', decrypt[0]))
                            if wait:
                                yield ('call', (self.node, ticket))
                            self.node.get_reply(ticket, template)
                        except error.Error:
                            pass

                message_list.append((decrypt[2], decrypt[1]))
            except error.Error:
                pass

    message_list.sort()

    self.lock.acquire()
    try:
        any = 0
        for msg in message_list:
            if msg not in self.offline_message_buffer:
                self.offline_message_buffer = [msg] + self.offline_message_buffer[:50]
                new_item = (msg[1], None, standard2host_timestamp(msg[0]))
                # Proof of @R36: msg is taken from message_list.
                # message_list is local to this method, and is not passed
                # to any other method (so is not shared with any other
                # thread). message_list starts as empty and is written to
                # solely as (decrypt[2],decrypt[1]) pairs, and only where
                # decrypt has already been found to match
                # ('any', ('string',), 'long'). The relevant types are
                # immutable.
                check.check_matches(new_item, unread_message_list_item_tmpl)
                # Proof: the @R36 proof just above also shows that msg[1]
                # (i.e. decrypt[1]) matches ('string',) and is immutable.
                # new_item[1] matches because is_opt_address(None).
                # new_item[2] matches from @E17.
                # Relevance: @R.I15
                self.unread_message_list.append(new_item)
                any = 1
            else:
                print _("Duplicate offline message.")
    finally:
        self.lock.release()

    on_complete(self, any)
def build_directory(path, mtime, flags, server):
    dir = Directory()
    dir.mtime = mtime
    dir.is_dir = 1
    dir.names = [ ]

    basename = os.path.basename(path)
    dir.info = {
        'type'     : 'directory',
        'filename' : basename,
        'length'   : 0L,
        'path'     : string.split(path, '/')
    }
    #print dir.info['path']

    names = os.listdir(path)
    #for i in range(len(names)):
    #    names[i] = utility.force_unicode(names[i])

    # Option to limit the number of files published.
    # TODO: make it work with subdirectories.
    if flags['max'] != None:
        names = names[:flags['max']]

    if flags.get('name'):
        if server.cache.has_key((path, mtime)):
            dir.hash, dir.length = server.cache[(path, mtime)]
        else:
            dir.length = 0
            dir.hash = hash.hash_of(basename)
            server.cache[(path, mtime)] = (dir.hash, dir.length)
        dir.info['name'] = dir.hash
        dir.names.append(dir.hash)

    if not flags.get('name'):
        dir.info['local_path'] = path

    str = utility.remove_accents(string.lower(basename))
    keywords = [ ]
    if flags.get('filename'):
        keywords.append(str)
    if flags.get('keywords'):
        for char in '+-_.,?!()[]':
            str = string.replace(str, char, " ")
        keywords.extend(string.split(str))

    dir.info['keywords'] = [ ]

    dir.files = [ ]
    for item in names:
        if item[0] != '.':
            dir.files.append(os.path.join(path, item))

    # For the moment do not publish directories.
    return dir

    for word in keywords:
        word = utility.force_string(word)
        if len(word) >= min_search_keyword_len and word not in dir.info['keywords']:
            dir.info['keywords'].append(word)
            if flags.get('name'):
                dir.names.append(hash.hash_of(word))

    # Publish directory...
    # Todo: publish after all files have been hashed,
    # generate name from their hash.
    if flags.get('name'):
        if not server.entries.has_key(path):
            for name in dir.names:
                server.node.publish(name, dir.info)
        elif server.entries[path].mtime != mtime:
            # First unpublish outdated info.
            #print "unpublishing outdated dir"
            server.node.unpublish(dir.info)
            for name in dir.names:
                server.node.publish(name, dir.info)
        server.entries[path] = dir
        server.paths[dir.hash] = (path, dir.mtime)
        server.names[path] = dir.hash

    return dir
def build_file(path, mtime, flags, server):
    file = File()
    file.is_dir = 0
    file.mtime = mtime

    # basename = utility.force_unicode(os.path.basename(path))
    # Do not convert to unicode, because published data should not
    # depend on the terminal encoding of the client.
    basename = os.path.basename(path)
    file.length = os.stat(path)[6]
    file.names = [ ]
    file.info = {
        'type'     : 'file',
        'filename' : basename,
        'length'   : file.length,
    }

    if flags.get('name'):
        if server.cache.has_key((path, mtime)):
            file.hash, file.length = server.cache[(path, mtime)]
        else:
            try:
                f = open(path, 'rb')
                m = md5.new()
                file.length = 0L
                while 1:
                    str = f.read(1 << 20)
                    if str == '':
                        break
                    m.update(str)
                    file.length = file.length + len(str)
                f.close()
                file.hash = m.digest()
            except IOError:
                raise Error('bad file')
            server.cache[(path, mtime)] = (file.hash, file.length)
        file.info['name'] = file.hash
        file.names.append(file.hash)

    if flags.get('local'):
        file.info['local_path'] = path

    str = utility.remove_accents(string.lower(basename))
    keywords = [ ]
    if flags.get('filename'):
        keywords.append(str)
    if flags.get('keywords'):
        for char in '+-_.,?!()[]':
            str = string.replace(str, char, " ")
        keywords.extend(string.split(str))

    if flags.get('mime'):
        list = {}
        if string.lower(path[-4:]) == '.mp3':
            list = mp3.mp3_info(path)
        elif string.lower(path[-4:]) == '.ogg':
            list = mp3.ogg_info(path)
        if list:
            for (k, v) in list.items():
                file.info[k] = v
        if file.info.get('music_title'):
            keywords.extend(string.split(
                utility.remove_accents(string.lower(file.info['music_title']))))
        if file.info.get('music_artist'):
            keywords.extend(string.split(
                utility.remove_accents(string.lower(file.info['music_artist']))))

    file.info['keywords'] = [ ]

    if flags.get('mime'):
        import classify
        try:
            information = classify.classifier.information(path)
            for key in information.keys():
                if information[key] == None:
                    #print "[Harmless warning] Can not classify : ", path
                    continue
                if len(information[key]) >= min_search_keyword_len:
                    file.info[key] = information[key]
        except:
            sys.stderr.write("Exception caught while classifying file.\n")

    for word in keywords:
        word = utility.force_string(word)
        if len(word) >= min_search_keyword_len and word not in file.info['keywords']:
            file.info['keywords'].append(word)
            if flags.get('name'):
                file.names.append(hash.hash_of(word))

    # Publish immediately...
    if flags.get('name'):
        if not server.entries.has_key(path):
            for name in file.names:
                server.node.publish(name, file.info)
        elif server.entries[path].mtime != mtime:
            # First unpublish outdated info.
            print "unpublishing outdated:", path
            server.node.unpublish(file.info)
            for name in file.names:
                server.node.publish(name, file.info)
        server.entries[path] = file
        server.paths[file.hash] = (path, file.mtime)
        server.names[path] = file.hash

    return file
def sign(self, str):
    return self.key.sign(hash.hash_of(str), '')
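# How sign pairs with verify above: both sides hash the payload; the
# signer uses the private key, the checker reconstructs the public key.
# A hedged round-trip sketch (the helper name is hypothetical):
def signed_package_sketch(name_server, info):
    package = safe_pickle.dumps(info)
    signature = name_server.sign(package)
    # Receiver side, given the sender's public key 'key':
    #   crypto.RSA.construct(key).verify(hash.hash_of(package), signature)
    return package, signature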
def start(self, status_monitor=None):
    utility.Task_manager.start(self)

    acq_list = utility.get_checked_config('acquaintances', types.ListType, [ ])
    for item in acq_list:
        if not check.matches(item, Acquaintance.map_tmpl):
            print _("Warning: corrupted acquaintances config file; ignoring item: "), item
            continue
        acq = Acquaintance(self, item, 1)
        self.acquaintances[acq.name] = acq
        self.nicknames[acq.nickname] = acq
        acq.start()

    def make_acquaintance_noaddr(self, info):
        name = key_name(info['key'])
        acq = Acquaintance(self, {'info'    : info,
                                  'name'    : name,
                                  'nickname': self.choose_nickname(info['name'])}, 0)
        self.acquaintances[name] = acq
        self.nicknames[acq.nickname] = acq
        acq.start()
        acq.start_watching(self.node)
        self.acquaintance_status_changed(acq, "create")
        return acq

    self.me = make_acquaintance_noaddr(self, self.info)

    # Other me may want to test identity.
    # May start chatting before test complete.
    self.node.add_handler('identity test', self, ('name',), crypto.pubkey.signature_tmpl)
    self.node.add_handler('identity query', self, (), Acquaintance.info_template)
    self.node.add_handler('identity watch', self, (), types.DictionaryType)
    self.node.add_handler('identity connecting', self)
    self.node.add_handler('identity status changed', self, ('any', Acquaintance.status_template))
    self.node.add_handler('identity disconnecting', self, ('string', 'opt-text'))
    self.node.add_handler('identity abort', self)

    self.node.publish(self.public_key_name, self.get_info_func,
                      settings.identity_redundancy)
    self.node.publish(self.public_key_name_offline, self.get_info_func,
                      settings.identity_redundancy)
    self.node.publish(self.service_name, self.get_info_func,
                      settings.identity_redundancy)
    for item in self.info['keywords']:
        self.node.publish(hash.hash_of('identity-name ' + item),
                          self.get_info_func, settings.identity_redundancy)

    def startup_thread(self, status_monitor=status_monitor):
        list = self.acquaintances.values()
        list.sort(lambda x, y: cmp(x.sort_value(), y.sort_value()))
        for item in list:
            item.start_watching(self.node)
            # start_watching tends to breed threads; try to make sure we
            # don't get too many. Yes, this is hacky.
            #print item.nickname, threading.activeCount()
            #time.sleep(0.25)
            while 1:
                yield 'sleep', 0.25
                if threading.activeCount() < 40:
                    break

        self.me.start_watching(self.node)
        while not self.me.watched:
            yield 'sleep', 0.1

        online = self.me.online
        address = self.me.address

        if online:
            if status_monitor:
                status_monitor(_('Shutting down your other peer.'))
            while 1:
                ticket, template, wait = self.node.call(address, ('identity abort',))
                if wait:
                    yield 'call', (self.node, ticket)
                try:
                    dummy_result = self.node.get_reply(ticket, template)
                except error.Error:
                    break
                yield 'sleep', 4

        self.me.online = 1
        self.me.address = self.node.address
        self.me.connect_time = time.time()

        # Task to retrieve existing watchers
        # Task to poll existing watchers
        utility.start_thread(name_server_watch_poller_thread(self))

        # Now refresh my own offline presence.
        pipe = self.node.retrieve(self.public_key_name_offline,
                                  settings.cache_redundancy)
        list = [ ]
        while not pipe.finished():
            for item in pipe.read_all():
                if type(item[1]) == types.DictType and \
                   item[1].get('type') == 'identity offline' and \
                   item[1].get('salt'):
                    list.append(item)
            yield 'sleep', 2
            if not self.running:
                return
        pipe.stop()

        #if len(list) != 4:
        #    print _("%d peers holding your offline presence.") % len(list)

        for item in list:
            address, value = item
            key = hash.hash_of(safe_pickle.dumps(self.sign(value['salt'])))
            ticket, template, wait = self.node.call(address, ('data cache remove', key))
            if wait:
                yield 'call', (self.node, ticket)
            try:
                dummy_result = self.node.get_reply(ticket, template)
            except error.Error:
                pass

        self.lock.acquire()
        try:
            package = {
                'name'       : self.info['name'],
                'human-name' : self.info['human-name'],
                'description': self.info['description'],
                'timezone'   : self.info['timezone'],
                'key'        : self.public_key,
                'keywords'   : self.info['keywords'],
            }
        finally:
            self.lock.release()

        package_dumped = safe_pickle.dumps(package)
        signature = self.sign(package_dumped)

        # Now publish and cache the offline identity.
        value = {
            'type'     : 'identity offline',
            'package'  : package_dumped,
            'signature': signature,
            'salt'     : utility.random_bytes(settings.name_bytes)
        }
        lock = hash.hash_of(hash.hash_of(safe_pickle.dumps(self.sign(value['salt']))))

        publications = [ self.public_key_name_offline, self.service_name ]
        for item in package['keywords']:
            publications.append(hash.hash_of('identity-name ' + item))

        # thomasV
        # redundancy 4: this is the meta-publish
        result, publish_thread = self.app.cache.publish(publications, value, lock, 4)
        yield 'wait', publish_thread

    utility.start_thread(startup_thread(self))
def find_task(daemon, query, input, output, connection):
    import safe_pickle

    for char in "+-_.,?()![]":
        query = query.replace(char, " ")
    query = query.lower()
    list = query.split()
    if list:
        key = list[0]
        pipe = daemon.node.retrieve(hash.hash_of("identity-name " + key),
                                    settings.identity_redundancy)
    else:
        key = ""  # keeps the "no match" message below well defined
        pipe = daemon.node.retrieve(hash.hash_of("service identity"),
                                    settings.identity_redundancy)

    results = []
    while not pipe.finished():
        list = pipe.read_all()
        prev_pair = None
        for pair in list:
            if pair == prev_pair:
                continue
            link, item = prev_pair = pair
            try:
                item = utility.check_and_demangle_item(item)
            except:
                continue
            if item["key"] not in results:
                results.append(item["key"])
                name = hash.hash_of(safe_pickle.dumps(item["key"]))
                check.check_is_name(name)
                str = hash.hash_to_person(name)
                output.write(str + " " + item["name"] + " ("
                             + utility.force_string(item["human-name"]) + ")\n")
        time.sleep(0.5)
        try:
            output.flush()
        except:
            return

    if not results:
        try:
            output.write('No user matching "' + key + '"')
        except:
            pass
    else:
        if len(results) == 1:
            msg = "1 user found."
        else:
            msg = "%d users found." % len(results)
        output.write(msg)
    pipe.stop()

    try:
        input.close()
        output.close()
        connection.close()
    except:
        # Connection reset by peer...
        pass
def chat_tell_thread(chat, dests, message, attachment=None, augmentation=None):
    """Messages are sent in parallel, one thread per recipient.
    The status information displayed in the field ends with '...' until
    the last message has been sent, then with '..' until all recipients
    have answered, and finally with '.'
    The field is closed by the last thread to finish."""
    check.check_matches(dests, ['text'])
    check.check_is_text(message)

    std_timestamp = host2standard_timestamp(time.time())

    if augmentation and len(augmentation) != len(message):
        sys.stderr.write("Augmentation %d bytes, should be %d bytes.\n"
                         % (len(augmentation), len(message)))
        augmentation = None

    message_id = chat.create_message_id()

    result = ['', '', '', '', '...\n']
    dest_list = [ ]
    dest_address = [ ]
    checked_address = [ ]
    dest_names = [ ]
    received_names = [ ]
    offline_list = [ ]
    offline_names = [ ]
    recipient_list = [ ]
    channel_pipes = { }
    chan_cnt = [0]

    for dest in dests:
        if dest[0] == '#':
            channel_pipes[dest] = chat.channels.sub_list_pipe(dest)
            recipient_list.append((None, None, dest))
            continue

        acq = chat.app.name_server.locate(dest)
        acq.start_watching(chat.node)
        while not acq.watched:
            yield 'sleep', 0.1

        acq.lock.acquire()
        online = acq.online
        address = acq.address
        name = acq.name
        username = acq.info['name']
        acq.lock.release()

        if not online:
            offline_list.append(acq)
            offline_names.append(dest)
            recipient_list.append((name, None, username))
        else:
            dest_list.append(acq)
            dest_address.append(address)
            dest_names.append(dest)
            recipient_list.append((name, address, username))

    my_details = (chat.app.name_server.public_key_name,
                  chat.app.name_server.name)
    package = {
        'from': my_details,
        'to'  : recipient_list,
        'text': message
    }
    if augmentation:
        package['aug'] = augmentation
    if attachment:
        package['attach'] = attachment

    package_pickle = safe_pickle.dumps(package)
    signature = chat.app.name_server.sign(package_pickle)
    message = ('chat message 2', package_pickle, signature)

    def recipient_thread(chat, address, name, channel, received_names,
                         dest_address, checked_address, chan_cnt,
                         message_id, result, message=message):
        succ = 1
        try:
            ticket, template, wait = chat.node.call(address, message)
            if wait:
                yield ('call', (chat.node, ticket))
            ret_value = chat.node.get_reply(ticket, template)

            # Type checking
            if type(ret_value) != types.TupleType:
                succ = 0
                if not channel:
                    result[2] = result[2] + name + _(' sent bad reply.\n')
                    chat.update_message_status(message_id,
                                               result[0] + result[4] + result[2])
        except error.Error, err:
            succ = 0
            if not channel:
                result[2] = result[2] + name + _(' could not be contacted: ') + err.message + '\n'
                chat.update_message_status(message_id,
                                           result[0] + result[4] + result[2])
            chat.app.name_server.bad_address(address)

        if succ:
            if channel:
                chan_cnt[0] += 1
            else:
                received_names.append(name)
            if ret_value[0]:
                pass
                #if ret_value[1] != '':
                #    result[2] = result[2] + name + ' ' + ret_value[1] + '\n'
            else:
                if ret_value[1] == '':
                    result[2] = result[2] + name + _(' is quiet.\n')
                else:
                    result[2] = result[2] + name + _(' is quiet: ') + ret_value[1] + '\n'

        if chan_cnt[0] == 0:
            result[0] = _('Message received by ') + utility.english_list(received_names)
        elif chan_cnt[0] == 1:
            result[0] = _('Message received by ') \
                        + utility.english_list(received_names + [_('1 person')]) + result[1]
        else:
            result[0] = _('Message received by ') \
                        + utility.english_list(received_names + [_('%d people') % chan_cnt[0]]) + result[1]
        chat.update_message_status(message_id, result[0] + result[4] + result[2])

        checked_address.append(address)
        checked_address.sort()
        if result[4] == '..\n' and checked_address == dest_address:
            if chan_cnt[0] == 0 and received_names == []:
                result[0] = _('Nobody received your message')
            result[4] = '.\n'
            chat.update_message_status(message_id,
                                       result[0] + result[4] + result[2])

    for i in range(len(dest_list)):
        utility.start_thread(recipient_thread(
            chat, dest_address[i], dest_names[i], 0,
            received_names, dest_address, checked_address,
            chan_cnt, message_id, result))

    if channel_pipes:
        if len(channel_pipes) > 1:
            result[1] = _(' on channels %s') % utility.english_list(channel_pipes.keys())
        else:
            result[1] = _(' on channel %s') % channel_pipes.keys()[0]

    if channel_pipes:
        for chan_name in channel_pipes.keys():
            if not chat.channels.cache.has_key(chan_name):
                chat.channels.cache[chan_name] = []
            else:
                for address in chat.channels.cache[chan_name]:
                    if address in dest_address:
                        continue
                    dest_address.append(address)
                    utility.start_thread(recipient_thread(
                        chat, address, '', 1, received_names,
                        dest_address, checked_address, chan_cnt,
                        message_id, result))
                # Reset the cache:
                chat.channels.cache[chan_name] = []

    while channel_pipes:
        for chan_name, chan_pipe in channel_pipes.items():
            if chan_pipe.finished():
                chan_pipe.stop()
                del channel_pipes[chan_name]
                continue
            for address in chan_pipe.read_all():
                # Update the cache.
                if address not in chat.channels.cache[chan_name]:
                    chat.channels.cache[chan_name].append(address)
                if address in dest_address:
                    continue
                dest_address.append(address)
                utility.start_thread(recipient_thread(
                    chat, address, '', 1, received_names,
                    dest_address, checked_address, chan_cnt,
                    message_id, result))
        yield 'sleep', 0.1

    # We have now launched all the tasks.
    dest_address.sort()
    result[4] = '..\n'
    if checked_address == dest_address:
        if chan_cnt[0] == 0 and received_names == []:
            result[0] = _('Nobody received your message')
        result[4] = '.\n'
        chat.update_message_status(message_id,
                                   result[0] + result[4] + result[2])

    recall_list = [ ]
    if offline_list:
        chat.update_message_status(message_id, 'Caching message...\n', 1)
    for i in range(len(offline_list)):
        key = utility.random_bytes(settings.name_bytes)
        lock = hash.hash_of(key)
        crypt = offline_list[i].encrypt((key, message, std_timestamp))
        data = {
            'type' : 'offline message',
            'crypt': crypt
        }
        name = hash.hash_of('offline message ' + offline_list[i].name)
        # Loose proof of @R50: @I21, @E22.
        recall_list.append((offline_list[i].nickname, name, data, key))

        publish_result, subthread = chat.app.cache.publish([name], data, lock)
        yield 'wait', subthread
        if publish_result:
            redundancy = publish_result[0]
        else:
            redundancy = 0
        if redundancy == settings.cache_redundancy:
            str = offline_names[i] + _(' will get your message when next logged on.\n')
        elif redundancy == 0:
            str = _('Could not store message for ') + offline_names[i] + '!\n'
        else:
            str = (offline_names[i]
                   + _(' will probably get your message when next logged on:\n'
                       '  Message only stored with redundancy %d.\n') % redundancy)
        result[3] = result[3] + str
        chat.update_message_status(message_id, result[3] + '\n', 1)

    chat.recall_list = recall_list
def sub_list_pipe(self, channel):
    return self.node.retrieve(
        hash.hash_of("channel subscribe " + channel),
        settings.channel_redundancy, 1)
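# The pipe returned above follows the retrieve protocol used throughout
# this file: poll finished(), drain read_all(), then stop(). With the
# trailing argument 1, read_all yields subscriber addresses directly
# (see the channel loop in chat_tell_thread). A minimal consumer sketch
# (the function name is hypothetical):
def channel_subscribers_thread(channels, channel, out):
    pipe = channels.sub_list_pipe(channel)
    while not pipe.finished():
        for address in pipe.read_all():
            if address not in out:
                out.append(address)
        yield 'sleep', 0.1
    pipe.stop()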
# Test identity.
# (The enclosing def line is missing from this extract; the generator
#  signature below is reconstructed from the names used in the body,
#  and the function name is assumed.)
def identity_test_thread(the_node, address, key, id_test_result):
    check.check_is_af_inet_address(address)
    #check.check_isinstance(the_node, Node)

    tester = utility.random_bytes(settings.name_bytes)
    ticket, template, wait = the_node.call(address, ('identity test', tester))
    if wait:
        yield 'call', (the_node, ticket)
    try:
        result = the_node.get_reply(ticket, template)
    except error.Error, err:
        print "error", err
        id_test_result.append(0)
        return

    tester = hash.hash_of('identity test ' + tester)
    try:
        if not crypto.RSA.construct(key).verify(tester, result):
            print "Fake identity."
            id_test_result.append(0)
            return
    except:
        traceback.print_exc()
        print 'Error in RSA.'
        id_test_result.append(0)
        return

    id_test_result.append(1)
    return
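# Caller convention: the generator above reports by appending 0 (failed)
# or 1 (verified) to the shared id_test_result list. A hedged caller
# sketch (the name and the on_done callback are hypothetical):
def identity_test_caller_sketch(the_node, address, key, on_done):
    id_test_result = [ ]
    utility.start_thread(identity_test_thread(the_node, address, key, id_test_result))
    while not id_test_result:
        yield 'sleep', 0.1
    on_done(id_test_result[0])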
def key_name(key):
    ret = hash.hash_of(safe_pickle.dumps(key))
    check.check_is_name(ret)  #=@E25
    # Proof: @E24.
    return ret
def search_task(daemon, query, input, output, connection):
    for char in "+-_.,?()![]":
        query = query.replace(char, " ")
    query = query.lower()
    list = query.split()
    if list:
        key = list[0]
    else:
        key = ""
    if len(key) < 3:
        output.write("Keyword %s too short: must be at least 3 characters" % key)
        input.close()
        output.close()
        connection.close()
        return

    pipe = daemon.node.retrieve(hash.hash_of(key))
    results = []
    restricted = 0
    while not pipe.finished() and not restricted:
        for item in pipe.read_all():
            if len(results) == 100:
                restricted = 1
                break
            if item[1]["name"] not in results:
                results.append(item[1]["name"])
                filename = utility.force_string(item[1]["filename"])
                extension = string.split(string.split(filename, ".")[-1], "-")[0]
                lext = string.lower(extension)
                if lext in ["mp3", "ogg"]:
                    music = 1
                else:
                    music = 0
                if item[1].has_key("music_title"):
                    ref = utility.force_string(item[1]["music_title"])
                    if ref.strip() == "":
                        ref = filename
                else:
                    ref = utility.force_string(item[1]["filename"])
                length = item[1].get("length")
                if not length:
                    sl = ""
                else:
                    sl = utility.human_size(length)
                output.write(hash.hash_to_url(item[1]["name"]) + " \t" + sl
                             + " \t" + filename + "\n")
        time.sleep(0.5)
        try:
            output.flush()
        except:
            return

    if not results:
        try:
            output.write('No document matching "' + key + '"')
        except:
            pass
    else:
        if len(results) == 1:
            msg = "1 file found."
        else:
            msg = "%d files found." % len(results)
        output.write(msg)
    pipe.stop()

    try:
        input.close()
        output.close()
        connection.close()
    except:
        pass