Example 1
def random_name():
    # Build a name from 16 random characters, then check it is well formed.
    ret = ''
    for i in range(16):
        ret += random_char()

    check.check_is_name(ret)
    return ret
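
The snippet leans on a random_char helper from the surrounding codebase. A minimal sketch of what it plausibly does, assuming a name is 16 arbitrary bytes (as the md5-based hash_of below suggests):

import random

def random_char():
    # Hypothetical stand-in: one uniformly random byte, returned as a
    # length-1 string, which is what random_name()'s loop concatenates.
    return chr(random.randrange(256))
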
Example 2
def hash_to_person(name):
    check.check_is_name(name)

    # Pack the name's bytes into a single big integer...
    value = 0L
    for i in range(settings.name_bytes):
        value = (value<<8) + ord(name[i])

    # ...then emit it six bits at a time through the _encoding table,
    # least-significant group rightmost in the output string.
    str = ''
    for i in range(0,settings.name_bytes*8,6):
        str = _encoding[value & 0x3f] + str
        value = value >> 6
    return 'circle-person:' + str
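
Since the loop indexes with value & 0x3f, _encoding must map each 6-bit value to one printable character. A plausible 64-symbol table (the actual alphabet in the Circle sources may differ):

# Hypothetical alphabet; index i encodes the 6-bit value i.
_encoding = ('abcdefghijklmnopqrstuvwxyz'
             'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
             '0123456789-.')

With settings.name_bytes equal to 16, the loop runs for 128 bits in steps of 6, producing 22 characters after the 'circle-person:' prefix.
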
Example 3
def hash_of(str):
    """Find the true name of a string.

    hash_of returns the md5 digest of str."""
    check.check_is_text(str)    #=@R50

    if type(str) == types.UnicodeType:
        str = str.encode('utf8')
    
    m = md5.new()
    m.update(str)
    ret = m.digest()
    check.check_is_name(ret)  #=@E24
    return ret
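
A quick usage sketch, assuming the check module from the surrounding codebase is importable; an md5 digest is always 16 bytes, which is what check_is_name appears to verify:

name = hash_of('hello world')
assert len(name) == 16                 # md5 digests are 16 bytes
print name.encode('hex')               # '5eb63bbbe01eeed093cb22bb8f5acdc3'
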
Example 4
    def publish(self, names, data, lock, redundancy=settings.cache_redundancy, expiry=None):
        """
        This is a meta-operation: it publishes in the
        cache of redundancy nodes, which will themselves
        publish the data with their own method node.publish
        
        Returns a non-negative integer representing the number of nodes at
        which the data has been stored."""
        
        check.check_matches(names, ['name'])
        check.check_matches(data, {'type': types.StringType})
        check.check_is_name(lock)

        def publish_thread(self, result, names, data, lock, redundancy, expiry):

            # Rebuild the cache pool only if the last refresh was over ten
            # minutes ago and more than half of the pool has been consumed.
            if not (self.cache_pool_last_refresh+10*60 > time.time() or \
                 len(self.cache_pool) > self.cache_pool_original_size/2):

                cache_pool = [ ]

                pipe = self.node.retrieve(self.name, settings.cache_redundancy)

                while len(cache_pool) < 50 and not pipe.finished():
                    list = pipe.read_all()
                    for item in list:
                        check.check_matches(item, ('af_inet_address', 'any'))
                        # Proof: @E6.  Combine with @E10 and the fact that we don't
                        # pass pipe to anything else (nor write to it ourselves
                        # other than through pipe.read_all()) to show that @E6
                        # isn't broken by some other write.

                        if type(item[1]) != types.DictionaryType:
                            # todo: consider printing debug info
                            continue

                        if item[1].get('type','') == 'service data cache':
                            #cache_pool.append((-item[1]['up time'],item[0],item[1]))
                            
                            up_time = item[1].get('up time')

                            if type(up_time) != types.IntType:
                                # todo: consider printing debug info
                                continue

                            # Treat every cache up for over a day as equally
                            # good; the random fraction is a tie-breaker that
                            # spreads load among them.
                            if up_time > 24*60*60:
                                up_time = 24*60*60 + random.random()

                            cache_pool.append((-up_time,item[0],item[1]))
                        
                    yield 'sleep',1

                pipe.stop()
                self.cache_pool = cache_pool
                self.cache_pool.sort()
                self.cache_pool_original_size = len(self.cache_pool)

            if not expiry:
                expiry = self.data['max expiry time']

            if expiry < 0:
                result.append(0)
                return 

            pool = self.cache_pool[:]
            bad_items = [ ]

            pos = 0
            n   = 0
            while n < redundancy:
                if len(pool) <= pos:
                    result.append(n)
                    return 

                try:
                    if len(names) == 1:
                        ticket, template, wait = self.node.call(
                            pool[pos][1],('data cache store',lock,names[0],data,expiry))
                        if wait: yield 'call',(self.node,ticket)
                        dummy_result = self.node.get_reply(ticket, template)
                    else:
                        ticket, template, wait = self.node.call(
                            pool[pos][1],('data cache store multi',lock,names,data,expiry))
                        if wait: yield 'call',(self.node,ticket)
                        dummy_result = self.node.get_reply(ticket, template)
                except Error, error:
                    if error.message != 'already storing':
                        bad_items.append(pool[pos])
                    else:
                        # thomasV: we need to limit amplification
                        # print 'already storing', pool[pos]
                        n = n + 1
                        # end thomasV
                    pos = pos + 1
                    continue
                pos = pos + 1
                n   = n   + 1

            self.cache_pool_lock.acquire()
            try:
                for item in bad_items:
                    if item in self.cache_pool:
                        self.cache_pool.remove(item)
            finally:
                self.cache_pool_lock.release()

            result.append(n)
            return 
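
publish_thread is written as a generator task: instead of blocking, it yields commands such as ('sleep', 1) or ('call', (node, ticket)) to a cooperative scheduler, and it "returns" its value by appending to the shared result list. A toy driver, sketched under the assumption that only the sleep command needs handling:

import time

def run_task(task):
    # Minimal cooperative driver for a generator task. The real Circle
    # scheduler also services ('call', ...) commands by suspending the
    # task until the RPC reply arrives; this sketch only honours sleeps.
    for command, argument in task:
        if command == 'sleep':
            time.sleep(argument)

# result = []
# run_task(publish_thread(self, result, names, data, lock, redundancy, expiry))
# print result[0]   # number of caches that accepted the data
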
Example 5
def key_name(key):
    # A key's name is the hash of its pickled form.
    ret = hash.hash_of(safe_pickle.dumps(key))
    check.check_is_name(ret)  #=@E25
    # Proof: @E24.
    return ret
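
A hedged usage sketch: for simple values, safe_pickle.dumps should be deterministic, so equal keys map to the same 16-byte name:

assert key_name(('identity', 42)) == key_name(('identity', 42))
assert len(key_name('alice')) == 16    # an md5 digest, per hash_of above
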
Example 6
    def check_invar(self):
        check.check_is_name(self.public_key_name)  #=@I22
Example 7
    def check_invar(self):
        check.check_is_name(self.name)  #=@I21
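
Both invariants delegate to check.check_is_name. Judging from hash_of above, a name is a plain 16-byte binary string; a plausible reconstruction of the check (the real check module may differ):

import types

def check_is_name(value):
    # Hypothetical reconstruction: a name is a non-unicode string of
    # exactly 16 bytes (settings.name_bytes in the real code).
    assert type(value) == types.StringType and len(value) == 16, \
        'not a name: %r' % (value,)
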
Example 8
def find_task(daemon, query, input, output, connection):

    import safe_pickle

    for char in "+-_.,?()![]":
        query = query.replace(char, " ")
    query = query.lower()
    list = query.split()
    if list:
        key = list[0]
        pipe = daemon.node.retrieve(hash.hash_of("identity-name " + key), settings.identity_redundancy)
    else:
        # key is used in the "no results" message below, so it must be
        # defined on this path as well.
        key = query
        pipe = daemon.node.retrieve(hash.hash_of("service identity"), settings.identity_redundancy)

    results = []
    while not pipe.finished():

        list = pipe.read_all()
        prev_pair = None
        for pair in list:
            if pair == prev_pair:
                continue
            link, item = prev_pair = pair

            try:
                item = utility.check_and_demangle_item(item)
            except:
                continue

            if item["key"] not in results:
                results.append(item["key"])
                name = hash.hash_of(safe_pickle.dumps(item["key"]))
                check.check_is_name(name)
                str = hash.hash_to_person(name)
                output.write(str + "   " + item["name"] + " (" + utility.force_string(item["human-name"]) + ")\n")

        time.sleep(0.5)
        try:
            output.flush()
        except:
            return

    if not results:
        try:
            output.write('No user matching "' + key + '"')
        except:
            pass
    else:
        if len(results) == 1:
            msg = "1 user found."
        else:
            msg = "%d users found." % len(results)
        output.write(msg)

    pipe.stop()
    try:
        input.close()
        output.close()
        connection.close()
    except:
        # connection reset by peer...
        pass
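
For reference, the normalization at the top of find_task just maps punctuation to spaces before tokenizing; a quick illustration:

query = 'Alice+Smith?'
for char in "+-_.,?()![]":
    query = query.replace(char, ' ')
print query.lower().split()    # ['alice', 'smith']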