Example #1
def donkey_retrieve_task(pipe, donkey):
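    # Wait (up to 60 seconds) for the donkey helper to finish searching,
    # then ask it to display its results ("vr" on the donkey console),
    # and parse each tab-separated result line into a record that is
    # pushed down the pipe for the "overnet" source.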

    donkey.results = []
    donkey.waiting_results = 1
    donkey.reading_results = 0

    time_0 = time.time()
    while donkey.waiting_results:
        time.sleep(0.3)
        if time.time() - time_0 > 60.0:
            donkey.waiting_results = 0

    donkey.lock.acquire()
    donkey.reading_results = 1
    donkey.lock.release()

    donkey.tell("vr\n")
    time.sleep(3)

    donkey.lock.acquire()
    donkey.reading_results = 0
    donkey.lock.release()

    for item in donkey.results:
        ss2 = item.split("\t")  # all fields
        ss = ss2[0].split(" ", 1)  # name and number, first field
        if len(ss2) > 1:
            pipe.write(
                (
                    ("overnet", donkey),
                    {
                        "keywords": [donkey.key],
                        "length": int(ss2[1]),
                        "type": "file",
                        "mime": utility.force_string(ss2[2].lower().replace("\n", "")),
                        "name": utility.force_string("file:" + ss[0][1:-1]),
                        "index": ss[0][1:-1],
                        "filename": ss[1],
                    },
                )
            )
Example #2
    def set_roots(self, config):
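        # Gather share roots from the config: comma-separated paths in
        # 'public_dir', 'download_dir' and 'private_dir', plus the apt
        # cache when 'publish_apt' is set.  The first private directory
        # is remembered so requests from untrusted peers can be refused.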

        roots = [ ]
        self.private_directory = ''

        if config.get('public_dir','') != '':
            dirs = map(string.strip, string.split(config['public_dir'], ','))
            if dirs:
                roots.extend(dirs)

        if config.get('download_dir','') != '':
            dirs = map(string.strip, string.split(config['download_dir'], ','))
            if dirs:
                roots.extend(dirs)

        if config.get('private_dir','') != '':
            dirs = map(string.strip, string.split(config['private_dir'], ','))
            if dirs:
                self.private_directory = dirs[0]
                roots.extend(dirs)
                
        for i in range(len(roots)):
            roots[i] = (utility.force_string(roots[i]), flags_full)            

        if config.get('publish_apt',''):
            roots.append(('/var/cache/apt/archives',flags_deb))
                
        # avoid redundancy (prefixes) in the roots list
        np_roots = [ ]
        for root in roots:
            ok = 1
            for np_root in np_roots[:]:  # iterate over a copy: we may remove entries
                if utility.is_subdir(root[0], np_root[0]):
                    ok = 0
                elif utility.is_subdir(np_root[0], root[0]):
                    np_roots.remove(np_root)
            if ok:
                np_roots.append(root)

        self.lock.acquire()
        self.roots = np_roots
        self.fresh = 0
        self.lock.release()
Example #3
    def __init__(self, data, directory):
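        # Book-keeping for a single download: byte counter, progress and
        # status fields, plus the destination path under the download
        # directory.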

        utility.Task_manager.__init__(self)
        self.data = data
        self.directory = directory
        self.bytes_downloaded = 0L
        self.comment = ''
        self.remaining_time = 0.0
        self.success = 0
        self.fields = [ ]  # list of chat fields to update
        
        if not directory:
            raise Error('No download directory configured yet.')

        # published filenames may be unicode; force a plain string and keep
        # only the basename when building the local path
        self.filename = os.path.join(
            self.directory,
            os.path.basename(utility.force_string(self.data['filename'])))
        self.basename = os.path.basename(self.filename)
Example #4
    def handle(self, request, address, call_id):
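        # RPC dispatcher: 'download chunk' returns a slice of a published
        # file, 'files available' lists the entries under a shared directory.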
        check.check_matches(request, (types.StringType,))
        check.check_is_af_inet_address(address)

        if request[0] == 'download chunk':
            path, mtime = self.paths.get(request[1],(None,None))
            if not path:
                path, mtime = self.private_paths.get(request[1],(None,None))
                
            try:
                if not path or os.path.getmtime(path) != mtime:
                    return Error("no such file")
                file = open(path, 'rb')
            except (IOError, OSError):
                return Error("no such file")

            if address[0] != '127.0.0.1' \
                   and address not in self.node.trusted_addresses \
                   and self.private_directory != '' \
                   and utility.is_subdir(path, self.private_directory):
                file.close()  # don't leak the open handle when refusing
                return Error("access denied")

            try:
                file.seek(request[2])
                return file.read(request[3])
            finally:
                file.close()

        elif request[0] == 'files available':
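            # request layout (as used below): request[1]:request[2] is the
            # slice of the result list to return; request[3], if present,
            # holds path components relative to the first public root.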

            if len(request) == 3:
                # return the keys of all published files from all
                # directories, regardless of directory structure
                results = self.paths.keys()
            else:
                results = [ ]
                access_denied = 0
                directory_found = 0

                string_request = []
                for part in request[3]:
                    string_request.append(utility.force_string(part))

                if not self.roots:
                    return Error('No public directory')
                request_dir = os.path.join(
                    os.path.abspath(self.roots[0][0]), *string_request)

                if address[0]=='127.0.0.1':
                    flags = flags_local
                else:
                    flags = flags_fast            
                    
                if os.path.exists(request_dir):
                    directory_found = 1

                if address[0]!='127.0.0.1' \
                       and address not in self.node.trusted_addresses\
                       and self.private_directory != ''\
                       and utility.is_subdir(request_dir,self.private_directory):
                    access_denied = 1
                    
                if not directory_found:
                    return Error("no such directory: %s"%request_dir)
                elif access_denied:
                    return Error("access denied")
                
                entry = build_entry(request_dir,None,flags,self)
                
                if entry:
                    if not entry.is_dir:
                        return Error("not a directory: "+request_dir)
                    for file in entry.files:
                        entry_2 = build_entry(file,None,flags,self)
                        if entry_2:
                            info = entry_2.info.copy()
                            info['path'] = request[3] + [ info['filename'] ]
                            results.append(info)

            return results[request[1]:request[2]]
Example #5
def build_directory(path, mtime, flags, server):
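    # Build a Directory entry for one shared directory: record its
    # metadata, optionally hash it and derive searchable keywords from
    # its basename, and collect the non-hidden files it contains.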
    dir = Directory()
    dir.mtime = mtime
    dir.is_dir = 1
    dir.names = [ ]
    basename = os.path.basename(path)

    dir.info = {
        'type'     : 'directory',
        'filename' : basename,
        'length'   : 0L,
        'path'     : string.split(path,'/')
    }
    #print dir.info['path']
    names = os.listdir(path)
    
    #for i in range(len(names)):
    #    names[i] = utility.force_unicode(names[i])

    # Option to limit number of files published
    # TODO: make it work with subdirectories
    if flags['max'] is not None:
        names = names[:flags['max']]

    if flags.get('name'):
        if server.cache.has_key((path,mtime)):
            dir.hash, dir.length = server.cache[(path,mtime)]
        else:
            dir.length = 0
            dir.hash = hash.hash_of(basename)
            server.cache[(path,mtime)] = (dir.hash, dir.length)
            
        dir.info['name'] = dir.hash
        dir.names.append(dir.hash)

    if not flags.get('name'):
        dir.info['local_path'] = path

    text = utility.remove_accents(string.lower(basename))
    keywords = [ ]
    if flags.get('filename'):
        keywords.append(text)
    if flags.get('keywords'):
        for char in '+-_.,?!()[]':
            text = string.replace(text, char, " ")
        keywords.extend(string.split(text))

    dir.info['keywords'] = [ ]

    dir.files = [ ]
    for item in names:
        if item[0] != '.':
            dir.files.append(os.path.join(path,item))

    # For the moment, directories themselves are not published: return
    # here, before the keyword indexing and publishing code below runs.
    return dir

    for word in keywords:
        word=utility.force_string(word)
        if len(word) >= min_search_keyword_len and word not in dir.info['keywords']:
            dir.info['keywords'].append(word)
            if flags.get('name'):
                dir.names.append(hash.hash_of(word))

    # publish directory...
    # todo: publish after all files have been hashed,
    # generate name from their hash
    if flags.get('name'):
        if not server.entries.has_key(path):
            for name in dir.names:
                server.node.publish(name, dir.info)
        elif server.entries[path].mtime != mtime:
            #first unpublish outdated info
            #print "unpublishing outdated dir"
            server.node.unpublish(dir.info)
            for name in dir.names:
                server.node.publish(name, dir.info)

        server.entries[path]    = dir
        server.paths[dir.hash] = (path, dir.mtime)
        server.names[path]      = dir.hash

    return dir
Example #6
def build_file(path, mtime, flags, server):
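    # Build a File entry for one shared file: record its metadata, hash
    # the contents when publishing by name, derive keywords from the
    # filename and any audio tags, then publish the entry on the node.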
    
    file = File()
    file.is_dir = 0
    file.mtime = mtime

    # basename = utility.force_unicode(os.path.basename(path))
    # do not convert to unicode, because published data should not
    # depend on the terminal encoding of the client
    basename = os.path.basename(path)
    
    file.length = os.path.getsize(path)
    file.names = [ ]
    file.info = {
        'type'     : 'file',
        'filename' : basename,
        'length'   : file.length,
    }

    if flags.get('name'):
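        # the content hash doubles as the publish name; it is cached per
        # (path, mtime) so unchanged files are not re-hashed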
        if server.cache.has_key((path,mtime)):
            file.hash, file.length = server.cache[(path,mtime)]
        else:
            try:
                f = open(path,'rb')
                m = md5.new()
                file.length = 0L
                while 1:
                    str = f.read(1<<20)
                    if str == '': break
                    m.update(str)
                    file.length = file.length + len(str)
                f.close()
                file.hash = m.digest()
            except IOError:
                raise Error('bad file')
            server.cache[(path,mtime)] = (file.hash, file.length)
            
        file.info['name'] = file.hash
        file.names.append(file.hash)

    if flags.get('local'):
        file.info['local_path'] = path
    
    text = utility.remove_accents(string.lower(basename))
    keywords = [ ]

    if flags.get('filename'):
        keywords.append(text)

    if flags.get('keywords'):
        for char in '+-_.,?!()[]':
            text = string.replace(text, char, " ")
        keywords.extend(string.split(text))

    if flags.get('mime'):
        tags = {}
        if string.lower(path[-4:]) == '.mp3':
            tags = mp3.mp3_info(path)
        elif string.lower(path[-4:]) == '.ogg':
            tags = mp3.ogg_info(path)
        if tags:
            for (k, v) in tags.items():
                file.info[k] = v

        if file.info.get('music_title'):
            keywords.extend(string.split(
                utility.remove_accents(string.lower(file.info['music_title']))))
        if file.info.get('music_artist'):
            keywords.extend(string.split(
                utility.remove_accents(string.lower(file.info['music_artist']))))

    file.info['keywords'] = [ ]

    if flags.get('mime'):
        import classify
        try:
            information = classify.classifier.information(path)
            for key in information.keys():
                if information[key] is None:
                    # harmless: this file could not be classified
                    continue

                if len(information[key]) >= min_search_keyword_len:
                    file.info[key] = information[key]
        except:
            sys.stderr.write("Exception caught while classifying file: %s\n" % path)

    for word in keywords:
        word=utility.force_string(word)
        if len(word) >= min_search_keyword_len and word not in file.info['keywords']:
            file.info['keywords'].append(word)
            if flags.get('name'):
                file.names.append(hash.hash_of(word))

    # publish immediately...
    if flags.get('name'):
        if not server.entries.has_key(path):
            for name in file.names:
                server.node.publish(name, file.info)
        elif server.entries[path].mtime != mtime:
            #first unpublish outdated info
            print "unpublishing outdated:",path
            server.node.unpublish(file.info)
            for name in file.names:
                server.node.publish(name, file.info)

        server.entries[path]    = file
        server.paths[file.hash] = (path, file.mtime)
        server.names[path]      = file.hash

    return file
Example #7
def find_task(daemon, query, input, output, connection):
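    # Console task: look up identities matching the first word of the
    # query and stream "person-hash   name (human-name)" lines to output.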

    import safe_pickle

    for char in "+-_.,?()![]":
        query = query.replace(char, " ")
    query = query.lower()
    words = query.split()
    if words:
        key = words[0]
        pipe = daemon.node.retrieve(hash.hash_of("identity-name " + key), settings.identity_redundancy)
    else:
        key = ""
        pipe = daemon.node.retrieve(hash.hash_of("service identity"), settings.identity_redundancy)

    results = []
    while not pipe.finished():

        pairs = pipe.read_all()
        prev_pair = None
        for pair in pairs:
            if pair == prev_pair:
                continue
            link, item = prev_pair = pair

            try:
                item = utility.check_and_demangle_item(item)
            except:
                continue

            if item["key"] not in results:
                results.append(item["key"])
                name = hash.hash_of(safe_pickle.dumps(item["key"]))
                check.check_is_name(name)
                person = hash.hash_to_person(name)
                output.write(person + "   " + item["name"] + " (" + utility.force_string(item["human-name"]) + ")\n")

        time.sleep(0.5)
        try:
            output.flush()
        except:
            return

    if not results:
        try:
            output.write('No user matching "' + key + '"')
        except:
            pass
    else:
        if len(results) == 1:
            msg = "1 user found."
        else:
            msg = "%d users found." % len(results)
        output.write(msg)

    pipe.stop()
    try:
        input.close()
        output.close()
        connection.close()
    except:
        # connection reset by peer...
        pass
Example #8
def search_task(daemon, query, input, output, connection):
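    # Console task: search the network for files matching the first word
    # of the query and stream "url \t size \t filename" lines to output.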

    for char in "+-_.,?()![]":
        query = query.replace(char, " ")
    query = query.lower()
    words = query.split()
    if words:
        key = words[0]
    else:
        key = ""

    if len(key) < 3:
        output.write("Keyword %s too short: must be at least 3 characters" % key)
        input.close()
        output.close()
        connection.close()
        return

    pipe = daemon.node.retrieve(hash.hash_of(key))
    results = []
    restricted = 0
    while not pipe.finished() and not restricted:
        for item in pipe.read_all():

            if len(results) >= 100:
                restricted = 1
                break

            if item[1]["name"] not in results:
                results.append(item[1]["name"])
                filename = utility.force_string(item[1]["filename"])
                extension = string.split(string.split(filename, ".")[-1], "-")[0]
                lext = string.lower(extension)
                if lext in ["mp3", "ogg"]:
                    music = 1
                else:
                    music = 0
                if item[1].has_key("music_title"):
                    ref = utility.force_string(item[1]["music_title"])
                    if ref.strip() == "":
                        ref = filename
                else:
                    ref = utility.force_string(item[1]["filename"])

                length = item[1].get("length")
                if not length:
                    sl = ""
                else:
                    sl = utility.human_size(length)
                output.write(hash.hash_to_url(item[1]["name"]) + " \t" + sl + " \t" + filename + "\n")

        time.sleep(0.5)
        try:
            output.flush()
        except:
            return
    if not results:
        try:
            output.write('No document matching "' + key + '"')
        except:
            pass
    else:
        if len(results) == 1:
            msg = "1 file found."
        else:
            msg = "%d files found." % len(results)
        output.write(msg)

    pipe.stop()
    try:
        input.close()
        output.close()
        connection.close()
    except:
        pass