def do_search(request, index, searchform):
    generation_started = time.time()
    sqlecho = debug_virtual_host(request)
    try:
        query = request.GET['q']
    except:
        return render_to_response(index,
            {'form': searchform, 'types': usertypes})
    type = request.GET.get('t', "")
    # offer the type selector with the current choice preselected
    types = []
    for t in usertypes:
        nt = dict(t)
        nt['selected'] = 'selected="selected"' if nt['value'] == type else ""
        types.append(nt)
    try:
        db = connectdb("search")
    except:
        return render_to_response('search/error.html',
            {'form': searchform, 'types': types, 'query': query,
             'error': "Unable to connect to the database."})
    cursor = db.cursor()
    parsedq = QueryParser(query + " " + type)
    if parsedq.geterror() != "":
        return render_to_response('search/error.html',
            {'form': searchform, 'types': types, 'query': query,
             'error': parsedq.geterror()})
    # count the matching files first, to decide on pagination
    sqlcount = cursor.mogrify("""
        SELECT count(*) AS count
        FROM files
        """ + parsedq.sqlcount(), parsedq.getoptions())
    cursor.execute(sqlcount)
    items = int(cursor.fetchone()['count'])
    if items == 0 and sqlecho == 0:
        return render_to_response('search/error.html',
            {'form': searchform, 'types': types, 'query': query,
             'error': "Sorry, nothing found."})
    offset, gobar = offset_prepare(request, items, search_items_per_page)
    parsedq.setoption("offset", offset)
    parsedq.setoption("limit", search_items_per_page)
    # fetch one page of matches
    sqlquery = cursor.mogrify("""
        SELECT share_id, protocol, hostname, port, hostaddr,
               paths.path AS path, files.treedir_id AS dirid,
               files.name AS filename, files.size AS size,
               paths.treepath_id AS path_id, files.pathfile_id AS fileid,
               shares.state
        FROM files
        JOIN paths USING (tree_id, treepath_id)
        JOIN shares USING (tree_id)
        """ + parsedq.sqlwhere() + """
        ORDER BY """ + parsedq.sqlorder() + """,
            files.tree_id, files.treepath_id, files.pathfile_id
        OFFSET %(offset)s LIMIT %(limit)s
        """, parsedq.getoptions())
    cursor.execute(sqlquery)
    res = cursor.fetchall()
    # build template-friendly rows with links into the vfs browser
    result = []
    for row in res:
        newrow = dict()
        utfpath = unicode(row['path'], "utf-8")
        utffile = unicode(row['filename'], "utf-8")
        urlpath = "/" + utfpath if utfpath != "" else ""
        host = row['hostname']
        urlhost = hostname_prepare(request, row['protocol'],
                                   row['hostname'], row['hostaddr'])
        if row['port'] != 0:
            host += ":" + str(row['port'])
            urlhost += ":" + str(row['port'])
        urlproto = protocol_prepare(request, row['protocol'])
        viewargs = [row['protocol'], row['hostname'], row['port']]
        if utfpath != u"":
            viewargs.append(utfpath)
        vfs = reverse('webuguu.vfs.views.share', args=viewargs)
        vfs_offset = int(row['fileid']) / vfs_items_per_page
        newrow['pathlink'] = vfs + "?" + urlencode(dict(
            [('s', row['share_id']), ('p', row['path_id'])] +
            ([('o', vfs_offset)] if vfs_offset > 0 else [])))
        newrow['filename'] = utffile
        if row['dirid'] > 0:
            newrow['type'] = "<dir>"
            newrow['filelink'] = vfs + utffile + "/?" + \
                urlencode({'s': row['share_id'], 'p': row['dirid']})
        else:
            newrow['type'] = ""
            newrow['filelink'] = urlproto + urlhost + \
                urlquote(urlpath + "/" + utffile)
        newrow['path'] = row['protocol'] + "://" + host + urlpath
        newrow['size'] = row['size']
        newrow['state'] = row['state']
        result.append(newrow)
    del row
    del res
    fastselflink = "./?" + urlencode(dict([('q', query), ('t', type)]))
    if parsedq.getoptions()['output'] == "html":
        return render_to_response('search/results.html',
            {'form': searchform,
             'query': query,
             'types': types,
             'results': result,
             'offset': offset,
             'fastself': fastselflink,
             'gobar': gobar,
             'sqlecho': sqlecho,
             'sqlcount': sqlcount,
             'sqlquery': sqlquery,
             'gentime': time.time() - generation_started,
             })
    elif parsedq.getoptions()['output'] == "rss":
        feed = feedgenerator.Rss201rev2Feed(
            title = u"Search Results",
            link = reverse('webuguu.search.views.search'),
            description = u"Results of a query: " + query,
            language = u"en")
        # group consecutive results for the same file name into one feed item
        curdescs = []
        curname = ""
        for file in result:
            if file['filename'] != curname:
                if curname != "":
                    rss_feed_add_item(request, feed, curname,
                                      string.join(curdescs, "<br>"))
                curdescs = []
                curname = file['filename']
            tmpl = Template(
                '<a href="{{r.pathlink|iriencode}}">{{r.path}}</a>' +
                '/<a class="share" href="{{r.filelink|iriencode}}">{{r.filename}}</a>' +
                ' ({{r.size|filesizeformat}} {{r.state}})')
            ctx = Context({'r': file})
            curdescs.append(tmpl.render(ctx))
        if len(curdescs) > 0:
            rss_feed_add_item(request, feed, curname,
                              string.join(curdescs, "<br>"))
        return HttpResponse(feed.writeString('UTF-8'))
    else:
        return render_to_response('search/error.html',
            {'form': searchform, 'types': types, 'query': query,
             'error': "Unsupported output."})

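
# vfs directory browser: renders one page of a directory listing for a scanned
# share. Optional GET parameters (as used below): s (share id), p (path id),
# o (page offset), order ('name' or 'size'), up (levels to climb via path_goup()).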
def share(request, proto, hostname, port, path=""):
    generation_started = time.time()
    try:
        db = connectdb("vfs")
    except:
        return render_to_response('vfs/error.html',
            {'error': "Unable to connect to the database."})
    if proto not in known_protocols:
        return render_to_response('vfs/error.html',
            {'error': "Unsupported protocol: '%s'" % proto})
    cursor = db.cursor()
    try:
        share_id = int(request.GET.get('s', 0))
        path_id = int(request.GET.get('p', 0))
        page_offset = int(request.GET.get('o', 0))
        order = request.GET.get('order')
        goup = int(request.GET.get('up', 0))
        url = dict()
        url['share'] = [('s', share_id)]
        url['order'] = [('order', order)] if order != None else []
    except:
        return render_to_response('vfs/error.html',
            {'error': "Wrong parameters."})
    # detect share
    if share_id != 0:
        cursor.execute("""
            SELECT tree_id, hostaddr, state, last_scan, last_state_change
            FROM shares
            WHERE share_id = %(s)s
                AND protocol = %(pr)s
                AND hostname = %(h)s
                AND port = %(p)s
            """, {'s': share_id, 'pr': proto, 'h': hostname, 'p': port})
        try:
            tree_id, hostaddr, state, scantime, changetime = cursor.fetchone()
        except:
            return HttpResponseRedirect(".")
    else:
        cursor.execute("""
            SELECT share_id, tree_id, hostaddr, state, last_scan, last_state_change
            FROM shares
            WHERE protocol = %(p)s
                AND hostname = %(h)s
                AND port = %(port)s
            """, {'p': proto, 'h': hostname, 'port': port})
        try:
            share_id, tree_id, hostaddr, state, scantime, changetime = cursor.fetchone()
            url['share'] = [('s', share_id)]
        except:
            return render_to_response('vfs/error.html',
                {'error': "Unknown share."})
    if scantime == None:
        return render_to_response('vfs/error.html',
            {'error': "Sorry, this share hasn't been scanned yet."})
    # detect path
    if goup > 0:
        try:
            cursor.execute("SELECT * FROM path_goup(%(t)s, %(p)s, %(l)s)",
                {'t': tree_id, 'p': path_id, 'l': goup})
            path_id, page_offset = cursor.fetchone()
            page_offset = page_offset / vfs_items_per_page
            # update get to open the correct page
            request.GET = request.GET.copy()
            request.GET.update({'o': page_offset})
        except:
            return render_to_response('vfs/error.html',
                {'error': "Wrong parameters."})
    url['path'] = [('p', path_id)]
    url['offset'] = [('o', page_offset)] if page_offset > 0 else []
    if path_id != 0:
        redirect_url = "./?" + urlencode(dict(url['share'] + url['offset']))
        cursor.execute("""
            SELECT path, parent_id, parentfile_id, items, size
            FROM paths
            WHERE tree_id = %(t)s AND treepath_id = %(p)s
            """, {'t': tree_id, 'p': path_id})
        try:
            dbpath, parent_id, parentfile_id, items, size = cursor.fetchone()
            if path != unicode(dbpath, "utf-8"):
                return HttpResponseRedirect(redirect_url)
        except:
            return HttpResponseRedirect(redirect_url)
    else:
        cursor.execute("""
            SELECT treepath_id, parent_id, parentfile_id, items, size
            FROM paths
            WHERE tree_id = %(t)s AND path = %(p)s
            """, {'t': tree_id, 'p': path})
        try:
            path_id, parent_id, parentfile_id, items, size = cursor.fetchone()
            url['path'] = [('p', path_id)]
        except:
            return render_to_response('vfs/error.html',
                {'error': "No such file or directory: '" + path + "'"})
    try:
        path_id = int(path_id)
        parent_id = int(parent_id)
        parentfile_id = int(parentfile_id)
        items = int(items)
        size = int(size)
    except:
        return render_to_response('vfs/error.html',
            {'error': "Seems like directory '%s' has not been scanned properly."
                % path})
    # detect offset in file list and fill offset bar
    offset, gobar = offset_prepare(request, items, vfs_items_per_page)
    # get file list
    orders = {
        'size': """
            SELECT treedir_id AS dirid, created, size, name
            FROM files
            WHERE tree_id = %(t)s AND treepath_id = %(p)s
            ORDER BY size DESC
            OFFSET %(o)s LIMIT %(l)s;
            """,
        'name': """
            SELECT treedir_id AS dirid, created, size, name
            FROM files
            WHERE tree_id = %(t)s AND treepath_id = %(p)s
                AND pathfile_id >= %(o)s
            ORDER BY pathfile_id
            LIMIT %(l)s;
            """
    }
    if not orders.get(order):
        order = 'name'
    cursor.execute(orders[order],
        {'t': tree_id, 'p': path_id, 'o': offset, 'l': vfs_items_per_page})
    # some additional variables for template
    hostaddr = hostname_prepare(request, proto, hostname, hostaddr)
    if port != "0":
        hostname += ":" + port
        hostaddr += ":" + port
    if path != "":
        path = "/" + path
    urlproto = protocol_prepare(request, proto)
    if parent_id != 0:
        uplink_offset = int(parentfile_id) / vfs_items_per_page
        fastuplink = "?" + urlencode(dict(
            url['share'] + [('p', parent_id)] +
            ([('o', uplink_offset)] if uplink_offset > 0 else [])))
    else:
        fastuplink = ""
    fastselflink = "./?" + urlencode(dict(url['share'] + url['path'] + url['order']))
    orderbar = dict()
    orderbar['nontrivial'] = (cursor.rowcount > 1)
    orderbar['order'] = "./?" + urlencode(dict(url['share'] + url['path']))
    orderbar['orders'] = [{'n': k, 's': k == order} for k in orders.keys()]
    return render_to_response('vfs/share.html',
        {'files': cursor.fetchall(),
         'protocol': proto,
         'urlproto': urlproto,
         'urlhost': hostname,
         'urladdr': hostaddr,
         'urlpath': path,
         'items': items,
         'size': size,
         'share_id': share_id,
         'fastup': fastuplink,
         'fastself': fastselflink,
         'offset': offset,
         'gobar': gobar,
         'orderbar': orderbar,
         'state': state,
         'changetime': changetime,
         'scantime': scantime,
         'gentime': time.time() - generation_started,
         'now': datetime.datetime.now(),
         })