def download(self, **args):
    """Serve the result of getDownload() as a CSV attachment.

    getDownload() may return a filesystem path (str) or an in-memory
    buffer (legacy old-style instance or a StringIO); anything else is
    reported as an error string.
    """
    args = self.clean_args(args)
    filepath = self.getDownload(args)
    if isinstance(filepath, str):
        # A path on disk: stream the file directly.
        return serve_file(filepath, "application/x-download", "attachment", name='data.csv')
    if type(filepath).__name__ == "instance":
        # Python 2 old-style instance exposing getvalue() (e.g. cStringIO).
        return serve_fileobj(filepath.getvalue(), "application/x-download", "attachment", name='data.csv')
    if type(filepath).__name__ == "StringIO":
        # StringIO holds text; encode before serving bytes.
        return serve_fileobj(filepath.getvalue().encode('utf-8'), "application/x-download", "attachment", name='data.csv')
    # Fixed typo: "string of buffer" -> "string or buffer".
    return "error downloading file. filepath must be string or buffer"
def silolevel(self, **args):
    # Render the silo fill-level graph as a PNG (rrdtool writes the image to
    # stdout, which is served directly).  Returns None when polling is off or
    # when the reset level/time cannot be read from the settings store.
    if not polling: return None
    try:
        # Silo was last refilled to `reset_level` at `reset_time`
        # (stored as 'dd/mm/yy HH:MM'); convert to a Unix timestamp.
        reset_level=dbus.getItem('silo_reset_level')
        reset_time=dbus.getItem('silo_reset_time')
        reset_time = datetime.strptime(reset_time,'%d/%m/%y %H:%M')
        reset_time = mktime(reset_time.timetuple())
    except:
        return None
    if not cherrypy.request.params.get('maxWidth'):
        maxWidth = '440'; # Default bootstrap 3 grid size
    else:
        maxWidth = cherrypy.request.params.get('maxWidth')
    now=int(time())
    start=int(reset_time)
    # Graph window runs from the last refill up to now.
    RrdGraphString1= "rrdtool graph - --lower-limit 0 --disable-rrdtool-tag --full-size-mode --width %s --right-axis 1:0 --right-axis-format %%1.1lf --height 400 --end %u --start %u "%(maxWidth, now,start)
    RrdGraphString1+=" DEF:a=%s:feeder_time:AVERAGE DEF:b=%s:feeder_capacity:AVERAGE"%(db,db)
    # t = sample timestamp, tt = previous timestamp, i = interval length.
    RrdGraphString1+=" CDEF:t=a,POP,TIME CDEF:tt=PREV\(t\) CDEF:i=t,tt,-"
    # Earlier alignment experiments, kept for reference:
    #RrdGraphString1+=" CDEF:a1=t,%u,GT,tt,%u,LE,%s,0,IF,0,IF"%(start,start,reset_level)
    #RrdGraphString1+=" CDEF:a2=t,%u,GT,tt,%u,LE,3000,0,IF,0,IF"%(start+864000*7,start+864000*7)
    #RrdGraphString1+=" CDEF:s1=t,%u,GT,tt,%u,LE,%s,0,IF,0,IF"%(start, start, reset_level)
    # s1: inject the reset level at the very first sample of the window.
    RrdGraphString1+=" CDEF:s1=t,POP,COUNT,1,EQ,%s,0,IF"%reset_level
    # s: feed consumed per interval = run-time * capacity / 360000 * interval.
    RrdGraphString1+=" CDEF:s=a,b,*,360000,/,i,*"
    RrdGraphString1+=" CDEF:fs=s,UN,0,s,IF"
    # c: running silo content = previous content minus consumption, seeded
    # with the reset level; drawn as a filled area.
    RrdGraphString1+=" CDEF:c=s1,0,EQ,PREV,UN,0,PREV,IF,fs,-,s1,IF AREA:c#d6e4e9"
    print RrdGraphString1
    cmd = subprocess.Popen(RrdGraphString1, shell=True, stdout=subprocess.PIPE)
    cmd.wait()
    cherrypy.response.headers['Pragma'] = 'no-cache'
    return serve_fileobj(cmd.stdout)
def export(self, burst_id):
    """Export the simulation burst identified by *burst_id* as a JSON download."""
    manager = ExportManager()
    payload = manager.export_burst(burst_id)
    filename = "tvb_simulation_" + str(burst_id) + ".json"
    return serve_fileobj(payload, "application/x-download", "attachment", filename)
def export_objects_json(self, objuuids):
    """Serialize the requested inventory objects (comma-separated UUIDs) and
    serve them as a JSON attachment; binary-file objects are skipped."""
    add_message("inventory controller: exporting inventory objects...")
    try:
        coll = Collection("inventory")
        exported = {}
        for uid in objuuids.split(","):
            obj = coll.get_object(uid).object
            if obj["type"] == "binary file":
                continue  # binary payloads are not part of the JSON export
            exported[uid] = obj
            add_message("inventory controller: exported: {0}, type: {1}, name: {2}".format(uid, obj["type"], obj["name"]))
        headers = cherrypy.response.headers
        headers['Content-Type'] = "application/x-download"
        headers['Content-Disposition'] = 'attachment; filename=export.{0}.json'.format(time())
        add_message("INVENTORY EXPORT COMPLETE")
        return serve_fileobj(json.dumps(exported))
    except Exception:
        add_message(traceback.format_exc())
def fetchData(self):
    """Dispatch the parsed OPDS command and serve the appropriate response:
    an image, a file download, raw string data, or the rendered OPDS feed."""
    if self.data == 'OK':
        logger.fdebug('Recieved OPDS command: ' + self.cmd)
        # Commands map to private methods: cmd "Foo" -> self._Foo(**kwargs).
        methodToCall = getattr(self, "_" + self.cmd)
        result = methodToCall(**self.kwargs)
        if self.img:
            # A tuple is (format, raw bytes); otherwise it is a path on disk.
            if type(self.img) == tuple:
                iformat, idata = self.img
                return serve_fileobj(BytesIO(idata), content_type='image/' + iformat)
            else:
                return serve_file(path=self.img, content_type='image/jpeg')
        if self.file and self.filename:
            if self.issue_id:
                # Best effort: mark the issue as read; a missing reading list
                # must not block the download.
                try:
                    logger.fdebug('OPDS is attempting to markasRead filename %s aka issue_id %s' % (self.filename, self.issue_id))
                    readinglist.Readinglist().markasRead(IssueID=self.issue_id)
                except:
                    logger.fdebug('No reading list found to update.')
            return serve_download(path=self.file, name=self.filename)
        if isinstance(self.data, str):
            # Plain string payload (e.g. an error message) is returned as-is.
            return self.data
        else:
            # Otherwise render the OPDS XML feed from the data dict.
            cherrypy.response.headers['Content-Type'] = "text/xml"
            return serve_template(templatename="opds.html", title=self.data['title'], opds=self.data)
    else:
        # self.data carries an error indicator; hand it straight back.
        return self.data
def genfacts(self, low=0, high=12, amount=30, op=''):
    """Generate an arithmetic-facts worksheet, compile it with pdflatex and
    serve the resulting PDF as an attachment.

    low/high/amount arrive as strings from the request, hence the int() casts;
    op selects the operator.
    """
    fs = facts.generatefacts(int(low), int(high), int(amount), op)
    latex.buildlatexfile(fs)
    filename = 'arithmetic.pdf'
    # Argument list instead of shell=True: identical behaviour for this fixed
    # command, but no shell-injection surface.
    call(['pdflatex', 'arithmetic.tex'])
    returnfile = open(filename, 'rb')
    return serve_fileobj(returnfile, disposition='attachment',
                         content_type='application/pdf', name=filename)
def export(self, group_id=None, **kwargs):
    """Render the group-export form; on a valid POST, build and serve the
    encrypted export (GPG-encrypted YAML or a KeePass 1.x database)."""
    form = ExportForm(request_params(), group_id=group_id)
    form.group_id.choices = [(g.id, g.name) for g in groups.list()]
    exporter_choices = [('yaml', 'YAML (GPG/PGP-encrypted)')]
    # KeePass export is only offered when enabled AND its converter exists.
    if config['export.keepass.enabled']:
        if not os.path.exists(config['export.keepass.exe_path']):
            log.error("KeePass export enabled, but specified converter script does not exist: {0}".format(config.get('export.keepass.exe_path')))
        else:
            exporter_choices.append(('kdb', 'KeePass 1.x'))
    form.format.choices = exporter_choices
    if cherrypy.request.method == 'POST':
        if form.validate():
            group = groups.get(form.group_id.data)
            if form.format.data == 'yaml':
                exporter = GpgYamlExporter(use_tags=False,
                                           passphrase=form.passphrase.data,
                                           resource_filters=[model.GroupResource.group_id==group.id])  # @UndefinedVariable
                encrypted_stream = BytesIO()
                exporter.export(stream=encrypted_stream)
                encrypted_stream.seek(0)  # Just to ensure it's rewound
                # Group name is sanitized to filesystem-safe characters.
                return serve_fileobj(encrypted_stream, content_type='application/pgp-encrypted',
                                     disposition='attachment',
                                     name='group-{0}-export.pgp'.format(re.sub('[^\w\-\.]', '_', group.name)))
            elif form.format.data == 'kdb':
                exporter = KeepassExporter(passphrase=form.passphrase.data,
                                           resource_filters=[model.GroupResource.group_id==group.id])  # @UndefinedVariable
                encrypted_stream = BytesIO()
                exporter.export(stream=encrypted_stream)
                encrypted_stream.seek(0)  # Just to ensure it's rewound
                return serve_fileobj(encrypted_stream, content_type='application/x-keepass-database',
                                     disposition='attachment',
                                     name='group-{0}-export.kdb'.format(re.sub('[^\w\-\.]', '_', group.name)))
            else:
                # I don't think we can get here in normal business.
                raise RuntimeError("Unhandled format specified: {0}".format(form.format.data))
        else:
            # does not validate: redisplay the form with errors
            return render("group/export.html", {'form': form})
    else:
        # request method is GET: show the blank form
        return render("group/export.html", {'form': form})
def download(self, **args):
    """Serve the result of getDownload() as a CSV attachment.

    getDownload() may return a filesystem path (str) or a legacy in-memory
    buffer object; anything else is reported as an error string.
    """
    args = self.clean_args(args)
    filepath = self.getDownload(args)
    if isinstance(filepath, str):
        # A path on disk: stream the file directly.
        return serve_file(filepath, "application/x-download", "attachment", name='data.csv')
    if type(filepath).__name__ == "instance":
        # Python 2 old-style instance exposing getvalue() (e.g. cStringIO).
        return serve_fileobj(filepath.getvalue(), "application/x-download", "attachment", name='data.csv')
    # Fixed typo: "string of buffer" -> "string or buffer".
    return "error downloading file. filepath must be string or buffer"
def index(self, id):
    """Render the score image for *id* and serve it as a PNG download."""
    score = genImage.makeScore(id)
    png_buffer = BytesIO()
    score.save(png_buffer, "png")
    png_buffer.seek(0)
    return serve_fileobj(png_buffer, "application/x-download", "attachment", name="pauta.png")
def _serve_result(self, result):
    """Serve a ``(headers, data)`` pair produced by a handler.

    ``data`` may be a ready string body, an open file-like object, or a
    FileResult pointing at a path on disk.  NOTE(review): falls through and
    returns None for any other type -- presumably unreachable; confirm with
    callers.
    """
    hdr, data = result
    cp.response.headers['Content-Type'] = hdr['Content-Type']
    if isinstance(data, basestring):  # Python 2 str/unicode body
        return data
    if is_fileobject(data):
        return serve_fileobj(data)
    if isinstance(data, FileResult):
        return serve_file(data.path)
def file(self, id=None, fmt='tamchy'):
    """Serve a stream either as its .tamchy file (fmt='tamchy') or as an
    M3U playlist pointing at the local HTTP endpoint (fmt='playlist')."""
    stream = self.Client.get(id)
    if stream is None:
        raise cherrypy.HTTPError(404, 'No matching stream')
    if fmt == 'tamchy':
        return serve_fileobj(stream.get_file(), "application/x-download",
                             disposition='attachment', name=stream.name + '.tamchy')
    if fmt == 'playlist':
        endpoint = self.config['HTTP_HOST'] + ':' + str(self.config['HTTP_PORT']) + '/stream/' + id
        playlist = '''#EXTM3U\n#EXTINF:-1, {0}\n{1}'''.format(stream.name, endpoint)
        return serve_fileobj(StringIO(playlist), "application/x-download",
                             disposition='attachment', name=stream.name + '.m3u')
def consumption7d(self):
    """Render last week's consumption bar chart (one bar per day) as a PNG.

    Returns None when polling is disabled.
    """
    if not self.polling:
        return None
    now = int(time())
    # Align to the start of the local day: floor to midnight UTC, then correct
    # by the local-vs-UTC hour offset.  Floor division (//) keeps this integral
    # on Python 3, where plain / would produce floats (identical on Python 2).
    align = int(now) // 86400 * 86400 - (localtime(now).tm_hour - int(now) % 86400 // 3600) * 3600
    RrdGraphString = make_barchart_string(self.db, now, align, 86400, 7, "-", 550, "last week", "kg/day")
    cmd = subprocess.Popen(RrdGraphString + "--height 320", shell=True, stdout=subprocess.PIPE)
    cmd.wait()
    cherrypy.response.headers["Pragma"] = "no-cache"
    return serve_fileobj(cmd.stdout)
def genfacts(self, low=0, high=12, amount=30, op=''):
    """Generate an arithmetic-facts worksheet, compile it with pdflatex and
    serve the resulting PDF as an attachment.

    low/high/amount arrive as strings from the request, hence the int() casts;
    op selects the operator.
    """
    fs = facts.generatefacts(int(low), int(high), int(amount), op)
    latex.buildlatexfile(fs)
    filename = 'arithmetic.pdf'
    # Argument list instead of shell=True: identical behaviour for this fixed
    # command, but no shell-injection surface.
    call(['pdflatex', 'arithmetic.tex'])
    returnfile = open(filename, 'rb')
    return serve_fileobj(returnfile, disposition='attachment',
                         content_type='application/pdf', name=filename)
def consumption24h(self):
    """Render the last 24 hours' consumption bar chart (one bar per hour) as a PNG.

    Returns None when polling is disabled.
    """
    if not self.polling:
        return None
    now = int(time())
    # Floor division keeps the hour alignment integral on Python 3
    # (plain / would yield a float); identical result on Python 2.
    align = now // 3600 * 3600
    RrdGraphString = make_barchart_string(self.db, now, align, 3600, 24, "-", 550, "24h consumption", "kg/h")
    cmd = subprocess.Popen(RrdGraphString + "--height 320", shell=True, stdout=subprocess.PIPE)
    cmd.wait()
    cherrypy.response.headers["Pragma"] = "no-cache"
    return serve_fileobj(cmd.stdout)
def _serveRemoteContent(self, url, method="GET", args=None):
    """Fetch *url* and serve its body with the upstream content type.

    For GET requests, *args* is appended as a query string.  Raises
    cherrypy.HTTPError for any non-200 upstream status.
    """
    if args is None:  # avoid the shared mutable-default-dict pitfall
        args = {}
    if method == "GET" and len(args) > 0:
        url = url + "?" + urllib.urlencode(args)
    content, code, mime = aminopvr.tools.getPage(url, None, method=method, args=args)
    if code == 200:
        # Upstream may have sent gzip-compressed content; forward the header
        # so the client decompresses it.
        if "Content-Encoding: gzip" in str(mime):
            cherrypy.response.headers["Content-Encoding"] = "gzip"
        return serve_fileobj(content, content_type=mime.gettype())
    else:
        raise cherrypy.HTTPError(code)
def render_plugin(self, path, template=template, merge_item_keys=("Video", "Directory"), only_return_items=False):
    """Dispatch *path* to the current Plex plugin and render the result.

    Empty responses may carry a redirect URL in ``title2``; in that case the
    URL is optionally rewritten to the current PMS connection, proxied when it
    is not HTML, or issued as an HTTP redirect.
    """
    content = self.plex_dispatch(path)
    try:
        has_content = int(content["size"]) > 0
    except ValueError:
        has_content = False
    if not has_content:
        redirect = content.get("title2", None)
        # this is basically meant for SZ. title2 can contain a full URL to which we will redirect
        if redirect and self.is_url(redirect):
            if self.connections:
                f = furl(redirect)
                # try finding the current PMS in the link
                is_current_pms = filter(lambda c: c["address"] == f.host or f.host in c["url"],
                                        self.connections)
                if is_current_pms:
                    # use current PMS connection for the link
                    con = furl(self.server_addr)
                    f.host = con.host
                    f.port = con.port
                    redirect = f
                r = requests.get(f)
                # special handling for data: proxy anything that is not HTML
                if r.headers['content-type'] != 'text/html':
                    data = io.BytesIO(r.content)
                    # set headers copied from the upstream response
                    for hdr in ("Content-Type", "Content-Disposition", "Content-Length"):
                        cherrypy.response.headers[hdr] = r.headers[hdr]
                    # serve
                    return static.serve_fileobj(data)
            raise cherrypy.HTTPRedirect(redirect)
        # No usable redirect: warn and go back to plugin selection.
        message("No plugin data returned", "WARNING")
        print("No plugin data returned, returning to plugin selection")
        self.plugin = None
        raise cherrypy.HTTPRedirect(cherrypy.url("/"))
    items = self.merge_plugin_data(content, keys=merge_item_keys)
    if only_return_items:
        return items
    return template.render(data=content, items=items, **self.default_context)
def GET(self, uid=None):
    """Build and serve the GFF3 gene predictions for run *uid* as a download."""
    print(uid)
    # BUG FIX: the original tested the *uuid module* ("if uuid is None"),
    # which is never None, so a missing uid slipped through.  Test the
    # actual parameter instead.
    if uid is None:
        raise cherrypy.HTTPRedirect("/")
    data = collections.OrderedDict()
    with sqlite3.connect(DB_STRING) as db:
        c = db.cursor()
        # Scaffolds first, ordered by name, so the output is deterministic.
        c.execute(
            "select scaffold_name, sequence_data " +
            "from uuid_scaffold where uid = ? order by scaffold_name ",
            (uid,))
        scaffolds = c.fetchall()
        for scaffold in scaffolds:
            data[scaffold[0]] = predictor.ScaffoldData(scaffold[1], [])
            c.execute(
                "select * from uuid_gene where uid = ? and scaffold_name = ? order by start_pos",
                (uid, scaffold[0]))
            genes = c.fetchall()
            for gene in genes:
                data[scaffold[0]][1].append(predictor.GeneData(
                    0, gene[3], gene[4],
                    (gene[5], gene[6], gene[7], gene[8]),
                    (gene[9], gene[10]), gene[11]))
        c.execute("SELECT tot_gc FROM uuid_tot where uid = ?", (uid,))
        gc_percentage = c.fetchone()
    formatter = predictor.GFF3Formatter()
    gff3_formatted = formatter.format_output(data, gc_percentage[0])
    gff3_file = io.StringIO("")
    for line in gff3_formatted:
        print(line, file=gff3_file)
    gff3_file.seek(0)
    return serve_fileobj(
        gff3_file,
        content_type="text/plain; charset=utf-8",
        disposition="attachment",
        name="predictions.gff3")
def GET(self, uid=None):
    """Build and serve the GFF3 gene predictions for run *uid* as a download."""
    print(uid)
    # BUG FIX: the original tested the *uuid module* ("if uuid is None"),
    # which is never None, so a missing uid slipped through.  Test the
    # actual parameter instead.
    if uid is None:
        raise cherrypy.HTTPRedirect("/")
    data = collections.OrderedDict()
    with sqlite3.connect(DB_STRING) as db:
        c = db.cursor()
        c.execute(
            "select scaffold_name, sequence_data " +
            "from uuid_scaffold where uid = ? order by scaffold_name ",
            (uid, ))
        scaffolds = c.fetchall()
        for scaffold in scaffolds:
            data[scaffold[0]] = predictor.ScaffoldData(scaffold[1], [])
            c.execute(
                "select * from uuid_gene where uid = ? and scaffold_name = ? order by start_pos",
                (uid, scaffold[0]))
            genes = c.fetchall()
            for gene in genes:
                data[scaffold[0]][1].append(
                    predictor.GeneData(
                        0, gene[3], gene[4],
                        (gene[5], gene[6], gene[7], gene[8]),
                        (gene[9], gene[10]), gene[11]))
        c.execute("SELECT tot_gc FROM uuid_tot where uid = ?", (uid, ))
        gc_percentage = c.fetchone()
    formatter = predictor.GFF3Formatter()
    gff3_formatted = formatter.format_output(data, gc_percentage[0])
    gff3_file = io.StringIO("")
    for line in gff3_formatted:
        print(line, file=gff3_file)
    gff3_file.seek(0)
    return serve_fileobj(gff3_file,
                         content_type="text/plain; charset=utf-8",
                         disposition="attachment",
                         name="predictions.gff3")
def serve_resource(path):
    """Serve a static resource: from the frozen executable's embedded Win32
    resources when running frozen, otherwise from the resource directory."""
    if main_is_frozen():
        # Embedded resources are keyed by backslash-separated paths.
        path = path.replace("/", "\\")
        if path in RES_ID:  # idiomatic membership test instead of dict.has_key()
            return serve_fileobj(
                StringIO.StringIO(win32api.LoadResource(0, u'RESOURCE', RES_ID[path])),
                content_type=get_content_type(path)
            )
        else:
            raise cherrypy.NotFound
    else:
        return serve_file(os.path.abspath(os.path.join(get_res_dir(), path)))
def export_csv(self, plot, subplot, line):
    """Export a single plot line as a CSV download.

    plot: JSON-encoded Plot; subplot: 1-based subplot index; line: 0-based
    line index within that subplot.
    """
    jplot = json.loads(plot)
    plot: Plot = Plot(**jplot)
    sp = plot.subplots[int(subplot) - 1]
    line = sp.lines[int(line)]
    # Build a filesystem-safe file name from the line's display name.
    filename = ''.join(c if (c.isalnum() or c in '.-_') else '_' for c in line.name) + '.csv'
    stream = io.StringIO()
    # BUG FIX: the original passed the *io module* instead of the buffer,
    # so nothing was ever written to the served object.
    line.export_csv(stream, plot.start, plot.end)
    stream.seek(0)
    return serve_fileobj(stream, str(web.mime.csv), 'attachment', filename)
def consumption1y(self):
    """Render last year's consumption bar chart (one bar per month) as a PNG.

    Returns None when polling is disabled.
    """
    if not self.polling:
        return None
    now = int(time())
    # One average Gregorian month (31556952 s / year).  Floor division keeps
    # the alignment integral on Python 3 (identical result on Python 2).
    month = 31556952 // 12
    align = (
        now // month * month
        - (localtime(now).tm_hour - int(now) % 86400 // 3600) * 3600
    )
    RrdGraphString = make_barchart_string(self.db, now, align, 2628000, 12, "-", 550, "last year", "kg/month")
    cmd = subprocess.Popen(RrdGraphString + "--height 320", shell=True, stdout=subprocess.PIPE)
    cmd.wait()
    cherrypy.response.headers["Pragma"] = "no-cache"
    return serve_fileobj(cmd.stdout)
def download(self):
    """Serve the first extracted video file as a forced download."""
    extractor = cherrypy.engine.nzbdownloader.downloader.extractor
    video_file = self._get_first_video_file(extractor.files)
    if not video_file:
        return 'Not ready!'
    path = video_file['path']
    size = video_file['size']
    wrapper = filewrapper.FileWrapper(path, size)
    return serve_fileobj(wrapper,
                         content_type='application/x-download',
                         content_length=size,
                         disposition='attachment',
                         name=os.path.basename(path))
def serve_resource(path):
    """Serve a static resource: from the frozen executable's embedded Win32
    resources when running frozen, otherwise from the resource directory."""
    if main_is_frozen():
        # Embedded resources are keyed by backslash-separated paths.
        path = path.replace("/", "\\")
        if path in RES_ID:  # idiomatic membership test instead of dict.has_key()
            return serve_fileobj(
                StringIO.StringIO(win32api.LoadResource(0, u"RESOURCE", RES_ID[path])),
                content_type=get_content_type(path),
            )
        else:
            raise cherrypy.NotFound
    else:
        return serve_file(os.path.abspath(os.path.join(get_res_dir(), path)))
def _serveRemoteContent(self, url, method="GET", args=None):
    """Fetch *url* and serve its body with the upstream content type.

    For GET requests, *args* is appended as a query string.  Raises
    cherrypy.HTTPError for any non-200 upstream status.
    """
    if args is None:  # avoid the shared mutable-default-dict pitfall
        args = {}
    if method == "GET" and len(args) > 0:
        url = url + "?" + urllib.urlencode(args)
    content, code, mime = aminopvr.tools.getPage(url, None, method=method, args=args)
    if code == 200:
        # Forward the gzip header so the client decompresses the body.
        if "Content-Encoding: gzip" in str(mime):
            cherrypy.response.headers["Content-Encoding"] = "gzip"
        return serve_fileobj(content, content_type=mime.gettype())
    else:
        raise cherrypy.HTTPError(code)
def video(self):
    """Stream the first extracted video file with a best-effort content type."""
    video_file = self._get_first_video_file(cherrypy.engine.nzbdownloader.downloader.extractor.files)
    if not video_file:
        return 'Not ready!'
    # BUG FIX: os.path.splitext() returns a (root, ext) tuple; the key into
    # mimetypes.types_map is the extension (with leading dot), i.e. [1].
    content_type = mimetypes.types_map.get(os.path.splitext(video_file['path'])[1], None)
    if not content_type:
        # Fallbacks for containers missing from the mimetypes table.
        if video_file['path'].endswith('.mkv'):
            content_type = 'video/x-matroska'
        elif video_file['path'].endswith('.mp4'):
            content_type = 'video/mp4'
    return serve_fileobj(filewrapper.FileWrapper(video_file['path'], video_file['size']),
                         content_type=content_type,
                         content_length=video_file['size'],
                         name=os.path.basename(video_file['path']))
def export(self, format='xlsx'):
    """Export the full sites table in the requested file format as a download."""
    from ...tools.exportdatasets import export_dataframe
    with db.session_scope() as session:
        query = session.query(db.Site)
        sites = pd.read_sql(query.statement, session.bind)
    out = io.BytesIO()
    mime = web.mime.get(format, web.mime.binary)
    out = export_dataframe(out, sites, format, index_label=None)
    name = f'sites-{datetime.datetime.now():%Y-%m-%d}'
    out.seek(0)
    return serve_fileobj(out, str(mime), 'attachment', name + '.' + format)
def consumption(self, **args):
    """Render the 24h consumption bar chart; width adapts to ?maxWidth=.

    Returns None when polling or the consumption graph is disabled.
    """
    if not polling:
        return None
    if consumption_graph:
        # Default to the bootstrap-3 grid width unless the client asked for one.
        maxWidth = cherrypy.request.params.get('maxWidth') or '440'
        now = int(time())
        # Floor division keeps the hour alignment integral on Python 3
        # (identical result on Python 2).
        align = now // 3600 * 3600
        RrdGraphString = make_barchart_string(db, now, align, 3600, 24, '-', maxWidth, '24h consumption', 'kg/h')
        cmd = subprocess.Popen(RrdGraphString, shell=True, stdout=subprocess.PIPE)
        cmd.wait()
        cherrypy.response.headers['Pragma'] = 'no-cache'
        return serve_fileobj(cmd.stdout)
def consumption1m(self):
    """Render the last two months' consumption bar chart (one bar per week) as a PNG.

    Returns None when polling is disabled.
    """
    if not self.polling:
        return None
    now = int(time())
    week = 86400 * 7
    # Shift by 4 days before flooring to a week, then shift back -- presumably
    # to move the epoch's Thursday origin onto a Monday boundary (confirm);
    # the hour term corrects for the local-vs-UTC offset.  Floor division (//)
    # keeps everything integral on Python 3 (identical on Python 2).
    align = (
        int(now + 4 * 86400) // week * week
        - (localtime(now).tm_hour - int(now) % 86400 // 3600) * 3600
        - 4 * 86400
    )
    RrdGraphString = make_barchart_string(
        self.db, time(), align, 86400 * 7, 8, "-", 550, "last two months", "kg/week"
    )
    cmd = subprocess.Popen(RrdGraphString + "--height 320", shell=True, stdout=subprocess.PIPE)
    cmd.wait()
    cherrypy.response.headers["Pragma"] = "no-cache"
    return serve_fileobj(cmd.stdout)
def export(self, plot, fileformat, timeindex, tolerance, grid, interpolation_method, interpolation_limit):
    """Merge all lines of a plot onto one time index and serve the result
    as a single downloadable table.

    TODO: Compare to exportall_csv and RegularExport

    Parameters
    ----------
    plot: The plot as JSON
    fileformat: csv, xlsx, pickle, tsv, json
    tolerance: in seconds (pandas Timedelta string; empty means '0s')
    timeindex: as_first, all_timesteps, regular, or an integer line index
    grid: the time grid used when timeindex == 'regular'
    interpolation_method / interpolation_limit: passed through to merge_series

    Returns
    -------
    The serialized dataframe as an attachment named after the plot.
    """
    from ..tools.exportdatasets import merge_series, export_dataframe
    if fileformat not in ('xlsx', 'csv', 'tsv', 'pickle', 'json', 'msgpack'):
        raise web.HTTPError(500, 'Unknown fileformat: ' + fileformat)
    plot_dict = web.json.loads(plot)
    plot: Plot = Plot(**plot_dict)
    # Flatten all lines from all subplots and load their data series.
    lines = [line for lines in plot.subplots for line in lines]
    series = [line.load(plot.start, plot.end) for line in lines]
    # Convert timeindex to int if possible (an int selects a line's own index)
    timeindex = web.conv(int, timeindex, timeindex)
    # If timeindex is 'regular', replace with time grid
    if timeindex == 'regular':
        timeindex = grid
    try:
        tolerance = pd.Timedelta(tolerance or '0s')
        interpolation_limit = web.conv(int, interpolation_limit)
        dataframe = merge_series(series, timeindex, tolerance, interpolation_method, interpolation_limit)
    except Exception as e:
        # Merge problems are user-visible: bounce back to the plot page.
        raise web.redirect(url='/plot', error=str(e))
    buffer = io.BytesIO()
    mime = web.mime.get(fileformat, web.mime.binary)
    buffer = export_dataframe(buffer, dataframe, fileformat)
    plotname = plot.name or f'export-{datetime.now():%Y-%m-%d_%H-%M}'
    buffer.seek(0)
    return serve_fileobj(buffer, str(mime), 'attachment', plotname + '.' + fileformat)
def video(self):
    """Stream the first extracted video file with a best-effort content type."""
    video_file = self._get_first_video_file(
        cherrypy.engine.nzbdownloader.downloader.extractor.files)
    if not video_file:
        return 'Not ready!'
    # BUG FIX: os.path.splitext() returns a (root, ext) tuple; the key into
    # mimetypes.types_map is the extension (with leading dot), i.e. [1].
    content_type = mimetypes.types_map.get(
        os.path.splitext(video_file['path'])[1], None)
    if not content_type:
        # Fallbacks for containers missing from the mimetypes table.
        if video_file['path'].endswith('.mkv'):
            content_type = 'video/x-matroska'
        elif video_file['path'].endswith('.mp4'):
            content_type = 'video/mp4'
    return serve_fileobj(filewrapper.FileWrapper(video_file['path'], video_file['size']),
                         content_type=content_type,
                         content_length=video_file['size'],
                         name=os.path.basename(video_file['path']))
def export_objects_zip(self, objuuids):
    """Export the given inventory objects (comma-separated UUIDs) as a zip
    download containing inventory.json plus one .bin per binary file."""
    add_message("inventory controller: exporting inventory objects...")
    try:
        collection = Collection("inventory")
        inventory = {}
        dstuuids = []
        for objuuid in objuuids.split(","):
            current = collection.get_object(objuuid)
            inventory[objuuid] = current.object
            # Binary files additionally contribute their datastore payload.
            if current.object["type"] == "binary file":
                dstuuids.append(current.object["sequuid"])
            add_message("inventory controller: exported: {0}, type: {1}, name: {2}".format(objuuid, current.object["type"], current.object["name"]))
        cherrypy.response.headers['Content-Type'] = "application/x-download"
        cherrypy.response.headers['Content-Disposition'] = 'attachment; filename=export.{0}.zip'.format(time())
        # Assemble the zip entirely in memory (Python 2 StringIO).
        mem_file = StringIO.StringIO()
        with zipfile.ZipFile(mem_file, mode = 'w', compression = zipfile.ZIP_DEFLATED) as zf:
            zf.writestr('inventory.json', json.dumps(inventory))
            for dstuuid in dstuuids:
                zf.writestr('{0}.bin'.format(dstuuid), buffer(DatastoreFile(dstuuid).read()))
        #create_inventory_export_event(Collection("users").find(sessionid = cherrypy.session.id)[0], objuuids.split(","))
        add_message("INVENTORY EXPORT COMPLETE")
        return serve_fileobj(mem_file.getvalue())
    except Exception:
        # Best effort: surface the traceback in the message log instead of a 500.
        add_message(traceback.format_exc())
def stringio(self):
    """Serve a small in-memory text payload as text/plain."""
    body = StringIO.StringIO('Fee\nfie\nfo\nfum')
    return static.serve_fileobj(body, content_type='text/plain')
def download(self, mydownload):
    """Fetch the S3 key named *mydownload* and serve its contents as an attachment."""
    self.k.key = mydownload
    payload = self.k.get_contents_as_string()
    return static.serve_fileobj(payload,
                                disposition='attachment',
                                content_type=mydownload,
                                name=mydownload)
def bytesio(self):
    """Serve a short byte payload from an in-memory buffer as text/plain."""
    payload = io.BytesIO(b'Fee\nfie\nfo\nfum')
    return static.serve_fileobj(payload, content_type='text/plain')
def fileobj(self):
    """Serve the stylesheet through an already-open binary file object."""
    handle = open(os.path.join(curdir, 'style.css'), 'rb')
    return static.serve_fileobj(handle, content_type='text/css')
def serve_fileobj_utf8_filename(self):
    """Serve an attachment whose suggested filename contains non-ASCII characters."""
    body = '☃\nfie\nfo\nfum'.encode('utf-8')
    return static.serve_fileobj(io.BytesIO(body),
                                disposition='attachment',
                                name='has_utf-8_character_☃.html')
def bytesio(self):
    """Serve a short byte payload from an in-memory buffer as text/plain."""
    payload = ntob('Fee\nfie\nfo\nfum')
    return static.serve_fileobj(BytesIO(payload), content_type='text/plain')
def download(self):
    """Serve the first extracted video file as a forced download."""
    files = cherrypy.engine.nzbdownloader.downloader.extractor.files
    video_file = self._get_first_video_file(files)
    if not video_file:
        return 'Not ready!'
    wrapper = filewrapper.FileWrapper(video_file['path'], video_file['size'])
    return serve_fileobj(wrapper,
                         content_type='application/x-download',
                         content_length=video_file['size'],
                         disposition='attachment',
                         name=os.path.basename(video_file['path']))
def image(self, **args):
    # Render the main rrdtool line graph as a PNG.  The visible time window
    # (timeChoice), a backwards offset (time) and paging (direction) are kept
    # in the CherryPy session so the view survives reloads.
    if not polling: return None
    if len(colorsDict) == 0: return None
    # ?timeChoice=xx selects a window length; remember its index in the session.
    try:
        timeChoice = args['timeChoice']
        timeChoice = timeChoices.index(timeChoice)
        cherrypy.session['timeChoice'] = timeChoice
    except:
        pass
    try:
        timeChoice = cherrypy.session['timeChoice']
        seconds=timeSeconds[timeChoice]
    except:
        seconds=timeSeconds[0]
    # Set time offset with ?time=xx
    # NOTE(review): the local name `time` shadows the time() function below
    # in this scope; the function is not called again after this point.
    try:
        time = int(args['time'])
        # And save it in the session
        cherrypy.session['time'] = str(time)
    except:
        try:
            time = int(cherrypy.session['time'])
        except:
            time = 0
    # ?direction=left/right pages the window back/forward by one window length,
    # clamping at "now" (offset 0).
    try:
        direction = args['direction']
        if direction == 'left':
            time=time+seconds
        elif direction == 'right':
            time=time-seconds
            if time<0: time=0
        cherrypy.session['time']=str(time)
    except:
        pass
    # Validate ?maxWidth=; fall back to the bootstrap-3 default when absent
    # or non-numeric.
    try:
        graphWidth = args.get('maxWidth')
        test = int(graphWidth) # should be int
    except:
        graphWidth = '440' # Default bootstrap 3 grid size
    graphTimeStart=str(seconds + time)
    graphTimeEnd=str(time)
    #Build the command string to make a graph from the database
    graph_file='-'
    # A duplicate right-hand axis only fits on wide graphs.
    if int(graphWidth)>500:
        rightaxis = '--right-axis 1:0'
    else:
        rightaxis = ''
    RrdGraphString1 = "rrdtool graph "+ graph_file + ' --disable-rrdtool-tag' +\
        " --lower-limit 0 %s --full-size-mode --width "%rightaxis + graphWidth + \
        " --height 400 --end now-" + graphTimeEnd + "s --start now-" + graphTimeStart + "s " + \
        "DEF:tickmark=%s:_logtick:AVERAGE TICK:tickmark#E7E7E7:1.0 "%db
    # One LINE per polled data source, unless the session hides it ('no')
    # or no colour is configured for it (Python 2 dict.has_key).
    for key,value in polldata:
        if cherrypy.session.get(value)!='no' and colorsDict.has_key(key):
            RrdGraphString1=RrdGraphString1+"DEF:%s="%value+db+":%s:AVERAGE LINE1:%s%s:\"%s\" "% (ds_names[key], value, colorsDict[key], value)
    cmd = subprocess.Popen(RrdGraphString1, shell=True, stdout=subprocess.PIPE)
    cmd.wait()
    cherrypy.response.headers['Pragma'] = 'no-cache'
    return serve_fileobj(cmd.stdout)