def most_recent():
    """Return timeseries rows as minified JSON.

    If a ``since`` query parameter is present it is parsed as a timestamp
    and only the filtered query is run; otherwise all rows are returned.
    """
    if "since" in request.args:
        # Caller-supplied lower bound, e.g. ?since=2016-01-07T12:00:00
        d = request.args.get("since")
        ts = dateutil.parser.parse(d)
        cur.execute(SQL_filtered, (ts,))
    else:
        cur.execute(SQL_unfiltered)
    data = cur.fetchall()
    # List comprehension instead of map/lambda: matches refresh_timeseries()
    # and guarantees minify() receives a concrete list (map is a lazy
    # iterator under Python 3).
    return jsonify(minify([row[0] for row in data]))
def refresh_timeseries():
    """
    Refreshes timeseries data S3 cache for use with the main graph
    (i.e. viewers, chats, emotes, total donation number/amount)
    """
    query = """
    SELECT row_to_json(r) r
    FROM (SELECT * FROM gdq_timeseries ORDER BY time ASC) r;
    """
    cur.execute(query)
    rows = cur.fetchall()
    payload = json.dumps(minify([row[0] for row in rows]))
    # Push the serialized series to S3 with a short freshness window.
    s3.Bucket(BUCKET).put_object(
        Key="latest.json",
        Body=payload,
        ContentType="application/json",
        Expires=datetime.utcnow() + timedelta(minutes=10),
    )
    return payload
def page(p):
    """Render the full HTML document for page *p* and return it as a string.

    Head, navigation, sidebar, page body and footer are assembled from
    template parts; static sections are passed through utils.minify().
    """
    # Python 3 print() replaces the original Python-2-only print statement.
    print("building page", p.human_name)

    # <head>: title, favicon, styles (LESS in dev, compiled CSS otherwise),
    # JS libraries and analytics.  Built as a list and joined once instead
    # of repeated += concatenation.
    head = [
        '<!DOCTYPE html><html>',
        '<head>',
        '<title>{{sn}} | ' + p.human_name + '</title>',
        parts.load('favicon'),
        parts.load('bootstrap_less') if config.use_less else parts.load('bootstrap_css'),
        parts.load('js_libs'),
        parts.load('google_analytics'),
        '</head><body>',
    ]
    # Minify everything we have so far in one pass, as the original did.
    s = utils.minify(''.join(head))

    # Body: wrapper > nav > bg-wrapper > sidebar + container > page content.
    body = [
        '<div id="wrapper">',
        utils.minify(nav_html()),
        '<div id="bg-wrapper">',
        sidebar(p),
        '<div class="container" style="width: 800px;">',
        '<div id="page-body">',
        p.breadcrumb(),
        utils.md(p.content),
        utils.minify(comments(p)),
        '</div>',  # end content
        '</div>',  # end container
        '</div>',  # end bg wrapper
        '<div class="push"></div>',
        '</div>',  # end main wrapper
        utils.minify(parts.load('footer')),
        '</body>',
        '</html>',
    ]
    return s + ''.join(body)
def handle(self, *args, **options):
    """Upload hashed static files to the configured storage bucket.

    Management-command entry point: validates the PUBLISH_* settings,
    then for each changed static file optionally minifies and gzips it,
    uploads it via the storage engine, and finally persists the set of
    published file names to a JSON manifest.
    """
    from django.conf import settings
    static_root = settings.STATIC_ROOT
    # Hashed (manifest) filenames are required so unchanged files can be
    # detected and skipped between runs.
    if settings.STATICFILES_STORAGE != "django.contrib.staticfiles.storage.ManifestStaticFilesStorage":
        raise ImproperlyConfigured(
            "The publishstatic command only works "
            "properly if STATICFILES_STORAGE is set to "
            "'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'."
        )
    # Get config
    self.storage_name = settings.PUBLISH_STORAGE_ENGINE
    if not self.storage_name:
        raise ImproperlyConfigured("Please set PUBLISH_STORAGE_ENGINE in "
                                   "your settings file.")
    if options["quiet"]:
        # Silence anything printed to sys.stdout for the rest of the run.
        sys.stdout = BytesIO()
    if options["bucket"]:
        bucket_name = options["bucket"]
    else:
        bucket_name = settings.PUBLISH_BUCKET_NAME or settings.PUBLISH_ROOT
    if not bucket_name:
        raise ImproperlyConfigured(
            "Please set a bucket name for the static"
            " files with either --bucket or the setting AWS_BUCKET_NAME"
        )
    if options["overwrite"]:
        # Drop the previous manifest so every file is uploaded again.
        self.delete_json()
    storage = Storage.factory(self.storage_name, bucket_name)
    self.stdout.write("Using storage engine: {}".format(storage))
    directory = options.get("directory") or ""
    files_to_upload = self.determine_files_to_upload()
    published_files = self.load_json()
    for filename in files_to_upload:
        # Optional shell-style filter, e.g. --pattern '*.css'.
        if options["pattern"] and not fnmatch(filename, options["pattern"]):
            continue
        filetype, encoding = mimetypes.guess_type(filename)
        filetype = filetype or "application/octet-stream"
        full_path = os.path.join(static_root, filename)
        # Labels for the transformations applied to this file (for logging).
        states = set()
        with open(full_path, "rb") as handle:
            stream = BytesIO(handle.read())
        if options["minify"]:
            stream, state = minify(filename, filetype, stream)
            if state != "":
                states.add(state)
        # Set default headers
        headers = {"Content-Type": filetype,
                   "Cache-Control": "max-age=%d" % (3600 * 24 * 365)}
        # Gzip if compressable
        if options["gzip"] is True and filetype in COMPRESSIBLE:
            headers, stream = gzip_content(headers, stream)
            states.add("gzipped")
        self.stdout.write("{} {}".format(filename, list(states) if states else ""))
        # Send to S3
        storage.upload(stream, os.path.join(directory, filename), headers)
        published_files.add(filename)
    self.save_json(published_files)
    # NOTE(review): the count below includes files skipped by --pattern;
    # confirm whether that is intended.
    if len(files_to_upload) == 0:
        self.stdout.write("No files changed on {} storage.".format(self.storage_name))
    else:
        self.stdout.write(
            "Successfully uploaded {} files to {} bucket {}.".format(
                len(files_to_upload), self.storage_name, bucket_name
            )
        )
def handle(self, *args, **options):
    """Publish hashed static files to the configured storage bucket.

    Validates the PUBLISH_* settings, then uploads each changed static
    file (optionally minified and gzipped) and persists the set of
    published file names to the JSON manifest.
    """
    from django.conf import settings

    static_root = settings.STATIC_ROOT
    required = ('django.contrib.staticfiles.storage.'
                'ManifestStaticFilesStorage')
    if settings.STATICFILES_STORAGE != required:
        raise ImproperlyConfigured(
            "The publishstatic command only works properly if "
            "STATICFILES_STORAGE is set to "
            "'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'.")

    self.storage_name = settings.PUBLISH_STORAGE_ENGINE
    if not self.storage_name:
        raise ImproperlyConfigured(
            "Please set PUBLISH_STORAGE_ENGINE in your settings file.")

    if options['quiet']:
        # Silence anything printed to sys.stdout for the rest of the run.
        sys.stdout = BytesIO()

    # --bucket wins; otherwise fall back to the settings chain.
    bucket_name = (options['bucket']
                   or settings.PUBLISH_BUCKET_NAME
                   or settings.PUBLISH_ROOT)
    if not bucket_name:
        raise ImproperlyConfigured(
            "Please set a bucket name for the static files with either "
            "--bucket or the setting AWS_BUCKET_NAME")

    if options['overwrite']:
        # Drop the previous manifest so every file is uploaded again.
        self.delete_json()

    storage = Storage.factory(self.storage_name, bucket_name)
    self.stdout.write("Using storage engine: {}".format(storage))

    target_dir = options.get('directory') or ''
    files_to_upload = self.determine_files_to_upload()
    published_files = self.load_json()
    pattern = options['pattern']

    for name in files_to_upload:
        # Optional shell-style filter, e.g. --pattern '*.css'.
        if pattern and not fnmatch(name, pattern):
            continue

        content_type = (mimetypes.guess_type(name)[0]
                        or 'application/octet-stream')
        with open(os.path.join(static_root, name), 'rb') as fh:
            stream = BytesIO(fh.read())

        # Labels for the transformations applied to this file (for logging).
        applied = set()
        if options['minify']:
            stream, state = minify(name, content_type, stream)
            if state != '':
                applied.add(state)

        headers = {
            'Content-Type': content_type,
            'Cache-Control': 'max-age=%d' % (3600 * 24 * 365),
        }
        if options['gzip'] is True and content_type in COMPRESSIBLE:
            headers, stream = gzip_content(headers, stream)
            applied.add('gzipped')

        self.stdout.write(
            "{} {}".format(name, list(applied) if applied else ''))
        storage.upload(stream, os.path.join(target_dir, name), headers)
        published_files.add(name)

    self.save_json(published_files)

    if files_to_upload:
        self.stdout.write(
            'Successfully uploaded {} files to {} bucket {}.'.format(
                len(files_to_upload), self.storage_name, bucket_name))
    else:
        self.stdout.write(
            'No files changed on {} storage.'.format(self.storage_name))