def unpublished():
    """Render the admin listing of unpublished, non-deleted postcards.

    Selects postcards that are not deleted and whose ``published`` flag is
    False or NULL, oldest first, paginates them, and patches the larger
    ("small" size) thumbnail info onto each item for the template.
    """
    # NOTE: the == False / == None comparisons are intentional SQLAlchemy
    # idiom — they are overloaded to emit the proper SQL (IS NULL etc.);
    # `is None` would not build a WHERE clause.
    base_query = (
        Postcard.query
        .filter(Postcard.deleted == False)
        .filter(db.or_(Postcard.published == False,
                       Postcard.published == None))
        .options(db.subqueryload('tags'))
        .order_by(db.asc(Postcard.date)))
    pagination = build_pagination(base_query)

    # Patch in larger thumbnails: the template reads front_thumb/back_thumb
    # dicts taken from the "small" section of the stored image-info JSON.
    for postcard in pagination.items:
        info = {}
        # Fixed: direct attribute access instead of a needless getattr()
        # with a constant attribute name.
        jsoninfo = postcard.json_image_info
        if jsoninfo:
            info = json.loads(jsoninfo).get('small', {})
        postcard.front_thumb = info.get('front', {})
        postcard.back_thumb = info.get('back', {})

    return render_template(
        'unpublished.html',
        url_base='//s3.amazonaws.com/' + app.config['S3_BUCKET'] + '/',
        DEFAULT_THUMB='noimage-large.png',
        DEFAULT_THUMB_WIDTH=215,
        DEFAULT_THUMB_HEIGHT=215,
        pagination=pagination,
        current_page=".unpublished",
    )
def unpublished():
    """Render the admin page of postcards awaiting publication.

    Lists postcards that are not deleted and not yet published (``published``
    False or NULL), in ascending date order, with eager-loaded tags.  Each
    paginated item gets ``front_thumb``/``back_thumb`` dicts attached from
    the "small" entry of its stored image-info JSON.
    """
    base_query = (Postcard.query
                  # SQLAlchemy overloads ==/!= to build SQL; keep == False
                  # and == None (they become boolean tests / IS NULL).
                  .filter(Postcard.deleted == False)
                  .filter(db.or_(Postcard.published == False,
                                 Postcard.published == None))
                  .options(db.subqueryload('tags'))
                  .order_by(db.asc(Postcard.date)))
    pagination = build_pagination(base_query)

    # Patch in larger thumbnails for the listing template.
    for postcard in pagination.items:
        info = {}
        # Fixed: plain attribute access replaces getattr() with a literal
        # attribute name.
        jsoninfo = postcard.json_image_info
        if jsoninfo:
            info = json.loads(jsoninfo).get('small', {})
        postcard.front_thumb = info.get('front', {})
        postcard.back_thumb = info.get('back', {})

    return render_template(
        'unpublished.html',
        url_base='//s3.amazonaws.com/' + app.config['S3_BUCKET'] + '/',
        DEFAULT_THUMB='noimage-large.png',
        DEFAULT_THUMB_WIDTH=215,
        DEFAULT_THUMB_HEIGHT=215,
        pagination=pagination,
        current_page=".unpublished",
    )
def generate_jsonp():
    """Regenerate and upload the public JSONP feeds of published postcards.

    Uploads to S3:
      * ``postcardsN.js``        -- fixed-size chunks of postcard data
      * ``postcards-latest.js``  -- the newest chunk plus a chunk index
      * ``postcards-all.js``     -- every postcard in one file
    """
    query = (Postcard.query.filter_by(published=True, deleted=False)
             .order_by(db.asc(Postcard.id)))

    all_postcards = []
    for postcard in query:
        # Skip postcards whose thumbnail info has not been generated yet.
        if not postcard.json_image_info:
            continue
        image_info = json.loads(postcard.json_image_info)
        all_postcards.append(dict(
            id=postcard.id,
            date=str(postcard.date),
            country=postcard.country,
            latitude=str(postcard.latitude),
            longitude=str(postcard.longitude),
            images=image_info,
        ))

    # Commit any changes made while iterating (kept for parity with the
    # image-generating variant of this job, which mutates postcards here).
    db.session.commit()

    # Upload the chunk files, remembering each chunk's (first, last) id so
    # clients can map a postcard id to its chunk via the index.
    index = {}
    for chunk_id, chunk in enumerate(chunks(all_postcards, CHUNK_SIZE)):
        json_data = json.dumps(dict(chunk_id=chunk_id, postcards=chunk))
        upload_to_s3('postcards%d.js' % chunk_id,
                     'postcardCallback%d(%s)' % (chunk_id, json_data),
                     'application/javascript')
        # Fixed: renamed from `range`, which shadowed the builtin.
        id_range = (chunk[0]["id"], chunk[-1]["id"])
        index[chunk_id] = id_range

    # File containing latest postcards and an index mapping ids to chunks.
    data = dict(total_postcard_count=len(all_postcards),
                index=index,
                postcards=all_postcards[-CHUNK_SIZE:])
    json_data = json.dumps(data)
    upload_to_s3('postcards-latest.js',
                 'postcardCallback(%s)' % json_data,
                 'application/javascript')

    # All the postcards in one file.
    data = dict(postcards=all_postcards)
    json_data = json.dumps(data)
    upload_to_s3('postcards-all.js',
                 'postcardCallback(%s)' % json_data,
                 'application/javascript')
def handle_queued_jobs(filter=None):
    """Drain the queued-job table, oldest first.

    Jobs are fetched one at a time in ascending id order, optionally
    restricted to a single handler name.  A job whose handler raises is
    logged and left in the table; successful jobs are deleted.  The session
    is committed either way and the scan advances past the job's id, so a
    failing job cannot wedge the loop.
    """
    last_id = 0
    while True:
        # Build the query for the next unprocessed job.
        pending = (QueuedJob.query
                   .filter(QueuedJob.id > last_id)
                   .order_by(db.asc(QueuedJob.id)))
        if filter:
            pending = pending.filter(QueuedJob.handler == filter)
        try:
            job = pending.limit(1).one()
        except sqlalchemy.orm.exc.NoResultFound:
            break  # queue exhausted

        try:
            _handle_queued_job(job)
        except Exception:
            # Leave the failed job in place for inspection; keep going.
            traceback.print_exc()
        else:
            db.session.delete(job)
        db.session.commit()
        last_id = job.id
def handle_queued_jobs(filter=None):
    """Process every queued job in id order, optionally for one handler.

    Each job is fetched and handled individually.  Handler exceptions are
    printed and the job is retained; otherwise the job row is deleted.
    The session commits after every job and the cursor moves past its id,
    so one bad job never blocks the rest of the queue.
    """
    last_id = 0
    while True:
        query = QueuedJob.query.filter(QueuedJob.id > last_id)
        if filter:
            query = query.filter(QueuedJob.handler == filter)
        # first() is limit(1) plus a None result when the queue is empty —
        # equivalent to the limit(1).one()/NoResultFound pattern.
        job = query.order_by(db.asc(QueuedJob.id)).first()
        if job is None:
            break

        try:
            _handle_queued_job(job)
        except Exception:
            # Best-effort: report the failure and move on.
            traceback.print_exc()
        else:
            db.session.delete(job)
        db.session.commit()
        last_id = job.id
def _generate_image_info(postcard, dimensions):
    """Build the ``{size: {side: {filename, width, height}}}`` dict for a
    postcard, creating resized copies of its front/back images on the way.
    """
    image_info = {}
    for size, max_dims in dimensions.items():
        image_info[size] = {}
        for side in ('front', 'back'):
            full_image_url = getattr(postcard, side)
            if not full_image_url:
                # Side has no image uploaded; leave it out of the info dict.
                continue
            filename, (width, height) = make_smaller_version_of_image(
                full_image_url, max_dims)
            image_info[size][side] = dict(filename=filename,
                                          width=width,
                                          height=height)
    return image_info


def generate_jsonp():
    """Generate missing thumbnails, then upload the public JSONP feeds.

    For each published, non-deleted postcard lacking ``json_image_info``,
    resized "small" and "full" images are created and the info is stored on
    the postcard.  Then uploads to S3:
      * ``postcardsN.js``        -- fixed-size chunks of postcard data
      * ``postcards-latest.js``  -- the newest chunk plus a chunk index
      * ``postcards-all.js``     -- every postcard in one file
    """
    dimensions = dict(small=(215, 215), full=(800, 800))
    query = (Postcard.query.filter_by(published=True, deleted=False)
             .order_by(db.asc(Postcard.id)))

    all_postcards = []
    for postcard in query:
        # Make sure the images are in place, generating them if needed.
        if not postcard.json_image_info:
            image_info = _generate_image_info(postcard, dimensions)
            postcard.json_image_info = json.dumps(image_info)
        else:
            image_info = json.loads(postcard.json_image_info)

        all_postcards.append(dict(
            id=postcard.id,
            date=str(postcard.date),
            country=postcard.country,
            latitude=str(postcard.latitude),
            longitude=str(postcard.longitude),
            images=image_info,
        ))

    # Persist any json_image_info written above.
    db.session.commit()

    # Upload the chunk files, remembering each chunk's (first, last) id so
    # clients can map a postcard id to its chunk via the index.
    index = {}
    for chunk_id, chunk in enumerate(chunks(all_postcards, CHUNK_SIZE)):
        json_data = json.dumps(dict(chunk_id=chunk_id, postcards=chunk))
        upload_to_s3('postcards%d.js' % chunk_id,
                     'postcardCallback%d(%s)' % (chunk_id, json_data),
                     'application/javascript')
        # Fixed: renamed from `range`, which shadowed the builtin.
        id_range = (chunk[0]["id"], chunk[-1]["id"])
        index[chunk_id] = id_range

    # File containing latest postcards and an index mapping ids to chunks.
    data = dict(total_postcard_count=len(all_postcards),
                index=index,
                postcards=all_postcards[-CHUNK_SIZE:])
    json_data = json.dumps(data)
    upload_to_s3('postcards-latest.js',
                 'postcardCallback(%s)' % json_data,
                 'application/javascript')

    # All the postcards in one file.
    data = dict(postcards=all_postcards)
    json_data = json.dumps(data)
    upload_to_s3('postcards-all.js',
                 'postcardCallback(%s)' % json_data,
                 'application/javascript')
def generate_jsonp():
    """Create any missing postcard thumbnails and upload the JSONP feeds.

    Published, non-deleted postcards without stored image info get "small"
    and "full" resized images generated and recorded.  The collected data
    is then uploaded to S3 as chunked ``postcardsN.js`` files, a
    ``postcards-latest.js`` file (newest chunk + chunk index), and a
    ``postcards-all.js`` file with everything.
    """
    dimensions = dict(small=(215, 215), full=(800, 800))
    query = Postcard.query.filter_by(published=True, deleted=False).order_by(
        db.asc(Postcard.id))

    all_postcards = []
    for postcard in query:
        # Make sure the images are in place.
        if not postcard.json_image_info:
            image_info = {}
            for size in ('small', 'full'):
                image_info[size] = {}
                for side in ('front', 'back'):
                    full_image_url = getattr(postcard, side)
                    if not full_image_url:
                        continue  # no image for this side
                    img_data = make_smaller_version_of_image(
                        full_image_url, dimensions[size])
                    filename, (width, height) = img_data
                    image_info[size][side] = dict(filename=filename,
                                                  width=width,
                                                  height=height)
            postcard.json_image_info = json.dumps(image_info)
        else:
            image_info = json.loads(postcard.json_image_info)

        # Add the postcard data.
        data = dict(id=postcard.id,
                    date=str(postcard.date),
                    country=postcard.country,
                    latitude=str(postcard.latitude),
                    longitude=str(postcard.longitude),
                    images=image_info)
        all_postcards.append(data)

    # Commit any json_image_info written above.
    db.session.commit()

    # Output the chunks, indexing each chunk by its (first, last) id.
    index = {}
    for chunk_id, chunk in enumerate(chunks(all_postcards, CHUNK_SIZE)):
        json_data = json.dumps(dict(chunk_id=chunk_id, postcards=chunk))
        upload_to_s3('postcards%d.js' % chunk_id,
                     'postcardCallback%d(%s)' % (chunk_id, json_data),
                     'application/javascript')
        # Fixed: renamed from `range`, which shadowed the builtin.
        id_range = (chunk[0]["id"], chunk[-1]["id"])
        index[chunk_id] = id_range

    # File containing latest postcards and an index mapping ids to chunks.
    data = dict(total_postcard_count=len(all_postcards),
                index=index,
                postcards=all_postcards[-CHUNK_SIZE:])
    json_data = json.dumps(data)
    upload_to_s3('postcards-latest.js',
                 'postcardCallback(%s)' % json_data,
                 'application/javascript')

    # All the postcards in one file.
    data = dict(postcards=all_postcards)
    json_data = json.dumps(data)
    upload_to_s3('postcards-all.js',
                 'postcardCallback(%s)' % json_data,
                 'application/javascript')