def on_post(req, resp):
    """Create a new ticket from the JSON request body.

    Expects a JSON object with ``ticket_type`` and ``message`` keys.
    On success the response carries the new ticket's id and status;
    if ticket creation raises ValueError, a failure payload with the
    reason is returned instead.
    """
    payload = json.loads(req.stream.read().decode('utf-8'))
    kind = payload.get('ticket_type')
    text = payload.get('message')
    try:
        ticket = Ticket.create_new(kind, text)
        with session() as db:
            db_repo = Repo(db)
            ticket.ticket_id = db_repo.save_new_ticket(ticket)
            db.commit()
        body = {'id': ticket.ticket_id, 'status': ticket.status}
    except ValueError as error:
        body = {'status': 'failed', 'reason': str(error)}
    resp.body = json.dumps(body, sort_keys=True, indent=4)
def delete(self, obj=None, mode='upimage'):
    """Delete an upimage or reply record, then remove its files on disk.

    :param obj: model carrying the target ``id`` and the ``delkey``
        supplied by the user; falsy values abort with False.
    :param mode: 'upimage' deletes a thread image, 'reply' a reply.
    :return: True if the row is gone after commit, else False.
    """
    if not obj:
        return False
    # Unknown mode used to fall through and crash with UnboundLocalError
    # on `res`; reject it explicitly instead.
    if mode not in ('upimage', 'reply'):
        return False
    delkey = obj.delkey
    # Use the session as a context manager (matches the other handlers)
    # so the connection is released even if the repository raises.
    with session() as con:
        repo = Repository(con)
        if mode == 'upimage':
            obj = repo.get_upimage(id=obj.id)
            obj.delkey = delkey
            repo.delete_upimage(obj)
        else:
            obj = repo.get_reply(id=obj.id)
            obj.delkey = delkey
            repo.delete_reply(obj)
        con.commit()
        # Re-fetch to confirm the row is really gone before touching files.
        if mode == 'upimage':
            res = repo.get_upimage(obj.id)
        else:
            res = repo.get_reply(obj.id)
    if res:
        return False
    if obj.img:
        # NOTE(review): assumes obj.thumb exists whenever obj.img does —
        # confirm; os.remove raises if either file is already missing.
        os.remove(os.path.join('contents/static/upload/', obj.img))
        os.remove(os.path.join('contents/static/upload/', obj.thumb))
    return True
def add_commit_object(self, obj, mode=''):
    """Persist *obj* through the repository and commit the session.

    An empty *mode* stores it as an upimage; 'reply' stores it as a
    reply. Returns whatever the repository add call returned.
    """
    con = session()
    repo = Repository(con)
    if mode == '':
        added = repo.add_upimage(obj)
    elif mode == 'reply':
        added = repo.add_reply(obj)
    con.commit()
    return added
def post(self):
    """Validate the submitted greeting and persist it.

    Re-renders the form via ``get`` on validation or save failure;
    redirects to the list page on success.
    """
    greeting = Greeting()
    valid = (self.try_update_model(greeting)
             and self.validate(greeting, greeting_validator))
    if not valid:
        return self.get(greeting)
    with session() as db:
        repo = Repository(db)
        if not repo.add_greeting(greeting):
            self.error('Sorry, can not add your greeting.')
            return self.get(greeting)
        db.commit()
    return self.see_other_for('list')
def get(self, upimage=None):
    """Render the paginated upimage list page.

    :param upimage: optional pre-filled form model (e.g. after a
        failed post); a fresh UpImage is used when omitted.
    """
    page = self.route_args.get('page', 1)
    con = session()
    repo = Repository(con)
    total = repo.get_count()
    page_count = self.pagecount(total, SELECT_LIMIT)
    items = repo.list_upimages(page)
    form_model = upimage if upimage else UpImage()
    response = self.render_response(
        'list.mako',
        upimages=items,
        upimage=form_model,
        count=total,
        page=page,
        pages=page_count)
    response.cache_dependency = ('d_list', )
    return response
def on_put(req, resp, ticket_id):
    """Update a ticket's status from the JSON request body.

    The payload's ``status`` key is mapped through Ticket.STATUSES.
    Note: no explicit commit here; presumably change_ticket_status
    handles persistence.
    """
    payload = json.loads(req.stream.read().decode('utf-8'))
    status_name = payload.get('status')
    with session() as db:
        tickets = Repo(db)
        target = tickets.list_ticket(int(ticket_id))
        tickets.change_ticket_status(
            ticket=target,
            status=Ticket.STATUSES[status_name])
    resp.body = json.dumps(
        {'id': ticket_id, 'status': status_name},
        sort_keys=True, indent=4)
def on_get(req, resp, ticket_id):
    """Look up a ticket by id and serialize it into the response body.

    Responds with ``{"status": "not found"}`` when no ticket matches.
    """
    with session() as db:
        ticket = Repo(db).list_ticket(int(ticket_id))
        if ticket is None:
            body = {'status': 'not found'}
        else:
            # Attributes are read while the session is still open.
            body = {
                'id': ticket.ticket_id,
                'ticket_type': ticket.ticket_type,
                'status': Ticket.find_status_name(ticket.status),
            }
    resp.body = json.dumps(body, sort_keys=True, indent=4)
def private_prediction(X):
    """Run the private model on X ten times, tracing every step.

    Each sess.run is executed with full tracing; the metadata is
    written to TensorBoard and dumped as a Chrome trace (.ctr) file.
    Returns the decoded prediction from the final iteration.
    """
    X = X.reshape(shape_X)
    with session() as sess:
        summary_writer = tf.summary.FileWriter(TENSORBOARD_DIR, sess.graph)
        trace_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
        trace_metadata = tf.RunMetadata()

        def dump_trace(tag):
            # Persist both the TensorBoard run metadata and a Chrome trace.
            summary_writer.add_run_metadata(trace_metadata, tag)
            chrome_trace = timeline.Timeline(
                trace_metadata.step_stats).generate_chrome_trace_format()
            with open('{}/{}.ctr'.format(TENSORBOARD_DIR, tag), 'w') as f:
                f.write(chrome_trace)

        sess.run(tf.global_variables_initializer(),
                 options=trace_options, run_metadata=trace_metadata)
        dump_trace('init')

        sess.run(cache_updators,
                 options=trace_options, run_metadata=trace_metadata)
        dump_trace('populate')

        for i in range(10):
            y_pred = sess.run(reveal(y),
                              feed_dict=encode_input((input_x, X)),
                              options=trace_options,
                              run_metadata=trace_metadata)
            dump_trace('predict-{}'.format(i))

        y_pred_private = decode_output(y_pred)
        summary_writer.close()
    return y_pred_private
def get(self):
    """Build the site's Atom feed from the five newest upimages."""
    con = session()
    repo = Repository(con)
    CONTENT_TYPE_XML = 'text/xml'
    CONTENT_TYPE_XML_RSP = CONTENT_TYPE_XML + '; charset=utf-8'
    base_url = 'http://192.168.72.100:8080' if DEBUG else 'http://shoboi.net'
    feed = feedgenerator.Atom1Feed(
        title='しょぼいろだ。',
        link='http://shobi.net/',
        feed_url='http://shoboi.net/atom',
        description=u'エロも笑いも虹も惨事もしょぼいろだで共有してね',
        author_name=u'しょぼい。',
        language=u"ja",
        pubdate=datetime.utcnow())
    for position, item in enumerate(repo.list_upimages()):
        feed.add_item(
            title=item.title or 'タイトルなし',
            link='%s/detail/%s' % (base_url, item.id),
            description="""<![CDATA[ <a href="%s/detail/%s"> <img src="%s/img/%s"> </a> ]]>""" % (base_url, item.id, base_url, item.thumb),
            author_name=item.author or '名無し',
            pubdate=datetime.now())
        # Only the five newest entries go into the feed.
        if position >= 4:
            break
    response = HTTPResponse()
    response.headers[0] = ('Content-Type', CONTENT_TYPE_XML_RSP)
    response.write(feed.writeString('utf-8'))
    response.cache_dependency = ('d_atom', )
    return response
def post(self):
    """Handle image-board uploads.

    The first uploaded image opens a new thread; any further images
    in the same request are attached to that thread as replies. Each
    image is validated individually; on failure the list page is
    re-rendered with the offending model. On success the list cache
    dependency is invalidated and the client is redirected.
    """
    print(self.request.environ)  # NOTE(review): debug output left in place
    if not self.validate_xsrf_token():
        return self.redirect_for(self.route_args.route_name)
    upimage = UpImage()
    # No file attached: validate the bare form and re-render on error.
    if not self.request.files.get('img[]'):
        if (not self.try_update_model(upimage)
                or not self.validate(upimage, upimage_validator)):
            cached.dependency.delete('d_list')
            return self.get(upimage)
    count = len(self.request.files['img[]'])
    thread_id = None
    for idx, img in enumerate(self.request.files['img[]']):
        upimage = self.imgbbs_preprocess(upimage, img)
        if (not self.try_update_model(upimage)
                or not self.validate(upimage, upimage_validator)):
            return self.get(upimage)
        # (Removed a dead per-iteration `session()`/`Repository` pair
        # that was never used — add_commit_object opens its own.)
        # First image opens the thread.
        if idx == 0:
            upimage = self.add_commit_object(upimage)
            if not upimage:
                self.error('Sorry, can not add your image.')
                cached.dependency.delete('d_list')
                return self.get(upimage)
            thread_id = upimage.id
        # Remaining images become replies on the thread just created.
        if idx > 0 and count > 1:
            upimage.parent_id = thread_id
            if not self.add_commit_object(upimage, mode='reply'):
                self.error('Sorry, can not add your image.')
                cached.dependency.delete('d_detail')
                cached.dependency.delete('d_list')
                # BUG FIX: this used to be `self.get(reply)` — `reply`
                # is never defined in this scope and raised NameError.
                return self.get(upimage)
    cached.dependency.delete('d_list')
    return self.see_other_for('list')
def get(self, reply=None):
    """Render the detail page for a single upimage with its replies.

    :param reply: optional pre-filled reply form model; a fresh Reply
        bound to this thread is used when omitted.
    """
    thread_id = self.route_args.get('id')
    con = session()
    repo = Repository(con)
    upimage = repo.get_upimage(thread_id)
    if not upimage:
        # Unknown id: render 404 and drop stale cached pages.
        not_found = self.render_response('errors/http404.mako')
        cached.dependency.delete('d_list')
        cached.dependency.delete('d_detail')
        return not_found
    replies = repo.get_replies(thread_id)
    form_reply = reply or Reply()
    if not form_reply.parent_id:
        form_reply.parent_id = upimage.id
    response = self.render_response(
        'detail.mako',
        upimage=upimage,
        reply=form_reply,
        replies=replies)
    response.cache_dependency = ('d_detail', )
    return response
def clear_database():
    """Wipe all data via the repository and commit the change."""
    with session() as db:
        Repo(db).clear_database()
        db.commit()
def get(self):
    """Render the greeting list page."""
    with session() as db:
        all_greetings = Repository(db).list_greetings()
        return self.render_response('list.html', greetings=all_greetings)
# writer.add_run_metadata(run_metadata, 'prediction-{}'.format(i)) # chrome_trace = timeline.Timeline(run_metadata.step_stats).generate_chrome_trace_format() # with open('{}/{}.ctr.json'.format(TENSORBOARD_DIR, 'prediction-{}'.format(i)), 'w') as f: # f.write(chrome_trace) # writer.close() # return Y ############################## # Run # ############################## with session() as sess: writer = tf.summary.FileWriter(TENSORBOARD_DIR, sess.graph) run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE) run_metadata = tf.RunMetadata() print 'Distributing...' for batch_index, (batch_x, batch_y) in enumerate(zip(batches_x, batches_y)): sess.run( distribute_x, feed_dict=dict([ (input_xi, Xi) for input_xi, Xi in zip(input_x, decompose(encode(batch_x))) ]), options=run_options, run_metadata=run_metadata