def get(self):
    """Fetch (or lazily create) the user for `uid`, refresh profile fields,
    and respond with the user dict plus their game list."""
    uid = self.get_argument('uid', '')
    nickname = self.get_argument('nickname', '')
    picture = self.get_argument('picture', '')
    token = self.get_argument('token', '')
    if not uid:
        self.write(json.dumps({"error":"uid parameter need"}))
        return
    user = User.get_by_id(uid)
    if user is None:
        # First sighting of this uid: create, flag as app user, persist.
        user = User(uid)
        user.prop['isappuser'] = "******"
        user.update(nickname, picture, token)
        user.set()
        RandomColumn.set('user', uid)
    else:
        dirty = False
        if 'isappuser' not in user.prop:
            user.prop['isappuser'] = "******"
            RandomColumn.set('user', uid)
            dirty = True
        if user.update(nickname, picture, token):
            dirty = True
        if dirty:
            # Persist only when something actually changed.
            user.set()
    response = {}
    games = Gamelist.new_or_get_by_id(uid)
    response['me'] = user.to_dict(True)
    response['gamelist'] = games.to_dict(response['me']['id'])
    # Drain any pending queue entries for this user.
    Queue.pop(uid)
    self.write(json.dumps(response))
def add(self, song_id):
    """Attach the current user to the queue row for *song_id*,
    creating that row first when it does not exist yet."""
    song = Song.objects.get(id=song_id)
    user = User.objects.get(id=self.user_id)
    try:
        entry = Queue.objects.get(Song=song)
    except ObjectDoesNotExist:
        entry = Queue(Song=song)
        entry.save()
    entry.User.add(user)
    return song_id
def get(self):
    """Deliver *message* from user *uid* to user *toid*: push-notify the
    recipient, record the message in the (possibly new) game between the two
    users, and queue the event for the recipient to poll."""
    uid = self.get_argument('uid', '')
    toid = self.get_argument('toid', '')
    message = self.get_argument('message', '')
    if uid == '':
        self.write('{"error":"uid parameter need"}')
        return
    if toid == '':
        self.write('{"error":"toid parameter need"}')
        return
    if message == '':
        self.write('{"error":"message parameter need"}')
        return
    me = User.new_or_get_by_id(uid)
    if me is None:
        self.write('{"error":"not found"}')
        # BUG FIX: previously fell through and kept processing after the error.
        return
    o = User.new_or_get_by_id(toid)
    if 'token' in o.prop and o.prop['token'] != '(null)':
        payload = {'aps': {'alert': ''}}
        if 'nickname' in o.prop:
            payload['aps']['alert'] = o.prop['nickname'] + '님이 메세지를 보냈습니다!'
        else:
            # BUG FIX: this generic text used to overwrite the nickname variant
            # unconditionally; it is now only the fallback.
            payload['aps']['alert'] = '누국가가 당신에게 메세지를 보냈습니다!'
        payload['aps']['sound'] = 'jinx.wav'
        send_pushnoti(o.prop['token'], payload)
    gl = Gamelist.new_or_get_by_id(uid)
    glto = Gamelist.new_or_get_by_id(toid)
    gid = gl.get_current_game_id(toid)
    newgame = gid is None
    if newgame:
        gid = Game.make_id(uid, toid)
    g = Game.new_or_get_by_id(gid)
    if g.say(uid, message):
        g.set()
        gl.set_game(g)
        glto.set_game(g)
        if newgame:
            gl.add_current_game(toid, gid)
            glto.add_current_game(uid, gid)
        gl.set()
        glto.set()
        # Notify the recipient's poll queue about the new round.
        Queue.push(uid, gid, toid, message, g.round)
    data = {}
    data['game'] = g.to_dict(uid, True)
    self.write(json.dumps(data))
def main():
    """MoCo-style contrastive pre-training loop.

    f_q is the query encoder (updated by SGD every step); f_k is the key
    encoder (updated slowly via a momentum average of f_q's weights).
    Relies on module-level `input_shape`, `batch_size`, `temp`, `beta`,
    `parse_function`, `Encoder`, `Queue`, `RandomAugmentation`.
    """
    # Query and key feature extractors; start with identical weights.
    f_q = Encoder(input_shape)  # updated every step by the optimizer
    f_k = Encoder(input_shape)  # updated slowly by momentum averaging
    f_k.set_weights(np.array(f_q.get_weights()))
    # Utilities for training.
    optimizer = tf.keras.optimizers.SGD(0.001, momentum=0.9, decay=0.0001)
    trainset = iter(
        tfds.load(name='imagenet_resized/64x64', split=tfds.Split.TRAIN, download=False)
        .repeat(-1)
        .map(parse_function)
        .shuffle(batch_size)
        .batch(batch_size)
        .prefetch(tf.data.experimental.AUTOTUNE))
    checkpoint = tf.train.Checkpoint(f_q=f_q, f_k=f_k, optimizer=optimizer)
    checkpoint.restore(tf.train.latest_checkpoint('checkpoints'))
    log = tf.summary.create_file_writer('checkpoints')
    avg_loss = tf.keras.metrics.Mean(name='loss', dtype=tf.float32)
    # Dictionary of negative keys: pre-fill with 10 batches of features.
    queue = Queue(trainset, f_k, 10)
    augmentation = RandomAugmentation(input_shape, rotation_range=(-10, 10))
    while True:
        x, label = next(trainset)
        # Two independently augmented views of the same batch.
        x_q = augmentation(x)  # x_q.shape = (batch, 64, 64, 3)
        x_k = augmentation(x)  # x_k.shape = (batch, 64, 64, 3)
        with tf.GradientTape() as tape:
            q = f_q(x_q)  # q.shape = (batch, 128)
            k = f_k(x_k)  # k.shape = (batch, 128)
            # Positive logits: dot product of matching (q, k) pairs.
            l_pos = tf.reshape(
                tf.linalg.matmul(tf.reshape(q, (-1, 1, 128)), tf.reshape(k, (-1, 128, 1))),
                (-1, 1))  # (batch, 1)
            # Negative logits: q against every key batch held in the queue.
            l_neg = tf.reshape(
                tf.linalg.matmul(tf.reshape(q, (-1, 1, 128)), queue.get()),
                (-1, 10))  # (batch, 10)
            logits = tf.concat([l_pos, l_neg], axis=1)  # (batch, 11)
            # InfoNCE / contrastive loss: the positive (index 0) is the target.
            loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)(
                tf.zeros((batch_size,)), logits / temp)
        grads = tape.gradient(loss, f_q.trainable_variables)
        avg_loss.update_state(loss)
        # Sanity checks: abort if gradients or encoder outputs go NaN/Inf.
        for grad in grads:
            tf.debugging.Assert(
                tf.math.logical_not(tf.math.reduce_any(tf.math.is_nan(grad))),
                grads + [optimizer.iterations])
            tf.debugging.Assert(
                tf.math.logical_not(tf.math.reduce_any(tf.math.is_inf(grad))),
                grads + [optimizer.iterations])
        tf.debugging.Assert(
            tf.math.logical_not(tf.math.reduce_any(tf.math.is_nan(
                f_q(tf.constant(np.random.normal(size=(1, 64, 64, 3)), dtype=tf.float32))))),
            [optimizer.iterations])
        optimizer.apply_gradients(zip(grads, f_q.trainable_variables))
        tf.debugging.Assert(
            tf.math.logical_not(tf.math.reduce_any(tf.math.is_nan(
                f_q(tf.constant(np.random.normal(size=(1, 64, 64, 3)), dtype=tf.float32))))),
            [optimizer.iterations])
        # Momentum update of the key encoder.
        # BUG FIX: the original assigned into the trainable_variables *list*
        # (f_k.trainable_variables[i] = ...), which does not modify the model's
        # variables at all; each variable must be updated in place via assign().
        for w_k, w_q in zip(f_k.trainable_variables, f_q.trainable_variables):
            w_k.assign(beta * w_k + (1 - beta) * w_q)
        # Replace the oldest key batch in the dictionary with the newest.
        queue.update(k)
        # Periodic logging.
        if tf.equal(optimizer.iterations % 500, 0):
            with log.as_default():
                tf.summary.scalar('loss', avg_loss.result(), step=optimizer.iterations)
            print('Step #%d Loss: %.6f' % (optimizer.iterations, avg_loss.result()))
            avg_loss.reset_states()
        # Periodic checkpoint / model export.
        if tf.equal(optimizer.iterations % 5000, 0):
            checkpoint.save(os.path.join('checkpoints', 'ckpt'))
            if not os.path.exists('models'):
                os.mkdir('models')
            f_k.save(os.path.join('models', 'model.h5'))
def add(self, song_id):
    """Register the current user as a voter on *song_id*'s queue entry;
    the entry is created on first vote."""
    song = Song.objects.get(id=song_id)
    voter = User.objects.get(id=self.user_id)
    try:
        queue_row = Queue.objects.get(Song=song)
    except ObjectDoesNotExist:
        # No queue entry for this song yet — create and persist one.
        queue_row = Queue(Song=song)
        queue_row.save()
    queue_row.User.add(voter)
    return song_id
def add(self, song_id):
    """Add the current user to the queue entry for *song_id*; a missing
    entry is created with a fuzzed creation timestamp."""
    song = Song.objects.get(id=song_id)
    voter = User.objects.get(id=self.user_id)
    try:
        row = Queue.objects.get(Song=song)
    except ObjectDoesNotExist:
        row = Queue(
            Song=song,
            Created=fuzzy_datetime(datetime.now()),
        )
        row.save()
    row.User.add(voter)
    return song_id
def Enqueue_Put(Uri=None, File=None):
    """Create and persist a queued 'put' (upload) entry for *File* at *Uri*.

    Raises StorageError when either argument is missing.
    Returns the saved Queue row.
    """
    # BUG FIX: the error messages previously said 'Enqueue_Get()' even though
    # this function is Enqueue_Put().
    if File is None:
        raise StorageError('Enqueue_Put(): File can not be None')
    if Uri is None:
        raise StorageError('Enqueue_Put(): Uri can not be None')
    Q = Queue()
    Q.status = 'Q'  # queued
    Q.action = 'P'  # put / upload
    Q.uri = Uri
    Q.file = File
    Q.service = Service  # NOTE(review): module-level Service — confirm it is initialized before calls
    Q.uqid = GetUniqueQueueID(Uri)
    Q.save()
    return Q
def push(self, mac_address, action, data):
    """Queue an action for a specific device"""
    entry = Queue.create(mac_address=mac_address, action=action, data=data)
    # NOTE(review): Queue.create usually persists already; the extra save()
    # mirrors the original behaviour and is kept as-is.
    entry.save()
def indexer():
    """Drain at most one document from each Solr work queue
    ('indexing' and 'indexremoval')."""
    index = SolrIndex()
    print("\nChecking queue for docs to be indexed..."),  # trailing comma kept (Py2 no-newline print)
    doc = Queue.one({'_id': 'indexing'})
    if doc and doc['payload'][-1:]:
        # Pop the last payload entry server-side, then index our local copy.
        Queue.collection.update({'_id': 'indexing'}, {'$pop': {'payload': 1}})
        payload = doc['payload'][-1:]
        index.add(payload)
        print(" added!")
    print("\nChecking queue for docs to be removed from idx..."),
    doc = Queue.one({'_id': 'indexremoval'})
    if doc and doc['payload']:
        Queue.collection.update({'_id': 'indexremoval'}, {'$pop': {'payload': 1}})
        payload = doc['payload'].pop()
        index.delete(payload)
        print(" removed!")
def get_queue(query, name=False):
    """Look up a queue row and build a Queue object.

    When ``name`` is True, *query* is the queue's name; otherwise it is the
    qid. Returns None when the lookup fails (the traceback is printed).
    """
    conn = None
    try:
        conn = DBdao.get_connection()
        c = conn.cursor()
        if name:
            c.execute("""SELECT qid, img, range FROM queue WHERE name=?""", (query, ))
            row = c.fetchone()
            return Queue(id=row[0], img=row[1], ranging=row[2], name=query)
        else:
            c.execute("""SELECT name, img, range FROM queue WHERE qid=?""", (query, ))
            row = c.fetchone()
            return Queue(row[0], query, row[1], ranging=row[2])
    except Exception:
        traceback.print_exc(file=sys.stdout)
    finally:
        # BUG FIX: if get_connection() itself raised, `conn` was unbound here
        # and close() raised a NameError that masked the real error.
        if conn is not None:
            conn.close()
def get_queue(id):
    """Fetch the queue named row for *id* and wrap it in a Queue object.

    Returns None on any failure (missing row or DB error) — best-effort.
    """
    conn = None
    try:
        conn = DBdao.get_connection()
        c = conn.cursor()
        c.execute("""SELECT name FROM queue WHERE qid=?""", (id, ))
        return Queue(c.fetchone()[0], id)
    except Exception:
        # Deliberate best-effort: a missing row (fetchone() -> None) or DB
        # failure simply yields None for the caller.
        pass
    finally:
        # BUG FIX: guard against get_connection() failing, in which case
        # `conn` was previously unbound and close() raised a NameError.
        if conn is not None:
            conn.close()
def indexer():
    """Process one pending entry from the 'indexing' queue and one from the
    'indexremoval' queue, if present."""
    solr = SolrIndex()
    print("\nChecking queue for docs to be indexed..."),  # Py2-style trailing comma preserved
    pending = Queue.one({'_id': 'indexing'})
    if pending and pending['payload'][-1:]:
        Queue.collection.update({'_id': 'indexing'}, {'$pop': {'payload': 1}})
        item = pending['payload'][-1:]
        solr.add(item)
        print(" added!")
    print("\nChecking queue for docs to be removed from idx..."),
    pending = Queue.one({'_id': 'indexremoval'})
    if pending and pending['payload']:
        Queue.collection.update({'_id': 'indexremoval'}, {'$pop': {'payload': 1}})
        item = pending['payload'].pop()
        solr.delete(item)
        print(" removed!")
def new_message():
    """Twilio SMS webhook: enqueue the sender and reply with their position."""
    # BUG FIX: request.form is a mapping, not a callable — was request.form().
    body = request.form
    name = body['Body']
    number = body['From']
    message = name + ", you are now in the queue."
    Queue().enqueue(name, number)
    resp = MessagingResponse()
    # BUG FIX: `message_body` was undefined (NameError) — use `name`; and
    # `_queeue` looked like a typo for `_queue` (the attribute used elsewhere
    # in this codebase).
    resp.message("Hello " + name + " you have been added."
                 " There are " + repr(len(Queue()._queue) - 1) + " person in front of you.")
    return str(resp)
def add_photo(message):
    """Telegram handler: attach the received photo to the queue we are
    currently waiting on (module-level `queue_id`)."""
    global queue_id
    if queue_id is None:
        print("Queue id is None")
    else:
        # Take the highest-resolution version of the uploaded photo.
        file_id = message.photo[-1].file_id
        queue = Queue(id=queue_id, img=str(file_id))
        db.update_queue(queue)
        bot.send_photo(message.chat.id, file_id)
        keyboard = telebot.types.InlineKeyboardMarkup()
        roll_button = telebot.types.InlineKeyboardButton(text="Ролл", callback_data=str(queue_id))
        keyboard.add(roll_button)
        bot.send_message(message.chat.id, "Готово", reply_markup=keyboard)
        # Done — stop expecting photos until another queue is selected.
        queue_id = None
def new_queue(message):
    """Create a named queue from the command text and post a 'join' button."""
    try:
        title = " ".join(message.text.split()[1:])
    except Exception as exc:
        print(exc)
        title = ""
    queue = Queue(title)
    if not db._add_queue(queue):
        # Duplicate name — refuse.
        bot.send_message(message.chat.id, "Така черга вже існує")
    else:
        qid = db.get_queue_id(queue)
        keyboard = telebot.types.InlineKeyboardMarkup()
        join_button = telebot.types.InlineKeyboardButton(
            text="Зайняти чергу", callback_data=str(qid))
        keyboard.add(join_button)
        bot.send_message(message.chat.id, "Черга «" + queue.name + "»",
                         reply_markup=keyboard)
def new_queue(message):
    """Create a 'roll' queue from '<cmd> <ranging> <name>' and offer an
    'add photo' button for it."""
    try:
        parts = message.text.split()
        ranging = int(parts[1])
        title = " ".join(parts[2:])
    except Exception as exc:
        print(exc)
        title = ""
        ranging = 0
    queue = Queue(title, ranging=ranging)
    added = db._add_queue(queue)
    qid = queue.id
    keyboard = telebot.types.InlineKeyboardMarkup()
    photo_button = telebot.types.InlineKeyboardButton(
        text="Додати фото", callback_data="img " + str(qid))
    keyboard.add(photo_button)
    if not added:
        bot.send_message(message.chat.id, "Такий ролл уже існує\n")
    else:
        bot.send_message(message.chat.id, "Ролл «" + queue.name + "»",
                         reply_markup=keyboard)
def log_file_analyse():
    """Build an Experiment from the submitted form + uploaded dataset file,
    persist it, and hand it to the background task queue."""
    from models import Experiment, Queue
    print("Name:", request.form['name-input'])
    print("Tags:", request.form['tags-input'])
    print("Notes", request.form['name-input'])
    print("Author", request.form['author-input'])
    print("Dataset alias", request.form['alias-input'])
    print("Dataset:", request.files['datasetInputFile'])
    name = request.form['name-input']
    tags_s = request.form['tags-input']
    # NOTE(review): notes is read from 'name-input', the same field as name —
    # possibly meant to be a dedicated notes field; confirm against the form.
    notes = request.form['name-input']
    authors_s = request.form['author-input']
    alias = request.form['alias-input']
    file = request.files['datasetInputFile']
    tags = tags_s.split(',')
    authors = authors_s.split(',')
    filename = None
    if file.filename == '':
        print("PROOOBLEM!!!!")
        return redirect(url_for("home"))
    if file and allowed_file(file.filename):
        filename = secure_filename(file.filename)
        file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
        print("here")
    experiment = Experiment(name=name, data_file_path=filename, authors=authors,
                            tags=tags, notes=notes, alias=alias)
    task = Queue(experiment)
    db.session.add(experiment)
    db.session.add(task)
    db.session.commit()
    print("New task")
    queue.create_new_task(task)
    return redirect(url_for('queue'))
def get(self):
    """Matchmaking endpoint: enqueue the logged-in user, or — when someone
    is already waiting — pop them and create a Match on a random project."""
    self._current_user = self.require_login()
    if not self._current_user:
        self.response.out.write(json.dumps({"error": "please log in"}))
        return
    q = Queue.all().get()
    if not q:
        q = Queue()
    if str(self._current_user.id) in q.users:
        self.response.out.write(json.dumps({"success": "already in queue"}))
    elif len(q.users) == 0:
        # Nobody waiting: push onto the queue.
        q.users = [self._current_user.id]
        self.response.out.write(json.dumps({"success": "added to queue"}))
    else:
        # Found a match: pop the head of the queue and pair up.
        matched = q.users[0]
        q.users = q.users[1:]
        # Randomly choose a project for the pair.
        projects = Project.all().fetch(1000)
        random.seed()
        project = random.choice(projects)
        # Actually create the match.
        match = Match(project_id=str(project.key()),
                      users=[self._current_user.id, matched],
                      outcome=[0, 0])
        hackers = Hacker.all().filter("user IN", match.users).fetch(8)
        match.hacker_list = [str(h.key()) for h in hackers]
        match.put()
        # Notify the users via socket.
        broadcast(match, json.dumps({"success": "match found"}))
        self.response.out.write("herp")
    q.put()
def add(link): if request.method == 'POST': found = Queue.query.get(link) if found == None: newsong = Queue(songid=link, name=request.form["title"], upvote=1) db.session.add(newsong) db.session.commit() session[link] = "1" return redirect(url_for('queue')) else: try: print session[link] print "already upped" except: print "upvoting" found.upvote += 1 session[link] = "1" db.session.add(found) db.session.commit() return redirect(url_for('queue')) else: return redirect(url_for('queue'))
@app.route('/')
def index():
    # BUG FIX: this view was named `app`, which rebound the module-level
    # Flask `app` object to the function and broke every `@app.route`
    # registered after it.
    return 'helloooooo'

@app.route('/new', methods=['POST'])
def Create():
    """Create a queue ticket ('turno') for the posted user/phone pair."""
    user = request.json.get('user', None)
    phone = request.json.get('phone', None)
    if not user:
        return jsonify({"msg": "name is required"}), 400
    if not phone:
        return jsonify({"msg": "phone is required"}), 400
    turno = Queue()
    # BUG FIX: `cliente` was undefined (NameError) — the posted `user` was meant.
    turno.name = user
    turno.enqueue()
    msg = aq.enqueue(turno)
    return jsonify(msg), 200

@app.route('/new', methods=['GET'])
def dequeue():
    """Pop and return the next ticket in line."""
    turno = aq.dequeue()
    return jsonify({"msg": "Processing next in line", "item": turno}), 200
from flask_cors import CORS
from utils import APIException, generate_sitemap
from admin import setup_admin
from models import db, User, Queue
#from models import Person

app = Flask(__name__)
app.url_map.strict_slashes = False
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DB_CONNECTION_STRING')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
MIGRATE = Migrate(app, db)
db.init_app(app)
CORS(app)
setup_admin(app)

# Single queue instance shared by every request.
queue = Queue()

# Handle/serialize errors like a JSON object
@app.errorhandler(APIException)
def handle_invalid_usage(error):
    return jsonify(error.to_dict()), error.status_code

# generate sitemap with all your endpoints
@app.route('/')
def sitemap():
    return generate_sitemap(app)

@app.route('/user/next', methods=['GET'])
def next_user():
    """Remove the user at the front of the shared queue."""
    queue.dequeue()
    return jsonify({"msg": "success"})
from twilio.rest import Client
import os
from models import Queue, Person

# Shared queue instance (only referenced by the commented-out debug return).
Q = Queue()

def first_function(number, mess):
    """Send the SMS text *mess* to *number* through Twilio.

    Credentials come from the SID_KEV / TOKEN_KEV environment variables.
    """
    account_sid = os.environ.get('SID_KEV')
    auth_token = os.environ.get('TOKEN_KEV')
    client = Client(account_sid, auth_token)
    sms = client.messages.create(
        body=mess,
        from_='+12064660790',
        to=number,
    )
    print(sms.sid)
    # return repr(Q._queue)
load_dotenv #from models import Person app = Flask(__name__) app.url_map.strict_slashes = False app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DB_CONNECTION_STRING') app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False CORS(app) # Handle/serialize errors like a JSON object @app.errorhandler(APIException) def handle_invalid_usage(error): return jsonify(error.to_dict()), error.status_code user = Queue() # generate sitemap with all your endpoints @app.route('/') def sitemap(): return generate_sitemap(app) @app.route('/new', methods=['POST']) def handle_post(): if not request.json.get('name'): return jsonify({"name": "is required"}), 422 if not request.json.get('phone'): return jsonify({"phone": "is required"}), 422 item = { "name": request.json.get('name'), "phone": request.json.get('phone')
""" This module takes care of starting the API Server, Loading the DB and Adding the endpoints """ import os from flask import Flask, request, jsonify, url_for from flask_migrate import Migrate from flask_swagger import swagger from flask_cors import CORS from utils import APIException, generate_sitemap from models import db, Queue queue = Queue() all = queue.get_queue() app = Flask(__name__) app.url_map.strict_slashes = False app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DB_CONNECTION_STRING') app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False MIGRATE = Migrate(app, db) db.init_app(app) CORS(app) # Handle/serialize errors like a JSON object @app.errorhandler(APIException) def handle_invalid_usage(error): return jsonify(error.to_dict()), error.status_code # generate sitemap with all your endpoints @app.route('/') def sitemap(): return generate_sitemap(app)
def get(self):
    """Pop and write out the queued payload for the requested uid."""
    uid = self.get_argument('uid', '')
    self.write(Queue.pop(uid))
FtpHandler = FTP() FtpHandler.connect(FtpData['hostname'], FtpData['port']) FtpHandler.login(FtpData['username'], FtpData['password']) FtpHandler.cwd(FtpData['path']) except error_perm, e: raise StorageError('GetFile(): %s' % str(e)) try: FileSize = FtpHandler.size(FtpData['filename']) except error_perm, e: raise StorageError('GetFile(): Getting filesize: %s' % str(e)) if Queue.file is None: File = RegisterFile(Queue.service, FtpData['filename'], str(FileSize) + 'b') Queue.file = File Queue.save() try: LocalFile = open(Queue.service.localpath + Queue.file.pfilename,'wb') except IOError as e: DeleteFile(File.ufid) raise StorageError('GetFile(): %s [%s]' % (e.strerror, Queue.service.localpath + Queue.file.pfilename)) CBack = CallBack() CBack.localfile = LocalFile CBack.queue = Queue try: FtpHandler.retrbinary('RETR %s' % FtpData['filename'],CBack.Write) except error_perm, e:
""" import os from flask import Flask, request, jsonify, url_for from flask_swagger import swagger from flask_cors import CORS from utils import APIException, generate_sitemap from models import Queue from twilio.rest import Client #from models import Person app = Flask(__name__) app.url_map.strict_slashes = False app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False CORS(app) newQueue = Queue() # Handle/serialize errors like a JSON object @app.errorhandler(APIException) def handle_invalid_usage(error): return jsonify(error.to_dict()), error.status_code # generate sitemap with all your endpoints @app.route('/') def sitemap(): return generate_sitemap(app) @app.route('/new', methods=['POST'])
def getting_all_messages():
    """Expose the raw in-memory queue contents as JSON."""
    contents = Queue()._queue
    return jsonify(contents), 200
from flask_cors import CORS from utils import APIException, generate_sitemap from models import db, Queue from twilio import twiml from twilio.rest import Client from twilio.twiml.messaging_response import Message, MessagingResponse app = Flask(__name__) app.url_map.strict_slashes = False app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DB_CONNECTION_STRING') app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False MIGRATE = Migrate(app, db) db.init_app(app) CORS(app) queue = Queue(mode="FIFO") @app.errorhandler(APIException) def handle_invalid_usage(error): return jsonify(error.to_dict()), error.status_code @app.route('/new', methods=['POST']) def addQueue(): user = request.get_json() queue.enqueue(user) body = request.get_json()