def collect():
    ''' Execute inside user container and update database.

    Flow: validate the JSON request, resolve the requesting user by IP,
    load the target script, run it in the user's container, then settle
    material rewards/penalties depending on whether the run "has heart".
    '''
    requires = ["fileid"]
    if not request.json:
        return jsonify({"msg": "not json"}), 400
    # Reject the request if any required JSON field is missing/empty.
    for req in requires:
        if not request.json.get(req):
            return jsonify({"msg": "no {}".format(req)}), 400
    fileid = request.json['fileid']
    user_ip = request.remote_addr
    user = db.get_user_by_ip(user_ip)
    if user is None:
        # IP no longer maps to a known user session.
        return jsonify({"msg": "ip switched"})
    user.touch()  # refresh last-seen timestamp
    if user.form == 'ghost':
        # Ghosts are locked out of collecting until restored.
        return jsonify({"msg": "you're a ghost"})
    script = db.get("Script", fileid)
    if script is None:
        return jsonify({"msg":"script not found"})
    filename = script.filename
    text = script.filetext
    row = script.row
    col = script.col
    # "Bad" (high-material) scripts penalize the collector on failure.
    is_bad_file = script.material >= 20
    author = db.get("User", script.user_id)
    if author.id == user.id:
        return jsonify({"msg": "script is yours"})
    if script.has_collected(user.id):
        return jsonify({"msg": "you've already collected this script"})
    container = nest.load_container(user.id)
    file_obj = create_file(user_ip, filename, text, row, col)
    result = nest.run_file(user.id, file_obj)
    result['filename'] = filename;
    result['filetext'] = text;
    # NOTE(review): `== None` / `== False` should be `is None` / `is False`.
    if result["has_heart"] == None or result["has_heart"] == False:
        # Run failed: user becomes a ghost; bad files also transfer material
        # from the collector to the author.
        user.form = 'ghost'
        if is_bad_file:
            author.add_material(user.pay_material(script.material))
    else:
        # Run succeeded: restore form and pay out material.
        user.form = user.character
        script.collect(user.id)
        if is_bad_file:
            user.add_material(script.material)
        else:
            user.add_material(script.material)
            author.add_material(script.material)
    db.save()
    ret = user.to_dict()
    del ret["ip"]  # never leak the user's IP to the client
    return jsonify({"result": result, "user": ret})
def logRealtime():
    """Snapshot current analog and digital GPIO readings to
    /tmp/hydrod/realtime.json and return a confirmation string.

    Fix: the JSON document was previously assembled by string
    concatenation, which produced invalid JSON for any channel name
    containing a quote/backslash, and the file handle was not closed on a
    write error.  Build a dict and serialize with json.dumps instead.
    """
    import json  # local import: file's top-of-file imports are not visible here
    global data
    payload = {"analog": {}, "digital": {}}
    # Analog section: every active channel that is not a digital output.
    result = db.get("SELECT gpio_id, name FROM hc_gpio WHERE NOT gpio_type='dout' AND active=1")
    for (gpio_id, name) in result:
        # Values were always emitted as quoted strings; keep str() for parity.
        payload["analog"][name] = str(getattr(data, gpio_id))
    # Digital-output section.
    result = db.get("SELECT gpio_id, name FROM hc_gpio WHERE gpio_type='dout' AND active=1")
    for (gpio_id, name) in result:
        payload["digital"][name] = str(getattr(data, gpio_id))
    # `with` guarantees the handle is closed even if the write raises.
    with open('/tmp/hydrod/realtime.json', 'w') as f:
        f.write(json.dumps(payload))
    return ("Realtime data logged\n")
def initial_dp_list():
    """Build [id, name, number_items_dp, max inventory] rows for every
    product whose revenue/inventory records pass selection().

    NOTE(review): iterates ids 2..19 (xrange(2, 20)), so id 1 is skipped —
    a near-identical helper elsewhere in this file starts at 1; confirm
    which range is intended.
    """
    arr = []
    for i in xrange(2, 20):
        # d.get returns JSON text; type(...) wraps the parsed dict in an
        # ad-hoc class so fields read as attributes (rev.id, inv.maxi, ...).
        rev = type('revenue', (object, ), json.loads(d.get(db_name, 'revenue', i)))
        inv = type('inventory', (object, ), json.loads(d.get(db_name, 'inventory', i)))
        decision = selection(inv, rev)
        if (decision == 1):
            arr.append(rev.id)
    print arr  # debug output
    ar = []
    for i in xrange(0, len(arr)):
        y = []
        q = type('product', (object, ), json.loads(d.get(db_name, 'product', arr[i])))
        y.append(q.id)
        y.append(str(q.name))
        y.append(q.number_items_dp)
        y.append(q.inventory['maxi'])
        ar.append(y)
    print ar  # debug output
    return ar
def generate_db_issues_from_pdfs(year, month, day):
    # Insert one `issues` row per PDF found in PDFS_DIR, assigning each
    # successive PDF the next publication date in a twice-monthly
    # (1st/15th) schedule starting from (year, month, day).
    with app.app_context():
        for pdf in os.listdir(PDFS_DIR):
            number = pdf.split('.')[0]  # issue number = filename sans extension
            date = '{}-{:02}-{:02}'.format(year, month, day)
            db.query('INSERT INTO issues (number, date) VALUES (?, ?)', (number, date))
            # Advance (year, month, day) to the next issue date.
            if day == 15 and month == 12:
                year += 1
            if month == 1 or month == 8:
                # NOTE(review): January and August appear to publish only
                # one issue — the month advances immediately; confirm.
                day = 1
                month += 1
            else:
                if day == 15:
                    if month == 12:
                        month = 1
                    else:
                        month += 1
                # Toggle between the 1st and the 15th of the month.
                if day == 1:
                    day = 15
                else:
                    day = 1
        db.get().commit()
def run(imname, equip, rec_cmd, ipaddr):
    """Start the recording container for `equip`, mapping a host port to
    container port 3000, and record the allocation in portdb.

    Returns [flag, hostport]; flag is True only when `docker run`
    succeeded and the port table was updated.

    Fixes: the failure branch previously logged via the root `logging`
    module instead of the function's logger AND still returned
    flag=True, so callers could not detect a failed start.
    """
    imname = registry_host + '/' + imname
    if version != 'x86_64':
        imname = imname + '-arm'  # non-x86 hosts use the ARM image variant
    logger = logging.getLogger("TCPServer.buildDock.run")
    # SECURITY NOTE: these SQL statements are built by string concatenation;
    # if `equip`/`ipaddr` can carry untrusted input, switch db.get/db.exec
    # to parameterized queries.
    sql = "select port from portdb where equip='" + equip + "'"
    hostport = db.get(sql)
    if hostport is None:
        # No port reserved for this equip yet: grab any free one.
        sql = "select port from portdb where status=0 limit 1"
        hostport = db.get(sql)
    cmd = ("docker run -it --name " + equip + " -d -p " + str(hostport)
           + ":3000 -P --link=test-mysql:mysql_test -e HOST=" + ipaddr
           + " " + imname + " " + rec_cmd)
    (status, output) = subprocess.getstatusoutput(cmd)
    if status == 0:
        logger.info("docker is running!")
        sql = ('update portdb set status=1,equip="' + equip + '",ipaddress="'
               + ipaddr + '" where port=' + str(hostport))
        db.exec(sql)
        flag = [True, hostport]
    else:
        logger.error("docker cannot run!")  # was logging.error (root logger)
        print(cmd)
        flag = [False, hostport]  # was True even on failure
    return flag
def manage_files(self):
    """Deduplicate files every 5 seconds while the 'running' settings flag
    in the database is non-zero."""
    def _running():
        # settings.value is stored as text; cast to int so "0" stops the loop.
        row = db.get('settings', ['value'], where='skey=%s', args=['running'])
        return int(row[0][0])

    while _running():
        self.deduplicate_files()
        # self.push_files()
        time.sleep(5)
def update_kill(kill_id):
    # Recompute the total ISK cost of one kill (victim's hull plus all
    # dropped/destroyed items) and write it into kill_costs.
    # Relies on the module-global `au79_cost` (Genolution 'Auroral' AU-79
    # implant price) being set before this is called.
    with db.conn.cursor() as c:
        # Hull cost of the victim's ship (may be NULL if unpriced).
        r = db.get(c, '''
            SELECT ship_type_id, cost FROM kill_characters
            LEFT JOIN item_costs ON ship_type_id = item_costs.type_id
            WHERE kill_id = %s AND victim
        ''', kill_id)
        if r[1] is not None:
            cost = r[1]
        else:
            cost = 0
        if r[0] == 33328:
            # Capsule - Genolution 'Auroral' 197-variant: the implant price
            # is accounted separately, so back it out of the hull cost.
            cost -= au79_cost
        # singleton is 0 normally and for BPOs and 2 for BPCs
        # we want to divide by 1 for BPOs and by 1000 for BPCs
        r = db.get(c, '''
            SELECT SUM(cost * (dropped + destroyed) / (singleton * 499.5 + 1))
            FROM items
            JOIN item_costs ON items.type_id = item_costs.type_id
            WHERE kill_id = %s
        ''', kill_id)
        if r[0]:
            cost += int(r[0])
        if cost < 0:
            # sometimes, the implant just isn't there for a golden pod...
            cost += au79_cost
            print('goddamnit CCP', kill_id)
        c.execute('UPDATE kill_costs SET cost = %s WHERE kill_id = %s', (cost, kill_id))
def post(self):
    """Create a station row from the parsed request, assign its new id,
    broadcast the refreshed station list, and return the station."""
    station = self.station_parser.parse_args()
    db.query("insert into stations values (?, ?)",
             [station["name"], station["url"]])
    db.get().commit()
    # sqlite assigns the rowid on insert; fetch it for the response payload.
    new_id = db.query("select last_insert_rowid() as _id", one=True)["_id"]
    station["_id"] = new_id
    publisher.publishStations(StationList().get())
    return station
async def queuer(_, message):
    """Handle /play: validate the request, resolve the audio source
    (telegram reply, youtube/saavn, or the default service), then queue
    the track and start playback if nothing is running.

    Fix: the queued item used the bare name `telegram` (a NameError if
    `service` were ever falsy) where the string "telegram" was intended.
    """
    global running
    try:
        usage = """
**Usage:**
__/play Song_Name__
__/play youtube/saavn Song_Name__
__/play Reply_On_Audio__"""
        async with PLAY_LOCK:
            if (
                len(message.command) < 2
                and not message.reply_to_message
            ):
                return await message.reply_text(usage)
            if "call" not in db:
                return await message.reply_text(
                    "**Use /joinvc First!**"
                )
            if message.reply_to_message:
                if message.reply_to_message.audio:
                    service = "telegram"
                    song_name = message.reply_to_message.audio.title
                else:
                    return await message.reply_text(
                        "**Reply to a telegram audio file**"
                    )
            else:
                # "/play [service] name..." — first token after /play may be
                # an explicit service, otherwise the whole rest is the query.
                text = message.text.split("\n")[0]
                text = text.split(None, 2)[1:]
                service = text[0].lower()
                services = ["youtube", "saavn"]
                if service in services:
                    song_name = text[1]
                else:
                    service = get_default_service()
                    song_name = " ".join(text)
            requested_by = message.from_user.first_name
            if "queue" not in db:
                db["queue"] = asyncio.Queue()
            if not db["queue"].empty() or db.get("running"):
                await message.reply_text("__**Added To Queue.__**")
            await db["queue"].put(
                {
                    "service": service or "telegram",  # was bare name `telegram`
                    "requested_by": requested_by,
                    "query": song_name,
                    "message": message,
                }
            )
            if not db.get("running"):
                db["running"] = True
                await start_queue()
    except Exception as e:
        await message.reply_text(str(e))
        e = traceback.format_exc()
        print(e)
def collect():
    ''' Execute inside user container and update database.

    Refactored variant of collect(): user lookup and request validation are
    delegated to check_user()/bad_request(), and the response embeds the
    rendered script and material instead of raw file fields.
    '''
    user = check_user()
    # check_user returns an error dict (not a User) when validation fails.
    if type(user) == dict:
        return jsonify(user)
    requires = ["fileid"]
    failed = bad_request(requires)
    if failed is not None:
        return jsonify(failed)
    fileid = request.json['fileid']
    if user.form == 'ghost':
        # Ghosts are locked out of collecting until restored.
        return jsonify({"msg": "you're a ghost", "error": True})
    script = db.get("Script", fileid)
    if script is None:
        return jsonify({"msg": "script not found", "error": True})
    filename = script.filename
    text = script.filetext
    row = script.row
    col = script.col
    author = db.get("User", script.user_id)
    if author.id == user.id:
        return jsonify({"msg": "script is yours", "error": True})
    if script.has_collected(user.id):
        return jsonify({
            "msg": "you've already collected this script",
            "error": True
        })
    # "Bad" (high-material) scripts penalize the collector on failure.
    is_bad_file = script.material >= 20
    container = nest.load_container(user)
    file_obj = create_file(filename, text, row, col)
    result = nest.run_file(user, file_obj)
    # NOTE(review): `== None` / `== False` should be `is None` / `is False`.
    if result["has_heart"] == None or result["has_heart"] == False:
        # Run failed: user becomes a ghost; bad files also transfer
        # material from the collector to the author.
        user.form = 'ghost'
        if is_bad_file:
            author.add_material(user.pay_material(script.material))
    else:
        # Run succeeded: restore form and pay out material.
        user.form = user.character
        script.collect(user.id)
        if is_bad_file:
            user.add_material(script.material)
        else:
            user.add_material(script.material)
            author.add_material(script.material)
    result['script'] = return_script(user, script)
    result['material'] = script.material
    user.touch()  # refresh last-seen timestamp
    db.save()
    return jsonify({"user": return_user(user), "result": result})
def get(self):
    """Serve collaborator and message counters as gzip-compressed JSON."""
    stats = {
        'ncollaborators': db.get("ncollaborators"),
        'nsent': db.get("nsent"),
        'nrcv': db.get("nrcv"),
    }
    self.set_header('Content-Type', 'application/json')
    self.set_header('Content-Encoding', 'gzip')
    self.write(db.jsonToGzip(stats))
def evaluate(query):
    # For each feature in the query, aggregate the histograms of every DOI
    # tagged with that feature — bucketed into 30-day periods relative to
    # each DOI's publication date — and yield one summary dict per feature.
    for feature in query:
        histograms = []
        for doi in db.get('features', feature):
            histogram = db.get('histograms', doi)
            pubdate = metadata.publication_date(doi)
            # Python 2 tuple-parameter lambda: bucket = whole 30-day
            # "months" elapsed since publication (integer division).
            histogram.group_by(lambda (date): (date-pubdate).days / 30)
            histograms.append(histogram)
        summary = data.summary(histograms)
        summary['feature'] = feature
        yield summary
def get(self, height=None, hash=None):
    """Load this block from storage by height, or by hash (resolved to a
    height via the coofblocksindex).

    Returns self on success, or an error string when neither key is given
    or the block is missing.  Fix: `== None` comparisons replaced with the
    idiomatic `is None`.
    """
    if height is None and hash is None:
        return 'must provide either hash or height'
    elif height is None:
        # Only the hash was given: resolve it to a height first.
        height = get(hash, 'coofblocksindex')
    as_json = get('blk-{}'.format(height), 'coofblocks')
    if as_json is None:
        return 'block not found'
    self.from_json(as_json)
    return self
def init_db():
    """(Re)create the database schema from `db.schema` in the app root.

    Fix: the schema file and cursor were opened/closed manually, leaking
    both if executescript raised; use `with` and `try/finally` instead.
    """
    with app.app_context():
        schema_file = os.path.join(app.root_path, 'db.schema')
        with open(schema_file, 'r') as schema:
            cur = db.get().cursor()
            try:
                cur.executescript(schema.read())
            finally:
                cur.close()
        db.get().commit()
def dbtest():
    # Smoke-test the plugin's key-value store: log the db object's type and
    # attributes, then exercise two get/put round-trips.  On any failure the
    # error text (plus the db repr) is returned instead of raised so it
    # shows up in the Indigo log.
    try:
        indigo.server.log("db is " + str(db))
        indigo.server.log(str(dir(db)))
        indigo.server.log(str(type(db.GLOBALSETTINGS)))
        indigo.server.log(str(db.get("mykey")))
        db.put("mykey", "1")
        indigo.server.log(str(db.get("mykey")))
        db.put("mykey", "2")
        indigo.server.log(str(db.get("mykey")))
    except Exception, e:  # Python 2 syntax; broad catch is deliberate here
        return str(e) + "::: " + str(db)
def _getXvmStatActiveTokenData():
    """Return the stored XVM stat token for the current player, or None.

    While a replay is running the live player id is unavailable, so fall
    back to the 'lastPlayerId' recorded during normal play.
    """
    player_id = _getPlayerId()
    if player_id is None:
        return None
    token = db.get('tokens', player_id)
    if token is None and utils.is_replay():
        player_id = db.get('tokens', 'lastPlayerId')
        if player_id is None:
            return None
        token = db.get('tokens', player_id)
    return token
def activate(self, req, resp, id):
    """Transition a Reserved service to Active via the backend.

    Responds 404 when the service doesn't exist, 405 when it isn't in the
    Reserved state, and 200 with the (possibly unchanged) service data
    otherwise.
    """
    if not db.does_service_exist(id):
        resp.status = falcon.HTTP_404
        resp.body = error_response(
            code="DSM-LITE-404",
            message="Service %s does not exist",
            tokens=[id])
        return

    service_data = db.get(id)
    state = service_data.get("serviceState")
    if state != "Reserved":
        resp.status = falcon.HTTP_405
        resp.body = error_response(
            code="DSM-LITE-405",
            message="Invalid service state: %s. Service should be in %s state to activate the service",
            tokens=[state, "Reserved"])
        return

    # Activation may fail; record Reserved again so stored state matches reality.
    if backend.perform_operation(service_data, "activate"):
        service_data["serviceState"] = "Active"
    else:
        service_data["serviceState"] = "Reserved"
    db.update(service_data)
    resp.status = falcon.HTTP_200
    resp.body = success_response(service_data)
def our_mime():
    """Map the stream's configured audio format to a MIME type.

    Anything other than an explicit 'aac' setting (including a missing
    value) is treated as mp3.
    """
    configured = DB.get('format') or 'mp3'
    return 'audio/aac' if configured == 'aac' else 'audio/mpeg'
def add():
    """GET: render the new-task form; POST: validate and append a task.

    Requires a logged-in session and caps each user at 5 tasks.
    """
    if 'username' not in session:
        return redirect('/')
    if request.method == 'GET':
        return render_template('add.html')
    elif request.method == 'POST':
        form = forms.TaskForm(request.form)
        if not form.validate():
            return render_template('error.html', message='Invalid task'), 400
        user = db.get(session['username'])
        tasks = user['tasks']
        if len(tasks) >= 5:
            return render_template('error.html',
                                   message='Maximum task limit reached!'), 400
        # New task id = current count (ids are dense, 0-based).
        tasks.append({
            'title': form.title.data,
            'content': form.content.data,
            'priority': form.priority.data,
            'id': len(tasks),
        })
        db.put(session['username'], user)
        return redirect('/tasks')
def delete(self, req, resp, id):
    """Delete a service that is still in Draft or Reserved state.

    Responds 404 when the service doesn't exist, 405 when its state
    forbids deletion, and 200 with a success payload otherwise.
    """
    if not db.does_service_exist(id):
        resp.status = falcon.HTTP_404
        resp.body = error_response(
            code="DSM-LITE-404",
            message="Service %s does not exist",
            tokens=[id])
        return

    service_data = db.get(id)
    state = service_data.get("serviceState")
    if state not in ["Draft", "Reserved"]:
        resp.status = falcon.HTTP_405
        resp.body = error_response(
            code="DSM-LITE-405",
            message="Invalid service state %s. Only services in Draft or Reserved state can be deleted.",
            tokens=[state])
        return

    db.delete(id)
    resp.content_type = 'application/json'
    resp.body = success_response(None)
    resp.status = falcon.HTTP_200
def load_checks(self, check_ios):
    """
    Load checks from the database.

    Arguments:
        check_ios (Dict(int->List(CheckIO))): Mapping of check IDs to a
            list of check input-output pairs to associate checks with

    Returns:
        List(Check,int): A list of checks and the ID of their associated
            services
    """
    checks = []
    for row in db.get("SELECT * FROM service_check"):
        check_id, name, check_string, poller_string, service_id = row
        ios = check_ios[check_id]
        # Resolve the dotted paths to the check function and poller class.
        check_function = load_module(check_string)
        poller = load_module(poller_string)()
        check = Check(check_id, name, check_function, ios, poller)
        # Back-link each IO pair to its owning check.
        for io_pair in ios:
            io_pair.check = check
        checks.append((check, service_id))
    return checks
def _post_new_or_edit_(self):
    # Create a new Event (no key posted) or update an existing one, then
    # respond with the event key as JSON — or HTTP 400 if any field fails
    # to parse/validate.
    status, key = True, self.request.get('event-key')
    event = db.get(key) if key else Event(owner=self.current_user.id)
    try:
        # The RHS tuple is fully evaluated before any attribute is set, so
        # a bad datetime or property value leaves the event untouched.
        event.title, event.time, event.place_name, event.visibility = (
            self.request.get('event-name'),
            datetime.strptime(self.request.get('event-time'), '%Y-%m-%d %H:%M'),
            self.request.get('event-place-name'),
            self.request.get('event-visibility')
        )
        place = self.request.get('event-place')
        if place:
            event.place = GeoPt(self.request.get('event-place'))
    except (ValueError, db.BadValueError):
        # ValueError: bad datetime/GeoPt text; BadValueError: datastore
        # property validation failure.
        status = False
    if not status:
        self.response.clear()
        self.response.set_status(400)
    else:
        event.put()
        self.response.out.write(simplejson.dumps({
            'data': {
                'key': event.key_str()
            }
        }, ensure_ascii=False))
def __writeDB4(self, db, tag, pkgid, pkg, useidx=True, func=str):
    """Add index entries for tag of RpmPackage pkg (with id pkgid) to a
    BSD database db. The tag has a single value if not useidx. Convert
    the value using func."""
    # Tracks trigger names already written for this package (dedup).
    tnamehash = {}
    if not pkg.has_key(tag):
        return
    for idx in xrange(len(pkg[tag])):
        if tag == "requirename":
            # Skip rpmlib() requirenames...
            #if key.startswith("rpmlib("):
            #    continue
            # Skip install prereqs, just like rpm does...
            if isInstallPreReq(pkg["requireflags"][idx]):
                continue
        # Skip all files with empty md5 sums
        key = self.__getKey(tag, idx, pkg, useidx, func)
        if tag == "filemd5s" and (key == "" or key == "\x00"):
            continue
        # Equal Triggernames aren't added multiple times for the same pkg
        if tag == "triggername":
            if tnamehash.has_key(key):
                continue
            else:
                tnamehash[key] = 1
        # Append this (pkgid, idx) record to any existing entry for key;
        # idx is serialized as a 4-byte native unsigned int.
        db[key] = db.get(key, "") + (pkgid + pack("I", idx))
        # Single-valued tag: only the first element is indexed.
        if not useidx:
            break
def execute(self, message):
    """Dispatch a command message to the matching db operation.

    Returns the operation's result, or None for unknown commands.
    """
    commands = spec.ttypes.Command
    cmd = message.command
    if cmd == commands.INSERT:
        return db.insert(message.title, message.content)
    if cmd == commands.GET:
        return db.get(message.id)
    if cmd == commands.LIST:
        return db.list(message.count)
def display_chart_data(arduino_name):
    """Render chart.html with the stored sensor series for one arduino,
    or a 400 error when no rows exist for it."""
    rows = db.get(arduino_name)
    if len(rows) == 0:
        return "There was an error getting the arduino data :(", 400
    # Column layout: row[2]=humidity, row[3]=temperature, row[4]=light,
    # row[5]=timestamp (chart x-axis labels).
    return render_template(
        'chart.html',
        arduino=arduino_name,
        humidity=[row[2] for row in rows],
        temperature=[row[3] for row in rows],
        light=[row[4] for row in rows],
        labels=[row[5] for row in rows])
def signature(fname, blockcount=-1):
    # Compute an audio fingerprint (sig) and frame-block list for fname,
    # detecting the stream's format on first use and caching it in the DB.
    # Returns False when the format cannot be determined at all.
    audio_format = DB.get('format')
    if not audio_format:
        audio_format, start = get_audio_format(fname)  # `start` unused here
        if audio_format:
            logging.info("Setting this stream's audio format as %s" % audio_format)
            DB.set('format', audio_format)
        else:
            logging.warn("Can't determine type of file for %s." % fname)
            return False
    block = None
    if audio_format == _FORMAT_AAC:
        sig, block = aac_signature(fname, blockcount)
    # MP3 path doubles as the fallback when AAC parsing yields no block.
    if audio_format == _FORMAT_MP3 or not block:
        sig, block = mp3_signature(fname, blockcount)
    # NOTE(review): this re-tags the stream as MP3 whenever the format was
    # AAC and `block` is non-empty — which also fires when AAC parsing
    # succeeded; confirm whether the intended condition is "AAC failed but
    # MP3 fallback produced frames".
    if len(block) > 0 and audio_format == _FORMAT_AAC:
        DB.set('format', _FORMAT_MP3)
        DB.clear_cache()
    return sig, block
def get_device_history(arduino_name):
    """Return every stored reading for `arduino_name` as JSON, or a 400
    error when the device has no rows in the database."""
    rows = db.get(arduino_name)
    if len(rows) > 0:
        # Return all the data stored for the specified arduino
        return jsonify(rows)
    return "The specified device not exist in the database", 400
def get_offset(force=False):
    # Contacts the goog, giving a longitude and lattitude and gets the time
    # offset with regard to the UTC. There's a sqlite cache entry for the offset.
    # Returns an int second offset.
    # NOTE(review): the computed value is divided by 60 before caching, so
    # despite the comment above it appears to be *minutes* — confirm which
    # unit callers expect before changing anything.
    # SECURITY NOTE: the Google API key is hardcoded; move it to config.
    import misc
    # If we are testing this from an API level, then we don't
    # have a database
    if misc.IS_TEST:
        return 0
    # Cached value expires daily (DST transitions change the offset).
    offset = DB.get('offset', expiry=ONE_DAY_SECOND)
    if not offset or force:
        from urllib2 import urlopen  # Python 2
        when = int(unixtime())
        api_key = 'AIzaSyBkyEMoXrSYTtIi8bevEIrSxh1Iig5V_to'
        url = "https://maps.googleapis.com/maps/api/timezone/json?location=%s,%s&timestamp=%d&key=%s" % (
            misc.config['lat'], misc.config['long'], when, api_key)
        stream = urlopen(url)
        data = stream.read()
        opts = json.loads(data)
        if opts['status'] == 'OK':
            logging.info("Location: %s | offset: %s" % (opts['timeZoneId'], opts['rawOffset']))
            # Base UTC offset plus current DST offset, converted from seconds.
            offset = (int(opts['rawOffset']) + int(opts['dstOffset'])) / 60
            DB.set('offset', offset)
        else:
            # Lookup failed: report UTC rather than raising.
            offset = 0
    return int(offset)
def setup_method(self, method):
    # Per-test fixture: reset the global db registry, register the test
    # driver class, connect to the in-memory database, and seed table foo
    # with a single known row.
    db.clear()
    db.drivers.autoregister_class(TestDriver)
    db.from_url(MEM_URL)
    db.do(CREATE_FOO_SQL)
    db.do("INSERT INTO foo VALUES (1, 'foo')")
    self.db = db.get()
def _post_new_or_edit_(self):
    # Duplicate of the _post_new_or_edit_ handler earlier in this file
    # (formatting aside): create or update an Event from posted fields and
    # respond with its key as JSON, or HTTP 400 on a parse/validation error.
    status, key = True, self.request.get('event-key')
    event = db.get(key) if key else Event(owner=self.current_user.id)
    try:
        # The RHS tuple is fully evaluated before any attribute is set, so
        # a bad datetime or property value leaves the event untouched.
        event.title, event.time, event.place_name, event.visibility = (
            self.request.get('event-name'),
            datetime.strptime(self.request.get('event-time'), '%Y-%m-%d %H:%M'),
            self.request.get('event-place-name'),
            self.request.get('event-visibility'))
        place = self.request.get('event-place')
        if place:
            event.place = GeoPt(self.request.get('event-place'))
    except (ValueError, db.BadValueError):
        status = False
    if not status:
        self.response.clear()
        self.response.set_status(400)
    else:
        event.put()
        self.response.out.write(
            simplejson.dumps({'data': {
                'key': event.key_str()
            }}, ensure_ascii=False))
def test_dict_of_list_of_tuple(self):
    """Round-trip a document whose values nest tuples inside lists and
    dicts, and verify it is found intact."""
    client = db.get()
    collection = 'test_dict_of_list_of_tuple'
    client.remove(collection)  # start from a clean collection
    document = {
        'id': {
            'date': datetime.datetime(2014, 1, 1),
            'label': 'text',
        },
        'tuple_content': {
            'a_tuple': (4, 7),
            'a_boolean': True,
        },
        'list_tuple_content': [(3, 2), (6, 3), (9.3805, 2.126)],
        'nested_tuple_content': [
            'a string',
            {
                'a_key': 'a value',
                'a_list_of_tuples': [('ein', 3, False), (4, 2)],
            },
        ],
    }
    client.insert(collection, document)
    found = client.find(collection, document)
    self.assertEqual(1, len(found))
    self.assert_dict(document, found[0])
    # Clean up
    client.remove(collection)
def main():
    # Refresh item_costs from the current price feed, cache the AU-79
    # implant price in the module-global `au79_cost`, and — with the -a
    # flag — recompute every stored kill's cost.  -q suppresses progress
    # output for the price-update phase.
    quiet = (len(sys.argv) == 2 and sys.argv[1] == '-q')
    with db.conn.cursor() as c:
        if not quiet:
            print('updating items')
        for type_id, price in get_prices():
            # Upsert: try UPDATE first, INSERT when no row existed.
            c.execute('UPDATE item_costs SET cost = %s WHERE type_id = %s', (price, type_id))
            if c.rowcount == 0:
                c.execute('INSERT INTO item_costs (type_id, cost) VALUES(%s, %s)', (type_id, price))
        db.conn.commit()
        # Genolution 'Auroral' AU-79 — used by update_kill() for capsules.
        r = db.get(c, 'SELECT cost FROM item_costs WHERE type_id = 33329')
        global au79_cost
        au79_cost = r[0]
        if len(sys.argv) == 2 and sys.argv[1] == '-a':
            print('getting kills')
            c.execute('SELECT kill_id FROM kills')
            print('updating kills')
            # Stream kill ids from this cursor; update_kill() opens its own
            # cursor, so the two don't interfere.  Commit every 100 kills.
            while True:
                r = c.fetchone()
                if r is None:
                    break
                update_kill(r[0])
                if c.rownumber % 100 == 0:
                    db.conn.commit()
                    print('updated', c.rownumber, 'kills')
            db.conn.commit()
def top_cost():
    # Return the 25 most expensive victim kills among (roughly) the most
    # recent 2500 kill ids, each enriched with its solar-system name,
    # security status, and a formatted kill time.
    with db.cursor() as c:
        last_kill = db.get(c, 'SELECT MAX(kill_id) AS kill_id FROM kills')
        kills = db.query(c, '''
            SELECT kills.kill_id, cost, solar_system_id, kill_time, ship_type_id, typeName AS ship_name
            FROM kills
            JOIN kill_costs ON kill_costs.kill_id = kills.kill_id
            JOIN kill_characters ON kill_characters.kill_id = kills.kill_id
            JOIN eve.invTypes ON typeID = ship_type_id
            WHERE victim = 1 AND kills.kill_id > ?
            ORDER BY cost DESC LIMIT 25
        ''', last_kill['kill_id'] - 2500)
        # joining eve.mapSolarSystems on the initial query causes filesort
        # on large dbs for some reason; do a manual join instead.
        system_ids = set(map(operator.itemgetter('solar_system_id'), kills))
        # The interpolated ids come from our own query results (integers),
        # not user input, so building the IN (...) list inline is safe here.
        system_rows = db.query(c, '''
            SELECT solarSystemID as solar_system_id, solarSystemName AS system_name, security, class AS wh_class
            FROM eve.mapSolarSystems
            LEFT JOIN wh_systems ON solarSystemID = wh_systems.id
            WHERE solarSystemID IN ({})
        '''.format(','.join(map(str, system_ids))))
        systems = {}
        for system in system_rows:
            systems[system['solar_system_id']] = system
        # Merge system info into each kill and derive display fields.
        for kill in kills:
            kill.update(systems[kill['solar_system_id']])
            del kill['solar_system_id']
            kill['security_status'] = _security_status(kill['security'], kill['wh_class'])
            kill['kill_time'] = _format_kill_time(kill['kill_time'])
        return kills
def get(self):
    """The client wants data
    if we find ANY data, respond with it immediately
    Otherwise, wait for data to arrive
    And respond as soon as any data is available.
    """
    self.docs = []   # documents found immediately
    self.rooms = []  # rooms we joined to wait for future data
    query = self.request['query'].items()
    for key, v in query:
        # Each query value encodes a start/end range.
        start, end = startend(v)
        try:
            d = db.get(key, start, end)
            print 'found request, responding', d
            self.docs.extend(d)
        except IndexError:
            # the request was too high — the range isn't available yet, so
            # subscribe this handler to the key's room for a push later.
            print 'waiting for ' + key + '[' + v + ']'
            r = Room.get_by_name(key)
            self.rooms.append(r)
            r.enter(self)
    # did we find anything?
    if self.docs:
        self.send_response(json.dumps(self.docs))
        return
    # are we waiting for data?
    if self.rooms:
        print 'waiting for updates'
        return
    # nothing. user submitted no keys
    # assume they want to know about themselves
    self.send_response(json.dumps({'user': db._hash(self.userkey)}));
def load_services(self, checks):
    """
    Load services from the database.

    Arguments:
        checks (List(Check,int)): List of (check, service_id) pairs to
            associate each check with its service

    Returns:
        List(Service): A list of services
    """
    from collections import defaultdict

    # Group checks by service id once, instead of rescanning the whole
    # checks list for every service row (was O(services * checks)).
    # Iterating `checks` once preserves the original per-service ordering.
    checks_by_service = defaultdict(list)
    for check, service_id in checks:
        checks_by_service[service_id].append(check)

    services = []
    for service_id, host, port in db.get("SELECT * FROM service"):
        schecks = checks_by_service[service_id]
        service = Service(service_id, host, port, schecks)
        # Update link from checks to this service
        for check in schecks:
            check.service = service
        services.append(service)
    return services
def edit(task_id):
    """GET: render the edit form for one of the session user's tasks;
    POST: overwrite only the form fields the user filled in.

    Fix: the `int(task_id)` conversion was guarded by a bare `except:`,
    which would also swallow KeyboardInterrupt/SystemExit — narrowed to
    the two exceptions int() can actually raise.
    """
    if 'username' not in session:
        return redirect('/')
    try:
        task_id = int(task_id)
    except (TypeError, ValueError):
        return render_template('error.html', message='Invalid task'), 400
    user = db.get(session['username'])
    task = next((task for task in user['tasks'] if task['id'] == task_id), None)
    if task is None:
        return render_template('error.html', message='Task not found'), 404
    if request.method == 'GET':
        return render_template('edit.html', id=task['id'])
    elif request.method == 'POST':
        form = forms.EditForm(request.form)
        if not form.validate():
            return render_template('error.html', message='Invalid edit'), 400
        # Only overwrite fields that were actually submitted (non-empty).
        for attribute in ['title', 'content', 'priority']:
            if form[attribute].data:
                task[attribute] = form[attribute].data
        db.put(session['username'], user)
        return redirect('/tasks')
def load_container(user_id, version=None):
    ''' Return the user's running container.

    Checks the in-memory NEST cache first, then pulls the user's image
    from dockerhub and starts it; falls back to building a brand-new
    container when no remote image exists.

    TODO: Pull container from dockerhub and return it
    If none on dockerhub, create new one

    Fixes: `!= None` replaced with `is not None`, and the cache is now
    read once instead of twice (lookup + return).
    '''
    cached = NEST.get(user_id)
    if cached is not None:
        return cached
    #remove_container(user_id)
    user = db.get("User", user_id)
    repo = "rubyshadows/{}".format(user_id)
    if version is None:
        version = user.container_version
    full = "{}:{}".format(repo, version)
    try:
        print("pulling image from repo")
        img = client.images.pull(repo, tag=str(version))
        print("client.images.pull: {}".format(img))
        container = client.containers.run(full, command="bash heart", detach=True)
        NEST[user_id] = container
        print("successful pull: {}".format(user_id))
        return container
    except (docker.errors.ImageNotFound, docker.errors.APIError) as e:
        print("remote image not found: {}".format(full))
        print(e)
        print("creating new container\n")
        return new_container(user_id)
def get():
    """Look up the (col, val) pair from the query string for the logged-in
    user; unauthenticated callers get a login prompt string."""
    if not current_user.is_authenticated:
        return "please login"
    args = request.args
    return connection.get(args.get('col'), args.get('val'))
def base_stats():
    # Reports base-level statistical information about the health of the server.
    # This is used for the /stats and /heartbeat call.
    try:
        # for some reason this can lead to a memory error
        load = [
            float(unit) for unit in os.popen(
                "uptime | awk -F : ' { print $NF } '").read().split(', ')
        ]
    except:
        # deliberate best-effort: report 0 rather than fail the health call
        load = 0
    return {
        'uptime': TS.uptime(),
        # use_cache=False: always hit the store so this reflects right now
        'last_recorded': float(DB.get('last_recorded', use_cache=False) or 0),
        'now': time.time(),
        'version': __version__,
        'load': load,
        # Matching processes for our callsign; the [x]yz grep trick keeps
        # the grep process itself out of the results.
        'plist': [
            line.strip() for line in os.popen("ps auxf | grep [%s]%s" % (
                config['callsign'][0], config['callsign'][1:]
            )).read().strip().split('\n')
        ],
        'disk': cloud.size('.') / (1024.0**3)  # GiB
    }
async def inline(msg):
    # Inline-query handler: start from the user's personal results (only if
    # they have a stored token), merge in an automatic music search for the
    # query text, and answer with placeholder "Aguarde..." articles whose
    # content is filled in later via the callback keyboard.
    print(msg['query'])
    tk = db.get(msg['from']['id'])
    if tk[0]:
        r, articles = await ainline(msg)
    else:
        r, articles = {}, []
    if msg['query'] != '':
        a = await mux.auto(msg['query'])
        for i in a:
            # md5 of the track link doubles as the article id and the key
            # stored for the later callback.  (Shadows the builtin `hash`.)
            hash = hashlib.md5(i['link'].encode()).hexdigest()
            r.update({hash: i['link']})
            teclado = InlineKeyboardMarkup(
                inline_keyboard=[[dict(text='Aguarde...', callback_data='a')]])
            articles.append(
                InlineQueryResultArticle(
                    id=hash,
                    title=f'{i["musica"]} - {i["autor"]}',
                    thumb_url='https://piics.ml/i/010.png',
                    reply_markup=teclado,
                    input_message_content=InputTextMessageContent(
                        message_text='Aguarde...',
                        parse_mode='markdown',
                        disable_web_page_preview=True)))
    # Persist the hash->link map for this user so callbacks can resolve it.
    db.tem(msg['from']['id'], r)
    print(r)
    await bot.answerInlineQuery(msg['id'], results=articles, is_personal=True,
                                cache_time=0)
def get(self, hash):
    """Populate this object from the JSON stored under `hash` in 'utxos'.

    Returns True on success, False when the record is missing/malformed.

    Fix: the bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    narrowed to Exception.
    NOTE(review): `self = self.from_json(got)` rebinds only the local name —
    it does not mutate the caller's instance unless from_json mutates in
    place; confirm from_json's contract.
    """
    got = get(hash, 'utxos')
    try:
        self = self.from_json(got)
    except Exception:
        return False
    return True
def _retrieve_json(uid):
    """Fetch and decode the JSON entry stored under `uid`.

    Raises:
        db.NonExistentUID: when no entry exists for `uid`.
    """
    _check_db()
    raw = db.get(uid)
    if raw:
        return json.loads(raw.decode())
    raise db.NonExistentUID(uid)
def get_offset(force=False):
    # Contacts the goog, giving a longitude and lattitude and gets the time
    # offset with regard to the UTC. There's a sqlite cache entry for the offset.
    # Returns an int second offset.
    # NOTE(review): duplicate of the get_offset defined earlier in this
    # file, and the value is divided by 60 before caching — so despite the
    # comment it appears to be *minutes*; confirm before changing.
    # SECURITY NOTE: the Google API key is hardcoded; move it to config.
    import misc
    # If we are testing this from an API level, then we don't
    # have a database
    if misc.IS_TEST:
        return 0
    # Cached value expires daily (DST transitions change the offset).
    offset = DB.get('offset', expiry=ONE_DAY_SECOND)
    if not offset or force:
        from urllib2 import urlopen  # Python 2
        when = int(unixtime())
        api_key = 'AIzaSyBkyEMoXrSYTtIi8bevEIrSxh1Iig5V_to'
        url = "https://maps.googleapis.com/maps/api/timezone/json?location=%s,%s&timestamp=%d&key=%s" % (misc.config['lat'], misc.config['long'], when, api_key)
        stream = urlopen(url)
        data = stream.read()
        opts = json.loads(data)
        if opts['status'] == 'OK':
            logging.info("Location: %s | offset: %s" % (opts['timeZoneId'], opts['rawOffset']))
            # Base UTC offset plus current DST offset, converted from seconds.
            offset = (int(opts['rawOffset']) + int(opts['dstOffset'])) / 60
            DB.set('offset', offset)
        else:
            # Lookup failed: report UTC rather than raising.
            offset = 0
    return int(offset)
def login(username, password):
    # Authenticate an office employee by comparing the SHA-1 of the
    # supplied password against the stored hash.  Returns True on a match,
    # None otherwise (callers treat None as falsy).
    # SECURITY NOTE: the query interpolates `username` with `%` (SQL
    # injection risk) and the scheme is unsalted SHA-1 — both need fixing.
    # NOTE(review): the literal below looks redacted ('******' with no %s
    # placeholder); as written, `% username` raises TypeError at runtime —
    # recover the original query before relying on this function.
    sql = "SELECT Username, Password FROM OfficeEmployees WHERE Username = '******'" % username
    res = db.get(sql)
    for row in res:
        if row is not None:
            # NOTE(review): compares the digest to the whole `row`; if rows
            # are (Username, Password) tuples this can never match — confirm
            # whether db.get yields bare password values here.
            if hashlib.sha1(password).hexdigest() == row:
                return True
    return None
def list(self, stock_block_type):
    """Return the classification table for `stock_block_type`, fetching it
    from tushare and caching it in the db the first time it is requested.

    Returns None for unknown block types.

    Refactor: ten structurally identical if/elif branches collapsed into a
    single cache-or-fetch path driven by a dispatch table (assumes the
    self.* type markers are distinct, hashable values, as the original
    elif chain implies).
    """
    fetchers = {
        self.industry: (STOCK_BLOCK_INDUSTRY, ts.get_industry_classified),
        self.concept: (STOCK_BLOCK_CONCEPT, ts.get_concept_classified),
        self.area: (STOCK_BLOCK_AREA, ts.get_area_classified),
        self.sme: (STOCK_BLOCK_SME, ts.get_sme_classified),
        self.gem: (STOCK_BLOCK_GEM, ts.get_gem_classified),
        self.st: (STOCK_BLOCK_ST, ts.get_st_classified),
        self.hs300s: (STOCK_BLOCK_HS300S, ts.get_hs300s),
        self.sz50s: (STOCK_BLOCK_SZ50S, ts.get_sz50s),
        self.zz500s: (STOCK_BLOCK_ZZ500S, ts.get_zz500s),
    }
    entry = fetchers.get(stock_block_type)
    if entry is None:
        return None
    cache_key, fetch = entry
    stock_block = db.get(cache_key)
    if stock_block is None:
        stock_block = fetch()
        db.save(cache_key, stock_block)
    return stock_block
def _post_delete_(self):
    """Delete the event identified by the posted key, echoing the key back
    as JSON."""
    key = self.request.get('key')
    db.delete(db.get(key))
    payload = simplejson.dumps({'data': {'key': key}}, ensure_ascii=False)
    self.response.out.write(payload)
def fetch_user(token, user_id):
    """Return the cached Vk_user for `user_id` if still fresh; otherwise
    fetch a new record from the VK API."""
    key = Vk_user.DB_KEY(user_id)
    if key in db.dict():
        cached = Vk_user.from_json(db.get(key))
        if not cached.outdated():
            return cached
    # Cache miss or stale entry: go to the API.
    return Vk_user.from_api(token, {'user_ids': user_id,
                                    'fields': 'photo_400_orig'})
def get(self):
    """Return all posts whose timestamps fall within the requested
    [start_timestamp, end_timestamp] window."""
    start = int(self.get_argument('start_timestamp'))
    end = int(self.get_argument('end_timestamp'))
    logging.info('getdata request. from_timestamp = {0} count = {1}'.format(start, end))
    posts = db.get(start, end)
    logging.info("response with {0} posts".format(len(posts)))
    self.write({'posts': posts})
def get_matches(vector, num_matches):
    # Print the similarity coefficient between `vector` and each of the six
    # stored vectors.  Debug/scaffolding code:
    # NOTE(review): `num_matches` is unused and nothing is returned — the
    # function only prints; presumably it should collect and return the top
    # `num_matches` results.  Confirm intent before extending.
    vs = db.get(6)
    ls = [1, 2, 3, 4, 5, 6]
    for index in ls:
        r = sim_coeff(vector, vs[index])
        print("Database: ")
        print vs[index]
        print("r")
        print r
    return
def _post_share_(self):
    """Attach a Facebook event id to the stored event and echo the key
    back as JSON."""
    key = self.request.get('key')
    event = db.get(key)
    event.fb_event_id = self.request.get('fb-event-id')
    event.put()
    payload = simplejson.dumps({'data': {'key': key}}, ensure_ascii=False)
    self.response.out.write(payload)
def fetch(token, chat_id):
    """Return the cached Vk_chat for `chat_id` if still fresh; otherwise
    fetch a new record from the VK API."""
    key = Vk_chat.DB_KEY(chat_id)
    if key in db.dict():
        cached = Vk_chat.from_json(db.get(key))
        if not cached.outdated():
            return cached
    # Cache miss or stale entry: go to the API.
    return Vk_chat.from_api(token, {'chat_id': chat_id,
                                    'fields': 'first_name, last_name, photo_400_orig'})
def test_create_and_connect_to_two_separately_default_second(self):
    """Registering one named and one default connection yields two
    independent databases with independent contents."""
    db.from_url(MEM_URL, db_name="db1")
    db.from_url(MEM_URL)
    named = db.get("db1")
    default = db.get()
    for handle in (named, default):
        handle.do(CREATE_FOO_SQL)
    named.do("INSERT INTO foo (value) VALUES (1)")
    named.do("INSERT INTO foo (value) VALUES (2)")
    named.do("INSERT INTO foo (value) VALUES (3)")
    default.do("INSERT INTO foo (value) VALUES (4)")
    default.do("INSERT INTO foo (value) VALUES (5)")
    default.do("INSERT INTO foo (value) VALUES (6)")
    # 1+2+3 vs 4+5+6 — proves the rows did not leak across connections.
    assert named.item("SELECT SUM(value) AS n FROM foo").n == 6
    assert default.item("SELECT SUM(value) AS n FROM foo").n == 15
def setup_method(self, method):
    # Per-test fixture: reset the global db registry, connect to the
    # psycopg2 test database named by the DB_PSYCOPG2_TEST_URL environment
    # variable, and (re)define the helper SQL function always1() that the
    # tests call.
    db.clear()
    import db_psycopg2  # registers the psycopg2 driver as an import side effect
    db.from_environ("DB_PSYCOPG2_TEST_URL")
    self.db = db.get()
    db.do("""CREATE OR REPLACE FUNCTION always1() RETURNS INTEGER AS $$
             BEGIN
                 RETURN 1;
             END
             $$ LANGUAGE 'plpgsql';
          """)
def edit_simple(table_name):
    """Render the simple edit view for `table_name`, deriving the column
    headers from the first row's attributes (empty when there are no rows)."""
    values = db.get(table_name)
    headers = values[0].__dict__.keys() if len(values) > 0 else []
    return flask.render_template('edit/simple.html',
                                 table=table_name,
                                 headers=headers,
                                 values=values)
def initial_dp_list():
    # Build [id, name, number_items_dp, max inventory] rows for every
    # product (ids 1..19) whose revenue/inventory records pass selection().
    # NOTE(review): a near-identical initial_dp_list earlier in this file
    # starts the range at 2 instead of 1 — confirm which is intended.
    arr=[]
    for i in xrange(1,20):
        # d.get returns JSON text; type(...) wraps the parsed dict in an
        # ad-hoc class so fields read as attributes (rev.id, ...).
        rev=type('revenue', (object,), json.loads(d.get(db_name,'revenue',i)))
        inv=type('inventory', (object,), json.loads(d.get(db_name,'inventory',i)))
        decision=selection(inv,rev)
        if(decision==1):
            arr.append(rev.id)
    print arr  # debug output
    ar=[]
    for i in xrange(0,len(arr)):
        y=[]
        q=type('product', (object,), json.loads(d.get(db_name,'product',arr[i])))
        y.append(q.id)
        y.append(str(q.name))
        y.append(q.number_items_dp)
        y.append(q.inventory['maxi'])
        ar.append(y)
    print ar  # debug output
    return ar
def real_time_data():
    # Print [id, name, total bids, current bid, dp revenue] rows for the
    # hard-coded product ids in `final_list`.
    # NOTE(review): `data` is built but the function returns None — it
    # probably should `return data`; confirm with callers before changing.
    final_list=[19]
    data=[]
    for i in xrange(0,len(final_list)):
        x=[]
        # d.get returns JSON text; type(...) wraps the parsed dict in an
        # ad-hoc class so fields read as attributes (q.id, q.bids, ...).
        q=type('product', (object,), json.loads(d.get(db_name,'product',final_list[i])))
        x.append(q.id)
        x.append(str(q.name))
        x.append(q.bids['tot'])
        x.append(q.bids['cur'])
        x.append(int(q.revenue['dp']))
        data.append(x)
    print data  # debug output
    return