async def inline_kb_answer_callback_handler(query: types.CallbackQuery):
    """Dispatch inline-keyboard callbacks.

    Two payload shapes are expected in ``query.data``:
      * ``'ru'`` / ``'en'``                  -> persist the language choice
      * ``'<action>-<user-or-id>-<index>'`` -> fetch Instagram content,
        where action is one of posts/stories/highlights/highlight.
    """
    answer_data = query.data
    global highlight
    global userid
    global user
    # renamed from `id` to avoid shadowing the builtin
    chat_id = query.from_user.id

    if answer_data in ('ru', 'en'):
        # answer_data is restricted to the two literals above and chat_id
        # is an integer from Telegram, so the interpolation is safe here;
        # parameterized queries would still be preferable.
        db.execute_query(
            conn,
            f'UPDATE users SET lang = "{answer_data}" WHERE user_id = "{chat_id}"')
        await bot.edit_message_text(chat_id=chat_id,
                                    message_id=query.message.message_id,
                                    text=key[answer_data][0])
    elif len(answer_data) > 1:
        loop, pool, it = map(str, answer_data.split('-'))
        if loop == 'posts':
            user = pool
            await send_posts(
                chat_id,
                posts=Profile.from_username(L.context, user).get_posts())
        else:
            userid = int(pool)
            if loop == 'stories':
                await send_stories(chat_id)
            elif loop == 'highlights':
                await info_highlights(chat_id)
            elif loop == 'highlight':
                # Pick the highlight at position `it`; the loop variable
                # binds the module-level `highlight` consumed by
                # send_highlights (the original's `highlight = highlight`
                # self-assignment was a no-op and is removed).
                for i, highlight in enumerate(L.get_highlights(userid)):
                    if i == int(it):
                        await send_highlights(chat_id)
                        break
def login():
    """Endpoint: 'api/login' (POST, JSON body).

    Expected JSON keys: user_token, name, email.
    Inserts a new account row when the token is unknown, otherwise marks
    the existing account active (status = 1).

    Returns:
        JSON {'is_success': 200} on success, {'is_success': 500} on error.
        (The original docstring described a different endpoint's output.)
    """
    try:
        data = request.get_json()
        user_token = data['user_token']
        name = data['name']
        email = data['email']
        # WARNING: request values are interpolated straight into SQL --
        # switch to parameterized queries if the db helper supports them.
        sql_check = f"""select user_token from account where user_token = '{user_token}' """
        conn = db.create_connection()
        output = db.execute_read_query(conn, sql_check)
        if not output:
            sql_insert = f"""INSERT INTO account VALUES('{user_token}', '{name}', '{email}', {1}) """
            db.execute_query(conn, sql_insert)
        else:
            sql_update = f"""UPDATE account set status = 1 where user_token = '{user_token}' """
            db.execute_query(conn, sql_update)
        conn.close()
        return dumps({'is_success': 200})
    except Exception:
        # narrowed from a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt
        return dumps({'is_success': 500})
def keepAlive(self):
    """Ping the database to keep the connection alive.

    On a db-level failure, attempt a single automatic reconnect; if that
    also fails, drop the connection.
    """
    if self.connection is None:
        return
    try:
        log('sending keep alive... ', False, True)
        started = time.time()
        db.execute_query(self.connection, 'select * from dummy', [])
        elapsed = time.time() - started
        log('ok: %s ms' % (str(round(elapsed, 3))), True)
    except dbException:
        log('Trigger autoreconnect...')
        try:
            fresh = db.create_connection(self.server)
            if fresh is not None:
                self.connection = fresh
                log('Connection restored automatically')
            else:
                log('Some connection issue, give up')
                self.connection = None
        except:
            log('Connection lost, give up')
            # print disable the timer?
            self.connection = None
def put(self, id):
    """Update a single product based on ID.

    Returns 404 when the id does not exist; otherwise updates the row
    from the JSON body (name, description, price, qty) and returns a
    summary of the new state with HTTP 200.
    """
    # int() first, so nothing non-numeric can be spliced into the
    # string-built SQL below (equal to str(id) for legitimate ids).
    idToUpdate = str(int(id))
    infoToSearch = "SELECT * from products WHERE id=" + idToUpdate
    result = execute_read_query(infoToSearch)
    if len(result) == 0:
        return "Cannot update", 404

    updatedInfo = [
        request.json.get('name'),
        request.json.get('description'),
        str(request.json.get('price')),
        str(request.json.get('qty')),
    ]
    # WARNING: name/description come from the request body and are
    # embedded unescaped -- SQL injection risk if this API is exposed;
    # a parameterized query API would be the proper fix.
    updateID = ("UPDATE products SET name ='" + updatedInfo[0] +
                "', description ='" + updatedInfo[1] +
                "', price =" + updatedInfo[2] +
                ", qty =" + updatedInfo[3] +
                " WHERE id=" + idToUpdate)
    execute_query(updateID)
    result = execute_read_query(infoToSearch)
    jsonResult = vars(
        ProductModel(result[0][1], result[0][2], result[0][3], result[0][4]))
    return "Product id: " + idToUpdate + " is updated. " + str(jsonResult), 200
def update_ratings():
    """Endpoint: '/update_ratings' (POST, JSON body: rating, drink_id).

    Averages the stored rating with the submitted one and persists it.

    Returns:
        JSON {'old_rating': curr_rating, 'new_rating': new_rating}.
    """
    data = request.get_json()  # returns None if the body is not JSON
    # Coerce both values so nothing attacker-controlled reaches the SQL
    # strings below (they are interpolated, not parameterized).
    rating = float(data['rating'])
    drink_id = int(data['drink_id'])

    # query the database for current rating
    sql_query = f"""select rating from drink where id = {drink_id}"""
    conn = db.create_connection()
    output = db.execute_read_query(conn, sql_query)
    curr_rating = output[0][0]

    # NOTE: a true running average needs the number of ratings as well
    new_rating = round((curr_rating + rating) / 2.0, 2)
    update_post_description = f"""update drink set rating = {new_rating} WHERE id = {drink_id}"""
    db.execute_query(conn, update_post_description)
    conn.close()
    return dumps({
        'old_rating': curr_rating,
        'new_rating': new_rating,
    })
def initHosts(self, hosts, hostKPIs, srvcKPIs):
    """Populate hosts/hostKPIs/srvcKPIs from the database.

    Applies the optional hostmapping/portmapping config rewrites, then
    loads KPI descriptions and any custom KPI yaml definitions
    (re-raising on a broken yaml so the connect fails loudly).
    """
    kpis_sql = sql.kpis_info

    if not self.connection:
        log('no db connection...')
        return

    log('init hosts: %s' % str(hosts))
    log('init hosts, hostKPIs: %s' % str(hostKPIs))
    log('init hosts, srvcKPIs: %s' % str(srvcKPIs))

    sql_string = sql.hosts_info
    t0 = time.time()
    rows = db.execute_query(self.connection, sql_string, [])

    # Optional host/port rewriting; cfg() hoisted out of the loop and the
    # two formerly-duplicated loop bodies unified.
    hm = cfg('hostmapping')
    pm = cfg('portmapping')
    for row in rows:
        host, port = row[0], row[1]
        if hm:
            host = host.replace(hm[0], hm[1])
            port = port.replace(pm[0], pm[1])
        hosts.append({'host': host, 'port': port,
                      'from': row[2], 'to': row[3]})

    rows = db.execute_query(self.connection, kpis_sql, [])
    kpiDescriptions.initKPIDescriptions(rows, hostKPIs, srvcKPIs)
    t1 = time.time()

    try:
        dpDBCustom.scanKPIsN(hostKPIs, srvcKPIs, kpiDescriptions.kpiStylesNN)
    except Exception as e:
        log('[e] error loading custom kpis')
        log('[e] fix or delete the problemmatic yaml for proper connect')
        raise e
    t2 = time.time()

    kpiDescriptions.clarifyGroups()
    log('hostsInit time: %s/%s' %
        (str(round(t1 - t0, 3)), str(round(t2 - t1, 3))))
def add_test_dataset_into_db(title='test_dataset'):
    """Create a pack of test files and register each in the datasets table.

    Returns the temporary directory and the list of file names created.
    """
    tmp_dir, tmp_files = create_test_files_pack(title, 10)
    with create_connection() as conn:
        for idx, fname in enumerate(tmp_files):
            insert_sql = (
                f'INSERT INTO datasets (id, title, filename) '
                f'VALUES ("{idx}", "{title}", "{fname}");'
            )
            execute_query(conn, insert_sql)
    return tmp_dir, tmp_files
def purge_user_session(userid):
    """Remove any existing session rows for the given user id."""
    delete_sql = """
        DELETE FROM user_sessions
        WHERE user_id = %s
        """
    execute_query(delete_sql, landlord, (userid, ), False)
async def start(message: types.Message):
    """Handle /start: register an unknown user (default language 'en') and
    show the language-selection inline keyboard.

    message.from_user.id is an integer supplied by Telegram, so the
    interpolated queries below are not attacker-controllable.
    """
    row = db.execute_one_query(
        conn, f'SELECT * FROM users WHERE user_id = "{message.from_user.id}"')
    if row is None:  # identity check instead of `== None`
        db.execute_query(
            conn, f'INSERT INTO users VALUES ("{message.from_user.id}","en")')
    lang = types.InlineKeyboardMarkup(row_width=2).add(
        # f-prefixes removed: the button labels contain no placeholders
        types.InlineKeyboardButton(text='Русский', callback_data="ru"),
        types.InlineKeyboardButton(text='English', callback_data='en'))
    await bot.send_message(message.chat.id,
                           'Hello, choose language :)',
                           reply_markup=lang)
def delete(self, id):
    """Delete a single product based on ID.

    Returns 404 when nothing matches, 200 after a successful delete.
    """
    # int() first, so nothing non-numeric can be spliced into the
    # string-built SQL below (equal to str(id) for legitimate ids)
    idToDelete = str(int(id))
    infoToSearch = "SELECT * from products WHERE id=" + idToDelete
    result = execute_read_query(infoToSearch)
    if len(result) == 0:
        return "Nothing to delete. Since, it is empty.", 404
    idToSearch = "DELETE FROM products WHERE id=" + idToDelete
    execute_query(idToSearch)
    return "Product with id: " + str(id) + " is successfully deleted.", 200
def handle_ops(fetch_query, delete_query):
    """Export the rows matching fetch_query to a CSV on Drive, then delete
    them; notify when the query matched nothing."""
    rows = execute_query(fetch_query)
    print(fetch_query)
    if not rows:
        print("There are no records for this query.")
        send_message("There are no records for this query.")
        return
    print("There are records.")
    csv_path, filename = create_csv(rows)
    execute_query(delete_query)
    upload_to_drive(csv_path, filename)
    os.remove(csv_path)
def read_cps_files():
    """Parse every KML file in cps/ and refresh the cp_coords and
    cp_coords_detail tables with the placemark coordinates found."""
    all_files = [f for f in listdir('cps/') if isfile(join('cps/', f))]
    for file_name in all_files:
        print(file_name)
        doc = xml.dom.minidom.parse('cps/' + file_name)
        cp_coords_data = []
        cp_coords_detail_data = []
        placemark_xml_list = doc.getElementsByTagName("Placemark")
        for placemark in placemark_xml_list:
            cp = ""
            cps_xml = placemark.getElementsByTagName("SimpleData")
            for cp_xml in cps_xml:
                cp = cp_xml.firstChild.data  # last SimpleData value wins
            coords_xml_list = placemark.getElementsByTagName("coordinates")
            for coords_xml in coords_xml_list:
                formatted_coords = (str(coords_xml.firstChild.data).replace(
                    '\n', ' '))
                # bug fix: raw string -- '\s' in a plain literal is a
                # deprecated escape sequence
                formatted_coords = re.sub(r'\s+', ' ', formatted_coords)
                coords_list_separated = formatted_coords.split(' ')
                for cls in coords_list_separated:
                    if len(cls) > 1:
                        lonlat = cls.split(',')
                        longitudy = float(lonlat[0])
                        latitudx = float(lonlat[1])
                        cp_coords_detail_data.append({
                            'cp': cp,
                            'lat': str(latitudx),
                            'lon': str(longitudy),
                            'file_name': file_name
                        })
                cp_coords_data.append({
                    'cp': cp,
                    'coords': formatted_coords,
                    'file_name': file_name
                })
        # refresh this file's rows (NOTE: file_name is interpolated into
        # SQL -- safe only while filenames contain no quote characters)
        execute_query('DELETE FROM cp_coords where file_name = \'%s\'' % file_name)
        execute_insert('cp_coords', cp_coords_data)
        execute_query('DELETE FROM cp_coords_detail where file_name = \'%s\'' % file_name)
        execute_insert('cp_coords_detail', cp_coords_detail_data)
def get_sales():
    """Return the total sales amount over all invoice items."""
    query = "select * from Invoice_Items"  # f-prefix removed: no placeholders
    records = execute_query(query)
    sales = 0
    for row in records:
        # row[3] * row[4]: presumably unit price * quantity -- TODO confirm schema
        sales += row[3] * row[4]
    return "Sales amount: {}".format(round(sales, 2))
def add_rule_simple(connection, type, address, duration, user, comment):
    """Create a blocking rule; return its id and a dict describing the row."""
    if duration:
        duration_type, duration_value = duration
    else:
        duration_type = duration_value = None
    query = durationify(ADD_RULE_QUERY, duration_type)
    parameters = {
        'type': type,
        'address': address,
        'duration': duration_value,
        'user': user,
        'comment': comment,
    }
    first_row = db.execute_query(connection, query, parameters)[0]
    return first_row.br_id, dict_from_rule_row(first_row)
def pull_study():
    """Hand out the next pending testing item for an authenticated service.

    Expects JSON {name, session_token}; aborts 400 on a malformed body and
    404 when the session has no pending items.  On success returns the
    dataset zip as a file response with the testing item id in the
    `testing_item_id` response header.
    """
    try:
        data = request.json
        service_name = data['name']
        session_token = data['session_token']
    except Exception:
        abort(400, 'POST request content is incorrect (should contain name, session_token).')
    with create_connection() as conn:
        # check is_active and not is_expired
        session = check_token(conn, service_name, session_token)
        # retrieve first empty row inside session
        # NOTE(review): `session` is interpolated into the SQL -- assumed
        # to be a trusted integer returned by check_token; confirm.
        ds = pd.read_sql_query(f"""
            SELECT t.id AS id, t.requests AS requests, t.dataset_title AS dataset_title, d.filename AS filename
            FROM testing AS t LEFT JOIN datasets AS d ON t.dataset_file_id=d.id
            WHERE t.received IS NULL AND t.session = {session}
            ORDER BY t.id LIMIT 1
        """, conn)
        # check for empty response
        if len(ds.index) == 0:
            abort(404, 'No available items for this session_token')
        else:
            ds = ds.iloc[0]
            # update time of retrieval (and bump the request counter)
            update_time_of_retrieval = f"""
                UPDATE testing SET retrieved=datetime('now'), requests={ds.requests+1}
                WHERE id={ds.id};
            """
            execute_query(conn, update_time_of_retrieval)
            # filename is assumed to always end in '.zip'; the regex strips
            # that suffix to recover the item id -- verify against schema
            resp = make_response(send_file(f'datasets/{ds.dataset_title}/{ds.filename}', 'application/zip'))
            resp.headers['testing_item_id'] = re.match(r'^(.*)\.zip$', ds.filename)[1]
            return(resp)
def get_data(analyte, pid):
    """Fetch MajorChemistry rows for `analyte` whose SamplePointID starts
    with `pid`, ordered by analysis date.

    Bug fix: the LIKE pattern is a string ('<pid>%'), so its placeholder
    must be %s, not the numeric %d the original used (and %s is the
    DB-API 'format' paramstyle for all types anyway).
    """
    sql = '''
    select * from MajorChemistry as MC
    where MC.Analyte=%s
    and MC.SamplePointID like %s
    and MC.AnalysisDate is not NULL
    order by MC.AnalysisDate
    '''
    pid = '{}%'.format(pid)
    return execute_query(sql, (analyte, pid))
def get_sales():
    """Return the total sales amount over all invoice items."""
    query = "select * from invoice_items"  # f-prefix removed: no placeholders
    records = execute_query(query)
    total = 0
    for elem in records:
        # elem[3] * elem[4]: presumably unit price * quantity -- TODO confirm schema
        total += (elem[3] * elem[4])
    print(total)  # kept: existing debug output
    return f'Сумма продаж = {total}'
def get_pointids(analyte):
    """Return the sorted, distinct PointIDs that have publicly-released
    NMBGMR analyses for the given analyte."""
    sql = '''select L.PointID
    from Location as L
    join MajorChemistry as MC on substring(MC.SamplePointID, 0, LEN(MC.SamplePointID))=L.PointID
    where MC.Analyte=%d
    and L.PublicRelease=1
    and MC.AnalysisDate is not NULL
    and MC.AnalysesAgency='NMBGMR'
    group by L.PointID
    order by L.PointID
    '''
    rows = execute_query(sql, analyte)
    return [row['PointID'] for row in rows]
def get_overlapping_active_rules_by_type(connection, type, address, excluded_id=-1):
    """Return all active rules that share at least one IP address with the
    given address (optionally excluding one rule id)."""
    return db.execute_query(
        connection,
        GET_OVERLAPPING_ACTIVE_RULES_BY_TYPE_QUERY,
        {'type': type, 'address': address, 'excluded_id': excluded_id},
        dict_from_rule_row,
    )
def delete(self, id):
    """Delete a single product based on ID.

    Returns 404 when no matching product was found, otherwise 200.
    """
    # WARNING: id is interpolated straight into SQL -- ensure the route
    # converter guarantees an integer, or parameterize the query.
    delete_product = "DELETE FROM products WHERE id=%s" % id
    products = execute_query(delete_product)
    # bug fix: the original tested the undefined name `product`, which
    # raised NameError on every call
    if len(products) == 0:
        return 'no product present', 404
    else:
        return 'Product with product number %s is deleted' % id, 200
def select_bookmarks(cursor):
    """Execute the bookmark query for everything added after the last
    indexed bookmark date; return the cursor with results pending."""
    bookmarks_query = """
    SELECT DISTINCT url, moz_places.title, dateAdded from moz_places
    JOIN moz_bookmarks on moz_bookmarks.fk=moz_places.id
    WHERE moz_places.url like 'http%' and dateAdded > ?
    ORDER BY dateAdded ASC
    """
    bm_date = get_last_row_bm_date() or 100000
    cutoff = datetime.datetime.fromtimestamp(bm_date / 1000000)
    print("Indexing bookmarks added after: " + str(cutoff))
    db.execute_query(cursor, bookmarks_query, [bm_date])
    return cursor
def get_customers(country=None, city=None):
    """Fetch customers filtered by country and/or city (or all customers).

    Bug fix: when only one filter was supplied, the old single-filter
    branch interpolated the missing value as the literal string 'None'
    into an OR clause.
    WARNING: values are still interpolated into SQL -- parameterize if
    the query helper supports it.
    """
    if country and city:
        query = f"select * from Customers where Country = '{country}' AND City = '{city}'"
    elif country:
        query = f"select * from Customers where Country = '{country}'"
    elif city:
        query = f"select * from Customers where City = '{city}'"
    else:
        query = "select * from Customers"
    records = execute_query(query)
    return format_records(records)
def cancel_rule_simple(connection, type, address, user, comment):
    """Cancel the whitelist entries for the given address; return the
    affected rows as dicts."""
    return db.execute_query(
        connection,
        CANCEL_RULE_QUERY,
        {'type': type, 'address': address, 'user': user, 'comment': comment},
        dict_from_rule_row,
    )
def logout():
    """Endpoint: 'api/logout' (POST, JSON body: user_token).

    Marks the account inactive (status = 0).

    Returns:
        JSON {'is_success': 1} on success, {'is_success': 0} on error.
    """
    try:
        data = request.get_json()
        user_token = data['user_token']
        # WARNING: user_token is interpolated into SQL -- parameterize if
        # the db helper supports placeholders.
        sql_update = f"""UPDATE account set status = 0 where user_token = '{user_token}' """
        conn = db.create_connection()
        db.execute_query(conn, sql_update)
        conn.close()
        return dumps({'is_success': 1})
    except Exception:
        # narrowed from a bare `except:`
        return dumps({'is_success': 0})
def generate_session_token(userid):
    """Create and persist a fresh session for the user; return its token.

    Existing sessions are purged first (one session per user for security).
    """
    purge_user_session(userid)
    token = str(uuid.uuid4())
    insert_sql = """
        INSERT INTO user_sessions(
            id,
            user_id,
            login_time
        ) VALUES (
            %s,
            %s,
            NOW()
        )
        """
    execute_query(insert_sql, landlord, (token, userid), False)
    return token
def run(self, dispatcher: CollectingDispatcher, tracker: Tracker,
        domain: Dict[Text, Any]) -> List[Dict[Text, Any]]:
    """Update reps/time/sets of the user's routine for the exercise in the
    `changing_exercise` slot, choosing the column by the unit suffix of
    the `number` slot (회/번 = reps, 분 = minutes, 세트 = sets)."""
    user_id = get_user_id(tracker)
    exercise_name = tracker.get_slot('changing_exercise')
    number = tracker.get_slot('number')
    # WARNING: slot values are interpolated into SQL -- injection risk;
    # switch to parameterized queries if db.execute_query supports them.
    if number.endswith('회') or number.endswith('번'):
        number = int(number[:-1])
        db.execute_query(f'update user_routine set reps={number} where user_id={user_id} and exercise_name=\'{exercise_name}\'')
        m = f"{exercise_name}의 횟수가 {number}회로 변경되었어!"
    elif number.endswith('분'):
        number = int(number[:-1])
        db.execute_query(f'update user_routine set time={number} where user_id={user_id} and exercise_name=\'{exercise_name}\'')
        m = f"{exercise_name}의 시간이 {number}분으로 변경되었어!"
    elif number.endswith('세트'):
        number = int(number[:-2])
        db.execute_query(f'update user_routine set sets={number} where user_id={user_id} and exercise_name=\'{exercise_name}\'')
        m = f"{exercise_name}의 세트수가 {number}세트로 변경되었어!"
    else:
        # bug fix: `m` was undefined when the suffix matched none of the
        # cases above, raising NameError at utter_message below
        m = f"{number}의 단위를 이해하지 못했어!"
    dispatcher.utter_message(m)
    if debug:
        dispatcher.utter_message("action_change_routine called!")
    return [SlotSet("changing_exercise", None), SlotSet("number", None)]
def get_greatest_hits(count=None):
    """Return per-track revenue totals in descending order, optionally
    limited to the top `count` tracks."""
    query = "SELECT tracks.Name, sum(invoice_items.UnitPrice * invoice_items.Quantity) as total, count(*) " \
            "FROM tracks " \
            "JOIN invoice_items " \
            "on tracks.TrackId = invoice_items.TrackId " \
            "GROUP by tracks.Name " \
            "ORDER by total DESC"
    if count:
        # int() both validates (no SQL injection through `count`) and
        # drops the quotes the original put around the LIMIT value,
        # which not all engines accept
        query += f" LIMIT {int(count)}"
    records = execute_query(query)
    return format_records(records)
def keepAlive(self):
    """Chart keep-alive tick.

    Pings the database; on a db-level failure attempts one automatic
    reconnect and stops the keep-alive timer if that fails.  Any other
    unexpected exception drops the connection without retrying.
    """
    if self.connection is None:
        log('no connection, disabeling the keep-alive timer')
        self.timer.stop()
        return
    try:
        log('chart keep-alive... ', 3, False, True)
        t0 = time.time()
        db.execute_query(self.connection, 'select * from dummy', [])
        if hasattr(self, 'fakeDisconnect'):
            # deliberate test hook: 10 / 0 raises ZeroDivisionError before
            # log() is even called, exercising the exception handling below
            log('generate an exception...')
            log(10 / 0)
        t1 = time.time()
        log('ok: %s ms' % (str(round(t1 - t0, 3))), 3, True)
    except dbException as e:
        # db-level failure: try a single automatic reconnect
        log('Trigger autoreconnect...')
        try:
            conn = db.create_connection(self.server)
            if conn is not None:
                self.connection = conn
                log('Connection restored automatically')
            else:
                log('Some connection issue, give up')
                self.timer.stop()
                self.connection = None
        except:
            # NOTE(review): bare except also catches KeyboardInterrupt
            log('Connection lost, give up')
            self.timer.stop()
            self.connection = None
    except Exception as e:
        # non-db exception (including the fake one above): drop the
        # connection but leave the timer running
        log('[!] unexpected exception, disable the connection')
        log('[!] %s' % str(e))
        self.connection = None
def extract_thing_properties(p):
    """Fetch the first property row for PointID `p` from the configured
    TABLE, enriched with the human-readable LU_* lookup meanings."""
    query_template = '''select TOP(1) *,
    ludr.MEANING as [DR_Meaning],
    lumm.MEANING as [MM_Meaning],
    luam.MEANING as [AM_Meaning]
    from dbo.{} as P
    join Location as L on L.PointID = P.PointID
    join dbo.LU_MeasurementMethod as lumm on lumm.Code = P.MeasurementMethod
    join dbo.LU_DataReliability as ludr on ludr.Code = L.DataReliability
    join dbo.LU_AltitudeMethod as luam on luam.Code = L.AltitudeMethod
    where P.PointID=%d
    '''
    return execute_query(query_template.format(TABLE), p)
def get_pointids():
    """Return (PointID, MeasuringAgency) pairs for the Water Level Network.

    NOTE(review): the SQL below looks broken and likely fails at execution
    time -- confirm against the live database:
      * two joins use the same alias `WL` (WaterLevels and
        WaterLevelsContinuous_Acoustic);
      * the filter references `L.PublicRelease` although the
        `Location as L` join is commented out;
      * `WL.PointID` appears twice in GROUP BY.
    """
    sql = '''
    select WL.PointID, WL.MeasuringAgency
    from ProjectLocations as T
    join WaterLevels as WL on T.PointID = WL.PointID
    --join Location as L on L.PointID = T.PointID
    join WaterLevelsContinuous_Acoustic as WL on T.PointID = WL.PointID
    where T.ProjectName='Water Level Network' and L.PublicRelease=1
    group by WL.PointID, WL.PointID, WL.MeasuringAgency
    order by WL.PointID
    '''
    pids = [(pid['PointID'], pid['MeasuringAgency']) for pid in execute_query(sql)]
    return pids
def create_issue(form_data):
    """Insert a new issue row from the submitted form data.

    Returns True on success, False when the insert raised (the error is
    printed, matching the surrounding code's style).
    """
    insert_sql = """
        INSERT INTO issues(
            title, description, status, unit, issue_type, submitted, priority
        ) VALUES (
            %s, %s, %s, %s, %s, NOW(), %s
        )
        """
    values = (
        form_data["title"],
        form_data["description"],
        form_data["status"],
        form_data["unit"],
        form_data["issue_type"],
        form_data["priority"],
    )
    try:
        execute_query(insert_sql, landlord, values, False)
        return True
    except Exception as e:
        print(e)
        return False
def extract_data2(p):
    """Fetch all measurement rows for PointID `p` from the configured
    TABLE, with geometry coordinates, lookup meanings and a DateTime-cast
    measurement date, oldest first."""
    query_template = '''select *,
    L.GEOMETRY.STY as [NorthingGG],
    L.GEOMETRY.STX as [EastingGG],
    L.GEOMETRY.STSrid as [SRID],
    ludr.MEANING as [DR_Meaning],
    lumm.MEANING as [MM_Meaning],
    CAST(DateMeasured as DateTime) as DateTimeMeasured
    from NM_Aquifer.dbo.{} as P
    join Location as L on L.PointID = P.PointID
    join dbo.LU_MeasurementMethod as lumm on lumm.Code = P.MeasurementMethod
    join dbo.LU_DataReliability as ludr on ludr.Code = L.DataReliability
    where P.PointID=%d
    order by P.DateMeasured asc
    '''
    return execute_query(query_template.format(TABLE), p)
def do_query(self, arg):
    'Allows execution of arbitrary queries of the operational database; use with care [query <SQL statement>]'
    query = arg
    # raw string fix: '\;' in a plain literal is an invalid escape
    # (DeprecationWarning); [;] matches the same thing
    if re.compile(r'^show tables[;]?$').match(arg):
        query = "select * from sqlite_master;"
    if not query.endswith(';'):
        query += ';'
    output = ''
    try:
        results = db.execute_query(query)
        output = '\n' + db.convert_results_to_string(results) + '\n'
    except sqlite3.OperationalError:
        output = '\nEncountered error while attempting to execute the provided query; please check your syntax and try again\n'
    print(output)
    self.log_command(arg, output)
def get_overlapping_entries(connection, address):
    """Return history entries for the given address (as produced by
    GET_HISTORY_FOR_ADDRESS_QUERY), each converted to a rule dict."""
    return db.execute_query(
        connection,
        GET_HISTORY_FOR_ADDRESS_QUERY,
        {'address': address},
        db.common.dict_from_rule_row,
    )
def get_existing_whitelist_entries_for_address(connection, address):
    """Fetch existing whitelist entries for the given exact address."""
    params = {'address': address}
    return db.execute_query(connection, GET_EXISTING_WHITELIST_ENTRIES_QUERY,
                            params, db.common.dict_from_rule_row)
def get_existing_longer_blacklist_entries(connection, address, duration):
    """Fetch longer-lasting blacklist entries for the given exact address.

    `duration` is a (duration_type, duration_value) pair; the type selects
    the concrete query via durationify.
    """
    duration_type, duration_value = duration
    final_query = db.common.durationify(
        GET_EXISTING_LONGER_BLACKLIST_ENTRIES_QUERY, duration_type)
    return db.execute_query(connection, final_query,
                            {'address': address, 'duration': duration_value},
                            db.common.dict_from_rule_row)
def supersede_shorter_blacklist_entries(connection, address, duration, user, superseding_id):
    """Mark existing shorter blacklist entries for the given exact address
    as superseded by `superseding_id`; return those entries as dicts."""
    duration_type, duration_value = duration
    final_query = db.common.durationify(
        SUPERSEDE_SHORTER_BLACKLIST_ENTRIES_QUERY, duration_type)
    params = {
        'address': address,
        'duration': duration_value,
        'user': user,
        'superseding_id': superseding_id,
    }
    return db.execute_query(connection, final_query, params,
                            db.common.dict_from_rule_row)
def get_active_rules_by_type(connection, type):
    """Fetch the currently-active blocking rules of the given type."""
    return db.execute_query(connection, GET_ACTIVE_RULES_BY_TYPE_QUERY,
                            {'type': type}, dict_from_rule_row)