def before_request():
    """Open the DB for this request and ensure session['zipcode'] is set.

    Falls back to the logged-in user's stored address zipcode, then to the
    hard-coded default '46989' when nothing else is available.
    """
    db.open_db()
    if 'user_id' in session:
        # Bug fix: the original used session['zipcode'], which raises
        # KeyError when the key has never been set; .get() covers both
        # the missing and the falsy case.
        if not session.get('zipcode'):
            # Index 4 of the address row is assumed to be the zipcode
            # -- TODO confirm against db.get_user_address().
            session['zipcode'] = db.get_user_address(session['user_id'])[4]
    if not session.get('zipcode'):
        session['zipcode'] = '46989'
async def have_drink(ctx, drink):
    """Consume *drink* from the invoking user's inventory, if they own it."""
    passports = open_db("database/passports.json")
    catalog = open_db("database/items.json")["drinks"]
    owner = str(ctx.author)
    inventory = passports[owner]["items"]
    # The item must both be a known drink and actually be owned.
    if drink in catalog and drink in inventory:
        inventory.remove(drink)
        update_db("database/passports.json", passports)
        await ctx.send(ctx.author.name + ' выпил ' + drink)
        return
    await ctx.send("У вас нет такого предмета или его нельзя пить")
def upload_df_with_batches(SQL, df, queue=None, batch_size=500):
    """Upload dataframe *df* to the DB in batches of *batch_size* rows.

    When *queue* is None the batches are executed directly on a fresh DB
    connection (with a console progress bar); otherwise each (SQL, batch)
    pair is put on *queue* for a separate upload worker to consume.

    :param SQL: statement executed per batch by upload_batch()
    :param df: pandas DataFrame to upload
    :param queue: optional Queue for worker-based upload
    :param batch_size: rows per batch (default 500)
    """
    # cursor.execute() cannot handle NaN, so replace it with None.
    # According to stackoverflow this replacement can break further
    # dataframe operations, so it is done right before the upload.
    df = df.where(df.notnull(), None)
    batches = list(split_df(df, batch_size))
    length = len(batches)
    logging.info(
        f'ALL ITEMS: {sum([len(batch) for batch in batches])} IN {length} BATCHES'
    )
    logging.info(f'DF SHAPE: {df.shape}')
    warnings_ = []
    if not queue:
        cur, conn = open_db()
        logging.info(f"Uploading items")
        progress_string = 'Uploading dataframe to DB: '
        printProgressBar(0, length, prefix=progress_string, suffix='Complete', length=100)
        for i, batch in enumerate(batches):
            # NOTE(review): warnings_ is overwritten on every iteration, so
            # only the LAST batch's warnings reach show_1265_warnings()
            # below -- confirm whether accumulation was intended.
            warnings_ = upload_batch(SQL, batch, cur, conn)
            printProgressBar(i + 1, length, prefix=progress_string, suffix='Complete', length=100)
        conn.commit()
        close_db(cur, conn)
    else:
        # NOTE(review): batches[0][0] raises IndexError for an empty df.
        print('Putting {} to Queue...'.format(batches[0][0]['bkf_filename']))
        for batch in batches:
            queue.put((SQL, batch))
    show_1265_warnings(warnings_)
def open_db(self, server):
    """(Re)open the per-server database and ensure all required sections exist.

    Any previously opened handle for *server* is closed first; the special-role
    cache is always rebuilt from scratch afterwards.
    """
    if server in self.db and self.db[server]:
        self.db[server].close()
    self.db[server] = open_db(self.config['servers'][server.name]['db'])
    # Deduplicated: guarantee every expected top-level section is present
    # (the original repeated the same "if missing, create" pattern six times).
    for section in ('matches', 'teams', 'captains', 'groups', 'roles', 'sroles'):
        if section not in self.db[server]:
            self.db[server][section] = {}
    # Refill group cache: wipe and re-cache the special roles.
    self.db[server]['sroles'] = {}
    for role in ('captain', 'referee', 'streamer'):
        self.cache_special_role(server, role)
def receive():
    """Return (as JSON) all unreceived messages for a receiver and mark them received."""
    def row_to_dict(cursor, row):
        # Allow access to column values by field name instead of index.
        return {desc[0]: row[i] for i, desc in enumerate(cursor.description)}

    conn = open_db()
    conn.row_factory = row_to_dict
    cur = conn.cursor()
    # Only messages addressed to the requested receiver are returned.
    receiver_id = request.args.get('receiver_id')
    pending = cur.execute(
        'SELECT * FROM Server_messages WHERE is_received = 0 AND receiver_id = ?',
        (receiver_id, )).fetchall()
    # Flag everything for this receiver as received.
    cur.execute(
        'UPDATE Server_messages SET is_received = 1 WHERE receiver_id = ?',
        (receiver_id, ))
    conn.commit()
    close_db(conn)
    return jsonify(pending)
def list_latest(coin):
    """Print the (up to) last 11 price records for *coin*, oldest first.

    Each line shows the timestamp, the sell price formatted to a
    magnitude-dependent precision, and the percent change vs the previous
    record with a plus/minus icon.
    """
    db = open_db()
    change = None
    sell_old = None
    # Oldest-to-newest order, limited to the last 11 records.
    for i, r in enumerate(
            sorted(db._abbr[coin.lower()], key=sort_date, reverse=False)[-11:]):
        sell = r.get('sell')
        # Percent change relative to the previous record (None for the first).
        change = ((sell / sell_old) - 1) * 100 if sell_old else None
        line = ''
        line += r.get('date').strftime('%m-%d %H:%M')
        # Order of magnitude of the price drives how many decimals fit.
        digits = math.floor(math.log10(r.get('sell')))
        # NOTE(review): assert is stripped under "python -O" -- consider raising.
        assert digits <= 4, "Numbers too large to print"
        width = 5
        if digits >= 0 and digits < 4:
            decimals = width - 3 - digits
        else:
            decimals = -min(0, digits)
        line += ' €{0:{width}.{decimals}f}'.format(
            r.get('sell'),
            width=width,
            decimals=0 if not decimals else decimals + 1)
        line += ' %0.2f%%' % abs(change) if change is not None else ''
        line += '%s' % (PLUS_ICON if change >= 0 else MINUS_ICON) if change is not None else ''
        if i == 0:
            # NOTE(review): for the first record only the header is printed;
            # the formatted `line` computed above is discarded -- confirm
            # this is intentional.
            print('%s (%s)' % (r.get('name'), r.get('abbr').upper()))
        else:
            print(line)
        sell_old = sell
def sendText():  # function to send text messages
    """Interactively prompt for server, recipient and text, then send via HTTP GET.

    NOTE(review): the DB connection is opened but never closed here.
    """
    conn = open_db()
    cur = conn.cursor()
    host_name = input(
        f'Inserisci l\'del server, premi invio per confermare questo({Server_ip}): '
    )  # the server changes often, so it is asked interactively; when testing
    # locally just press Enter to accept the default
    if host_name == '':
        host_name = Server_ip  # pressing Enter assigns this machine's IP; see conf.py for how the IP is obtained
    receiver_id = input(
        f'Inserisci l\'ID del destinatario, premi invio per confermare questo({MY_ID}): '
    )  # recipient id; during tests the author's own registry number is the default
    # same as for the IP: Enter confirms the default
    if receiver_id == '':
        receiver_id = MY_ID
    if cur.execute(
            'SELECT * FROM users WHERE id = ?',
        (receiver_id, )).fetchone():  # verify the entered id exists in the DB
        text = input('Inserisci il testo: ').replace(
            ' ', '+')  # replace spaces with + so the text is URL-safe
        print(send_url % (host_name, MY_ID, text, receiver_id))
        get(
            send_url % (host_name, MY_ID, text, receiver_id)
        )  # call http://%s:5000/api/v1/send?sender_id=%s&text=%s&receiver_id=%s with the collected values
        print('Messaggio inviato!')
    else:
        print('Non c\'e\' nessun utente con questo id')
def validate_services(services, timeout, db_file):
    """Worker: validate every service in *services* on its own DB connection.

    :param services: array of services returned by SQL query
    :param timeout: timeout for calling the validator
    :param db_file: name of the sqlite3 DB file
    """
    # Each parallel worker opens its own connection to the DB.
    logging.info("Opening DB file %s", db_file)
    conn = db.open_db(db_file)
    total = len(services)
    logging.info("Worker starting to process %d services", total)
    processed = 0
    for service in services:
        processed += 1
        logging.info("Processing service %d/%d", processed, total)
        # Individual columns are retrieved in the same order as the query in main.
        validate_service(conn, service, timeout)
    # Close the DB connection before the end of the process.
    logging.info('Closing DB connection')
    conn.close()
    logging.info("Worker finished with %d services processed", processed)
def add():
    """Create a new good from the submitted form and redirect to the index."""
    form = request.form
    new_good = Good(0, form['name'], int(form['price']), int(form['qty']))
    db.add(db.open_db(db_url), new_good)
    return redirect(url_for('index'))
def get_website_info(website_id):
    """Return (domain, use_https, db_password, files_password, version) for *website_id*."""
    with db.open_db() as conn:
        with conn.cursor() as cur:
            cur.execute(
                "SELECT domain,use_https,db_password,files_password,version FROM users_website WHERE id=%s",
                (website_id, ))
            # fetchone(): a single row (or None when the id is unknown).
            return cur.fetchone()
def edit_by_id(id):
    """Update an existing good from the submitted form and show its details."""
    form = request.form
    updated = Good(id, form['name'], int(form['price']), int(form['qty']))
    db.update(db.open_db(db_url), updated)
    return redirect(url_for('details_by_id', id=id))
def receiveText():
    """Print all unread messages (is_read = 0) and mark them as read.

    Unread messages are fetched once (the original executed the identical
    SELECT twice: once for the emptiness check and once for the loop).
    """
    conn = open_db()
    cur = conn.cursor()
    print(
        '------------------------------------------M E S S A G G I--------------------------------------------\n'
    )
    unread = cur.execute('SELECT * FROM Client_messages WHERE is_read = 0').fetchall()
    if unread:
        for msg in unread:
            # Resolve the sender's name from its id (msg[2]).
            name = cur.execute('SELECT name FROM Users WHERE id = ?',
                               (msg[2], )).fetchone()[0]
            print(
                f"{name}[id: {msg[2]}]: {msg[3]} at {datetime.fromtimestamp(float(msg[4])).strftime('%H:%M')}"
            )
        # Mark the messages as read so they are not shown again.
        cur.execute('UPDATE Client_messages SET is_read = 1 WHERE is_read = 0')
        conn.commit()
    else:
        print('Non ci sono messaggi nuovi!')
    print(
        '\n\n--------------------------------------------------------------------------------------------------'
    )
    close_db(conn)
def __init__(self, content, root, url, verbose=False):
    """Keep the page content/context and open a DB handle for match storage."""
    self.content = content
    self.root = root
    # Quote the URL so it is safe to carry around as an identifier.
    self.url = urllib.quote(url)
    self.verbose = verbose
    self.matches = []
    self.r = db.open_db()
def branches_indexes(self):
    """Map branch name -> branch id from the branches table.

    A plain SELECT is used here because this field is requested only once
    per script run.
    """
    cur, conn = open_db()
    cur.execute(f"""SELECT * FROM {self.gp_branches_table}""")
    mapping = {}
    for row in cur:
        mapping[row['gpb_name']] = row['gpb_id']
    close_db(cur, conn)
    return mapping
def get_price_between_dates(coin, min_date, max_date):
    """Return the earliest (by sort_date) sell price for *coin* in a date window.

    Returns None when no record matches.

    NOTE(review): the comparisons read `date >= max_date` and
    `date < min_date`, which is inverted relative to the parameter names --
    confirm whether the caller passes the arguments swapped or whether the
    operators should be `>= min_date` / `< max_date`.
    """
    db = open_db()
    price = None
    for rec in sorted((db('abbr') == coin) & (db('date') >= max_date) &
                      (db('date') < min_date),
                      key=sort_date):
        price = rec.get('sell')
        # Only the first (earliest) matching record is needed.
        break
    return price
def main(stdscr):
    """Curses UI main loop driving the `states` state machine.

    Reads key presses, edits the current state's input buffer, and follows
    `next_state` transitions on Enter. State transitions are appended to a
    local "state" file for debugging.
    """
    db.open_db()
    stdscr.clear()
    curses.noecho()
    curses.start_color()
    curses.use_default_colors()
    curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLACK)
    curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_GREEN)
    curses.init_pair(3, curses.COLOR_WHITE, curses.COLOR_RED)
    current_state = "login"

    def _log_state(state):
        # Bug fix: the original used open("state", "a").write(...) which
        # leaks the file handle; a context manager closes it deterministically.
        with open("state", "a") as f:
            f.write(state)

    while True:
        stdscr.addstr(0, 0, "Nethackathon 2021")
        if "func" in states[current_state]:
            _log_state(current_state)
            current_state = states[current_state]["func"](states, stdscr)
            # NOTE(review): the new state was written twice in the original
            # (debug leftover?); preserved so the file contents are unchanged.
            _log_state(current_state)
            _log_state(current_state)
        if states[current_state]["prompt"]:
            stdscr.addstr(states[current_state]["y"], 8,
                          states[current_state]["prompt_text"])
            stdscr.addstr(states[current_state]["y"] + 1, 8, " " * 20)
            stdscr.addstr(states[current_state]["y"] + 1, 8,
                          states[current_state]["buffer"])
        stdscr.refresh()
        v = stdscr.getch()
        if -1 == v:
            continue
        if v == 263:  # Backspace: drop the last buffered character
            states[current_state]["buffer"] = states[current_state]["buffer"][:-1]
        elif v == 10:  # Enter: follow the transition and redraw
            current_state = states[current_state]["next_state"]
            stdscr.clear()
        else:
            states[current_state]["buffer"] += chr(v)
def start() -> int:
    """Prepare the notes directory and initialize the database.

    Returns 0 on success, -1 when the DB could not be opened or the table
    could not be created. Raises Error on unsupported operating systems.
    """
    # Guard clause: bail out early on unsupported platforms.
    if os.name != "posix" and os.name != "nt":
        raise Error("Error in main.start(): Operating system not supported")
    path = os.path.join("authentication", "notes")
    if not os.path.exists(path):
        os.mkdir(path)
    # Both failure modes print the error and return -1, so one handler
    # suffices; open_db failing still prevents create_table from running.
    try:
        db.open_db()
        db.create_table()
    except db.db_Error as e:
        print(str(e))
        return -1
    return 0
def load_positions(positions_ids):
    """Load Position objects for the given if_id values.

    :param positions_ids: iterable of integer if_id values
    :return: list of Position objects; empty list for an empty input
    """
    positions_ids = list(positions_ids)
    # Bug fix: an empty id list produced "WHERE if_id in ()", which is
    # invalid SQL -- short-circuit instead of hitting the DB.
    if not positions_ids:
        return []
    Q = """SELECT if_id, if_zgr_orid as ppsys_id, COALESCE(if_item_text, "") as item_text FROM table WHERE if_id in ({}) """
    cur, conn = open_db()
    # The ids are integers produced by our own previous query, so str()
    # interpolation is safe; switch to placeholders if the source changes.
    cur.execute(Q.format(",".join(map(str, positions_ids))))
    positions = [Position(**item) for item in cur]
    close_db(cur, conn)
    return positions
def upgrade_table(positions):
    """Persist keyword results: set `field` and flip `status_field` to 1 per position.

    Security fix: the original interpolated pos.keywords into the SQL with
    `%` and hand-written quotes, which breaks (and allows injection) as soon
    as the keywords contain a quote. Use driver-side parameter binding.
    """
    QU = """ UPDATE table SET field = %s, status_field = 1 WHERE id_field = %s ;"""
    cur, conn = open_db()
    for pos in positions:
        cur.execute(QU, (pos.keywords, pos.id))
    conn.commit()
    close_db(cur, conn)
def import_files():
    """Parse every not-yet-imported .FIT file recorded in the files table."""
    conn = open_db()
    conn.execute("BEGIN")
    pending = conn.execute("SELECT * FROM files "
                           "WHERE path LIKE '%.FIT' "
                           "AND imported = FALSE")
    for row in pending:
        fit_file = File.from_db(row)
        print("Parsing", fit_file.path)
        frame_count = parse_import_activity(fit_file, conn)
        print(f"Imported {frame_count} frames")
        # Flag the file so it is skipped on the next run.
        fit_file.mark_imported(conn)
    conn.execute("COMMIT")
def download_files(device_root: Path):
    """Record any files found on the device that are not yet in the DB."""
    conn = open_db()
    conn.execute("BEGIN")
    # Highest download sequence so far; an empty table yields NULL -> 0.
    row = conn.execute("SELECT MAX(download_seq) FROM files").fetchone()
    max_seq = row[0] if row[0] is not None else 0
    next_seq = max_seq + 1
    for candidate in walk_files(device_root):
        if not candidate.exists_in_db(conn):
            print(f"Found new file {candidate.path} (modified {candidate.modified})")
            candidate.insert(conn, next_seq)
    conn.execute("COMMIT")
def fill_gp_branches(self):
    """Insert one row per top-level branch directory found under *startdir*."""
    branches = [
        e.name for e in os.scandir(startdir)
        if e.is_dir() and not e.name.startswith('.')
    ]
    cur, conn = open_db()
    # Security/robustness fix: the branch name came from the filesystem and
    # was f-string-interpolated into the statement; a name containing a
    # quote would break or inject. Bind it as a parameter instead.
    SQL = f"""INSERT INTO {self.gp_branches_table} (gpb_name) VALUES(%s);"""
    for branch in branches:
        cur.execute(SQL, (branch, ))
    conn.commit()
    close_db(cur, conn)
def get_change(coin):
    """Return (latest_sell, percent_change) between the two newest records for *coin*.

    :raises ValueError: when fewer than two records exist
    """
    db = open_db()
    # Two most recent records, newest first.
    recent = sorted((db('abbr') == coin.lower()), key=sort_date, reverse=True)[:2]
    vals = [rec.get('sell') for rec in recent]
    # Robustness fix: assert is stripped under "python -O"; raise explicitly.
    if len(vals) != 2:
        raise ValueError(f"need two records for {coin!r}, got {len(vals)}")
    latest, previous = vals
    return latest, round(((latest / previous) - 1) * 100, 2)
def get_positions_ids():
    """Return up to 50 if_id values whose keyword status (if_kw_status) is 2."""
    query = """SELECT if_id FROM table WHERE if_kw_status = 2 ORDER BY if_id LIMIT 50; """
    cur, conn = open_db()
    cur.execute(query)
    ids = []
    for row in cur:
        ids.append(row['if_id'])
    close_db(cur, conn)
    return ids
async def drink_accept(ctx, user: discord.Member):
    """Complete a pending bar order: charge the client, deliver the drink, pay the bartender."""
    passports = open_db("database/passports.json")
    bar = open_db("database/bar.json")
    items = open_db("database/items.json")
    client = str(user)
    # Guard: nothing to accept if the client never ordered.
    if client not in bar["order"]:
        await ctx.send('Пользователь ничего не заказывал (Стёпа - дэбил)')
        return
    order = bar["order"][client]
    cost = items["drinks"][str(order)]["cost"]
    # Guard: the client must be able to afford the drink.
    if passports[client]["coin"] < cost:
        await ctx.send('У ' + client + ' не достаточно средств')
        return
    print(order)
    # Move the money and the item, then persist both files.
    passports[client]["coin"] -= cost
    passports[client]["items"].append(str(order))
    passports[str(ctx.author)]["coin"] += cost
    update_db('database/passports.json', passports)
    delivered = bar["order"].pop(client)
    update_db('database/bar.json', bar)
    await ctx.send('Заказ ' + client + 'оформлен(' + str(delivered) + ')')
def recreate():
    """Rebuild the pure-ftpd password file from every website in the DB."""
    with db.open_db() as conn:
        with conn.cursor() as cur:
            cur.execute("SELECT id,files_password FROM users_website")
            websites = cur.fetchall()
            # Wipe the existing password file, then re-add every account.
            os.system(
                'docker-compose -f /ssd/container/namelessmc/proxy/docker-compose.yaml exec -T ftp rm /etc/pure-ftpd/passwd/pureftpd.passwd'
            )
            for website_id, password in websites:
                add_account(website_id, password)
async def drink_order(ctx, drink):
    """Place a drink order for the author, or show the priced menu.

    The order is registered only when the drink exists and the author can
    afford it; in every other case the drinks menu is sent instead.
    """
    data = open_db("database/passports.json")
    data_bar = open_db("database/bar.json")
    items = open_db("database/items.json")
    drinks = items["drinks"]
    user_name = str(ctx.author)
    user = data[user_name]
    if drink in drinks:
        if user["coin"] >= drinks[drink]["cost"]:
            # Cleanup: the original first set order[user_name] = '' when the
            # key was missing and immediately overwrote it -- dead code removed;
            # a later order simply replaces an earlier one.
            data_bar["order"][user_name] = drink
            update_db("database/bar.json", data_bar)
            await ctx.send(ctx.author.name + ' заказал ' + drink)
            return
    # Unknown drink or insufficient funds: show the menu with prices.
    menu = ''
    for drink_menu in drinks:
        drink_menu_str = str(drink_menu)
        menu = menu + drink_menu_str + ': ' + str(
            drinks[drink_menu_str]["cost"]) + ':pizza:' + '\n'
    await ctx.send(ctx.author.name + ': \n' + menu)
def create_branch_table(self):
    """Drop and recreate the branches lookup table (gpb_id, gpb_name)."""
    cur, conn = open_db()
    cur.execute(f"""DROP TABLE IF EXISTS `{self.gp_branches_table}`""")
    # gpb_name is UNIQUE so the same branch cannot be registered twice.
    ddl = f"""
        CREATE TABLE `{self.gp_branches_table}` (
            `gpb_id` int(11) NOT NULL AUTO_INCREMENT,
            `gpb_name` varchar(255) NOT NULL,
            PRIMARY KEY (`gpb_id`),
            UNIQUE KEY `name` (`gpb_name`)
        ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci
          COMMENT='{branch_table_comment}';
    """
    cur.execute(ddl)
    conn.commit()
    close_db(cur, conn)
async def drink_accept(ctx):
    """Let the author work for 5 seconds and earn a fixed salary of 10 coins."""
    user = str(ctx.author)
    data = open_db("database/passports.json")
    salary = 10
    if data[user]["work"]:
        await ctx.send(ctx.author.name + ', ты уже работаешь')
        return
    data[user]["work"] = True
    update_db("database/passports.json", data)
    await ctx.send(ctx.author.name + ' начал работать')
    await asyncio.sleep(5)
    # Bug fix: re-read the DB after the sleep. Other commands can modify
    # passports.json during the 5 s wait, and writing the pre-sleep snapshot
    # back would silently discard their changes (lost update).
    data = open_db("database/passports.json")
    data[user]["coin"] += salary
    await ctx.send(ctx.author.name + ' заработал ' + str(salary))
    data[user]["work"] = False
    update_db("database/passports.json", data)
async def passport(ctx):
    """Show the author's inventory and balance, or register a new passport."""
    data = open_db("database/passports.json")
    user = str(ctx.author)
    if user in data:
        money = data[user]["coin"]
        items = ''
        for item in data[user]["items"]:
            items = items + '```py\n@ ' + item + '\n' + '```'
        items = items if items != '' else '```py\n @ пусто```'
        await ctx.send('**Инвентарь ' + ctx.author.name + ':**\n' + items +
                       '\nДеньги: ' + str(money) + ':pizza:')
    else:
        # Bug fix: the key must be "items" (plural) -- every reader
        # (have_drink, drink_accept, and the branch above) uses
        # data[user]["items"], so passports created with "item" made those
        # commands crash with KeyError for new users.
        data[user] = {'coin': 0, 'items': [], 'work': False}
        update_db("database/passports.json", data)
        await ctx.send(ctx.author.name + ' Получил паспорт')
def upload_worker(queue: Queue):
    """Consume (SQL, batch) pairs from *queue* and upload until a falsy batch arrives.

    An empty/falsy batch acts as the shutdown sentinel. A progress line is
    printed whenever the batch's source filename changes.
    """
    cur, conn = open_db()
    print('Worker started, waiting for data...')
    # Bug fix: initialize warnings_ so show_1265_warnings() below does not
    # raise NameError when the very first batch is already the sentinel.
    warnings_ = []
    SQL, batch = queue.get()
    fname = batch[0]['bkf_filename'] if batch else None
    print(f'WORKER: Uploading {fname} to DB....')
    while batch:
        old_fname = fname
        fname = batch[0]['bkf_filename']
        if fname != old_fname:
            print(f'WORKER: Uploading {fname} to DB....')
        warnings_ = upload_batch(SQL, batch, cur, conn)
        SQL, batch = queue.get()
    show_1265_warnings(warnings_)
    close_db(cur, conn)
rep = report.Report(crawler.host, crawler.urls) #crawler.urls is a tuple rep.send_redis_relations() rep.output_csv_relations() elif url: sTime = time.time() print "Crawling %s (Max Depth: %d)" % (url, depth) crawler = Crawler(url, depth, verbose, extrap, limit) crawler.crawl() eTime = time.time() tTime = eTime - sTime print "Found: %d" % crawler.links #TODO: This number is crawler.self.links and is not correct print "Followed: %d" % crawler.followed print "Stats: (%d/s after %0.2fs)" % (int(math.ceil(float(crawler.links) / tTime)), tTime) rep = report.Report(crawler.host, crawler.urls) #crawler.urls is a tuple rep.send_redis_relations() rep.output_csv_relations() if __name__ == "__main__": #for x in pull_info("linkpart.csv"): print x red_serv = db.open_db() if db.test_redis_open(): main() else: logging.debug("redis server not running")