def getRecipes():
    """Return every recipe stored in the recipes DB as Recipe objects.

    Returns an empty list when the DB backend is unavailable or when any
    error occurs while reading it (best-effort, never raises).
    """
    if not DataBase.db_available:
        return []
    try:
        db = DataBase(Config.RECIPES_DB_FILE)
        db.connect()
        recipes = []
        for row in db.select('select * from recipes'):
            # Row layout: name, version, modules (csv), recipe, isapp,
            # galaxy_module, desc, requirements
            name, version, modules, recipe, isapp, galaxy_module, desc, requirements = row[:8]
            recipes.append(Recipe(name, version, modules.split(","), recipe,
                                  desc, requirements, galaxy_module, isapp))
        return recipes
    except Exception:
        return []
def get_user_list(self):
    """Fetch all (user, attributes) rows from the 'user' table.

    NOTE(review): both 'success' and 'user_list' are computed but the
    visible code never returns them -- confirm a trailing 'return' was
    not lost from this chunk.
    """
    success = False
    user_list = []
    try:
        db = DataBase(self.__db_name)
        db.connect()
        user_list = db.select("select user, attributes from user;")
        success = True
        logging.debug("user list successfully obtained")
    except Exception, e:
        # Best-effort: errors are logged, not propagated
        logging.error("error trying to list users (%s)" % (str(e)))
def get_user_data_from_db(self, username):
    """Look up *username* in the 'user' table and split its attributes.

    NOTE(review): 't_user' and 't_attributes' are computed but the visible
    code never returns them -- confirm a trailing 'return' was not lost.
    """
    t_user = None
    t_attributes = []
    try:
        db = DataBase(self.__db_name)
        db.connect()
        # NOTE(review): username is interpolated straight into the SQL text
        # -- injection-prone; a parameterized query would be safer.
        datos = db.select("select user, attributes from user where user=\"%s\";" % username)
        if (len(datos) != 0):
            t_user, t_attributes = datos[0]
            # Attributes are stored as a ';'-separated string
            t_attributes = t_attributes.split(";")
    except Exception, e:
        logging.error("an error happened trying to obtain data from user '%s' (%s)" % (username, str(e)))
def get_event(name, **args):
    """Return detailed information about one event of the authenticated user.

    Aborts with 403 when the auth user name does not resolve to a user.
    """
    current_user = DataBase.get_user_info(args["authUserName"])
    if current_user is None:
        abort(403)
    event_data = DataBase.get_users_event(current_user[0], name)
    return jsonify({"event": name, "events": event_data})
def get_events(**args):
    """Return all events belonging to the authenticated user.

    Aborts with 403 when the auth user name does not resolve to a user.
    """
    current_user = DataBase.get_user_info(args["authUserName"])
    if current_user is None:
        abort(403)
    return jsonify({"events": DataBase.get_users_events(current_user[0])})
def insertRecipe(name, version, module, recipe, desc, requirements, galaxy_module = None, isapp = 0):
    """Static method to insert a recipe in the DB.

    Returns the result of the execute call on success, False when the DB
    backend is unavailable or on any error.
    """
    if not DataBase.db_available:
        return False
    try:
        db = DataBase(Config.RECIPES_DB_FILE)
        db.connect()
        # Parameterized query: the original interpolated galaxy_module with
        # %d, which raises TypeError for the default None (and was open to
        # SQL injection through the string fields).  The same DataBase
        # wrapper accepts (sql, params) elsewhere in this codebase.
        res = db.execute('insert into recipes values (?, ?, ?, ?, ?, ?, ?, ?)',
                         (name, version, module, recipe, isapp,
                          galaxy_module, desc, requirements))
        return res
    except Exception:
        return False
def delete_user(self, username):
    """Delete *username* from the 'user' table.

    Returns False immediately for a None username.  NOTE(review): on the
    success/error paths 'success' is set but never returned by the visible
    code -- confirm a trailing 'return success' was not lost.
    """
    if username is None:
        return False
    success = False
    try:
        db = DataBase(self.__db_name)
        db.connect()
        # NOTE(review): username is interpolated into the SQL -- injection-prone.
        db.execute("delete from user where user=\"%s\";" % username)
        success = True
        logging.debug("user '%s' successfully deleted" % username)
    except Exception, e:
        logging.error("error trying to delete user '%s' (%s)" % (username, str(e)))
def create_user(self, username, password, attributes = []):
    """Insert a new row in the 'user' table.

    NOTE(review): 'attributes=[]' is a mutable default (shared across calls;
    harmless here since it is only joined, never mutated).  The password is
    stored as received -- presumably plaintext, verify against the schema.
    'success' is set but never returned by the visible code.
    """
    if username is None:
        return False
    success = False
    try:
        db = DataBase(self.__db_name)
        db.connect()
        # Attributes are persisted as a single ';'-separated string
        attributes_string = ";".join(attributes)
        # NOTE(review): values are interpolated into the SQL -- injection-prone.
        db.execute("insert into user (user, password, attributes) values (\"%s\", \"%s\", \"%s\");" % (username, password, attributes_string))
        success = True
        logging.debug("user '%s' successfully created" % username)
    except Exception, e:
        logging.error("error trying to create user '%s' (%s)" % (username, str(e)))
def __init__(self):
    """ Constructor. Initializes the list of machines.

    @rtype: None
    @returns: nothing
    """
    # Backing database handle used by the rest of this class.
    self.__db = DataBase()
def __init__(self, sql_queue, db_info, timestamp):
    """Worker thread that drains *sql_queue* into the DB described by *db_info*.

    timestamp is the collection-run timestamp, also used to tag the log file.
    """
    threading.Thread.__init__(self)
    self.sql_queue = sql_queue        # queue of SQL work items to execute
    self.db = DataBase(**db_info)     # connection built from the info dict
    self.timestamp = timestamp
    # Dedicated file logger for this thread
    self.logger = logger.create_file_logger("Sql Thread", logging.INFO, timestamp,
                                            'collect_sql_thread.log')
def create_db(args):
    """Build and return a DataBase handle from parsed CLI arguments."""
    return DataBase(
        login=args.db_login,
        pwd=args.db_pwd,
        host=args.db_host,
        db=args.db_name,
    )
def create_db_backup() -> Path:
    """Populate a fresh database and move its files into DB_BACKUP_ROOT.

    Returns the backup root directory.
    """
    DB_BACKUP_ROOT.mkdir(parents=True, exist_ok=True)
    delete_files(DB_BACKUP_ROOT)
    create_students_table(DataBase(), 100)
    # Relocate every on-disk DB file into the backup directory
    for db_file in DB_ROOT.iterdir():
        db_file.rename(DB_BACKUP_ROOT / db_file.name)
    return DB_BACKUP_ROOT
def Burdock(message):
    """Send every stored order to the chat, or report that there are none.

    Each order row is (order_n, _, user_name, shop_name, price, ...).
    """
    db = DataBase()
    burdocks = db.getOrder()
    try:
        if not burdocks:
            # Explicit empty-case: previously an empty list fell through the
            # loop silently -- only a falsy non-iterable result reached the
            # except handler and produced this message.
            bot.send_message(message.chat.id, "❗Заказов нет❗")
            return
        for burdock in burdocks:
            order_n = burdock[0]
            user_name = burdock[2]
            shop_name = burdock[3]
            price = burdock[4]
            mess = "🏆 Заказ № %d\n\n👮 Пользователь: %s\n💒 Магазин: %s\n💰 Цена: %d$" % (order_n, user_name, shop_name, price)
            bot.send_message(message.chat.id, mess)
    except:
        mess = "❗Заказов нет❗"
        bot.send_message(message.chat.id, mess)
def __init__(self, renderer):
    """Inventory screen: loads the player's inventory and the equipped item."""
    self.db = DataBase()
    self.renderer = renderer
    self.inventory = self.db.get_player_inventory()
    # 'equipped' is stored as a JSON string inside the inventory record
    self.equipped = json.loads(self.inventory["equipped"])
    self.left_hand_id = self.equipped["left_hand"]
    self.left_hand = self.db.get_item_by_id(self.left_hand_id)
    # Lines shown in the inventory dialog: item name then its description
    self.inv_text = {
        0: self.left_hand["name"],
        1: self.left_hand["description"]
    }
    self.inv_dialog = DialogBox(self.renderer, Colors.WHITE, 16, (300, 200),
                                Colors.BLACK, "GlametrixBold.otf")
def new_link(link: str, db=None):
    """Register *link* under a fresh unused short code and return the short URL.

    The original default ``db=DataBase()`` was evaluated once at definition
    time, silently sharing a single handle across every call; the None
    sentinel gives each call its own handle while keeping the same call
    signature for callers that pass a db explicitly.
    """
    if db is None:
        db = DataBase()
    while True:
        code = create_code()
        # Keep generating until the code is not already taken
        if not db.return_unit(code):
            db.write(code, link)
            break
    return host + '/' + code
def start(message):
    """Greet the user with the start keyboard and record the click."""
    markup = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
    markup.row('💳 Gift cards', '📹 Tutorial')
    # markup.row('👮 Accounts', '💳 Gift cards')
    bot.send_message(message.chat.id, CONF.start, reply_markup=markup)
    # Fall back to "-" when the account has no public username
    user_name = "@" + message.from_user.username if message.from_user.username else "-"
    DataBase().newClick(message.chat.id, user_name)
def __init__(self, parent):
    """Editor frame: canvas bookkeeping, the backing DB and the UI."""
    Frame.__init__(self, parent)
    self.parent = parent
    self.window = None
    self.size = (4096, 2160)   # virtual canvas size (width, height) in px
    self.width = self.size[0]
    self.height = self.size[1]
    self.canvasi = []          # open canvases -- presumably one per NPC; confirm
    self.db = DataBase()
    self.init_ui()
def __init__(self, window, data):
    """NPC entity: sprite sheets, movement state and dialog bookkeeping.

    data is expected to carry at least "name", "level" and "quest" keys.
    """
    self.dialog_timer = Timer(10000, activated=True)  # delay before a dialog may open
    self.close_dialog_timer = Timer(10000)            # delay before the dialog closes
    self.db = DataBase()
    self.window = window
    self.renderer = window.renderer
    self.name = data["name"]
    self.level = data["level"]
    self.quest = data["quest"]
    self.sprite_size = 128   # square sprite frame size in pixels
    self.position = [0, 0]
    self.movement = [0, 0]
    self.moving = False
    # Two sheets per NPC, named after it: standing and walking
    self.npc_sprites = [
        RESOURCES.get_path("{0}_standing.png".format(self.name)),
        RESOURCES.get_path("{0}_walking.png".format(self.name))
    ]
    self.factory = sdl2.ext.SpriteFactory(sdl2.ext.TEXTURE, renderer=self.renderer)
    self.sprite_sheets = {}
    self.facing = Facing.LEFT_DOWN
    self.last_facing = self.facing
    self.motion_type = MotionType.STANDING
    self.last_motion_type = self.motion_type
    self.frame_index = 0
    self.walk_frames = 60   # length of a walk cycle -- presumably in frames; confirm
    self.init_sprite_sheet()
    self.dialogs = self.db.get_npc_dialog(self.name)
    self.dialog_box = None
    self.msg = None
def show_daily_expense(user_id: int) -> str:
    """Build the daily-expense summary message for *user_id*.

    Raises NoInformationException when the user has no expenses today.
    """
    expenses = DataBase('db.db').get_daily_expenses(user_id)
    if not expenses:
        raise NoInformationException
    total = sum(expenses.values())
    # Largest spending first
    ordered = sorted(expenses.items(), key=lambda kv: kv[1], reverse=True)
    lines = [f'{category.capitalize()}: {amount} руб.' for category, amount in ordered]
    return '\n'.join(lines) + f'\n*Всего*: {total} руб.'
def add_subtitle_to_db(self, imdb_id, subtitle_id, episode=None, season=None):
    """Store subtitle information for a title in the database.

    Args:
        imdb_id (:obj:`str`): title id on *imdb.com*
        subtitle_id (:obj:`str`): subtitle id on *opensubtitles.org*
        episode (:obj:`int`, optional): episode number
        season (:obj:`int`, optional): season number
    """
    logger.info("adding imdb_id - %s subtitle_id - %s to database" % (imdb_id, subtitle_id))
    postgre = DataBase()
    postgre.AddSubtitleToLibrary(subtitle_id=subtitle_id, imdb_id=imdb_id, episode=episode, season=season)
def getClick(message):
    """Reply with the stored click records, optionally limited to a count
    parsed from the second line of the incoming message."""
    db = DataBase()
    if len(message.text) == 9:
        clicks = db.getClick()
    else:
        try:
            count = int(message.text.split("\n")[1])
        except:
            count = 1
        clicks = db.getClick(count)
    # Reuse the result already fetched: the original issued a second,
    # unfiltered db.getClick() just to test for emptiness, and shadowed the
    # loop variable with the collection being iterated.
    if clicks == False:
        bot.send_message(message.chat.id, "📌 № 0")
    else:
        for click in clicks:
            mess = "📌 № %d\n👮 %s" % (click[0], click[2])
            bot.send_message(message.chat.id, mess)
def get_user_info(**args):
    """Return the login and API key of the authenticated user.

    Aborts with 403 when the auth user name does not resolve to a user.
    """
    record = DataBase.get_user_info(args["authUserName"])
    if record is None:
        abort(403)
    return jsonify({"login": record[1], "apiKey": record[3]})
def __init__(self):
    """Hardware controller: OLED display, NeoPixel strip, GPIO buttons and a
    daemon worker thread.  NOTE: ends in an infinite sleep loop, so this
    constructor never returns."""
    self.BUTTON_ON = 19    # BCM pin numbers for the three buttons
    self.BUTTON_OFF = 26
    self.BUTTON_SET = 20
    self.MODE = "Default"
    """基本設定とスレッドの呼び出し"""
    # Basic setup (original comment: 基本的なセッティング)
    self.db = DataBase()
    self.oled = OLED_Display()
    self.oled.display([self.MODE], ["en"])
    signal.signal(signal.SIGINT, self.exit_handler)
    # Background worker runs self.run as a daemon thread
    th = threading.Thread(target=self.run, name="th", args=())
    th.setDaemon(True)
    th.start()
    # Neopixel
    # Create NeoPixel object with appropriate configuration.
    self.strip = Adafruit_NeoPixel(1, 18, 800000, 10, False, 255, 0)
    # Intialize the library (must be called once before other functions).
    self.strip.begin()
    # Buttons (original comment: ボタン)
    GPIO.setmode(GPIO.BCM)
    self.errLED = [13]   # error-indicator LED pin(s)
    GPIO.setup(self.errLED, GPIO.OUT)
    GPIO.setup(self.BUTTON_ON, GPIO.IN)
    GPIO.setup(self.BUTTON_OFF, GPIO.IN)
    GPIO.setup(self.BUTTON_SET, GPIO.IN)
    # Debounced rising-edge callbacks for each button
    GPIO.add_event_detect(self.BUTTON_ON, GPIO.RISING, callback=self.pushed_on, bouncetime=3000)
    GPIO.add_event_detect(self.BUTTON_OFF, GPIO.RISING, callback=self.pushed_off, bouncetime=3000)
    GPIO.add_event_detect(self.BUTTON_SET, GPIO.RISING, callback=self.pushed_register, bouncetime=3000)
    # Keep the main thread alive; all work happens in callbacks / the thread
    while True:
        time.sleep(1000)
def SearchSubsMethod(self, bot, update, flag, id):
    """Search subtitles for a film and add them to the database, or ask for
    season/episode when the title is a series.

    Args:
        bot (:class:`telegram.Bot`): bot handler
        update (:class:`telegram.ext.Updater`): incoming update
        flag (:obj:`int`): 0 for a film, otherwise a series
        id (:obj:`str`): title id on *imdb.com*
    """
    logger.info("Search for Subtitles")
    if (flag == 0):
        logger.info("Film Method")
        # Clear the per-chat "searching" flag before downloading
        flags[update.callback_query.message.chat_id].set_flag_search(False)
        postgre = DataBase()
        if (self.download_subtitles(bot, update, id, flag="film")):
            logger.info("%s" % postgre.AddFilmToLibrary(
                update.callback_query.message.chat_id, id))
    else:
        logger.info("Series Method")
        self.ask_for_season_episode(bot, id, update)
def enable_node(node_name):
    """Remove *node_name* from the persistent disabled-hosts list.

    Returns True on success, False on any error (which is also logged).
    """
    try:
        db = DataBase(CLUES_DB)
        db.connect()
        # Parameterized delete: the original concatenated node_name straight
        # into the SQL text (injection-prone).  The same wrapper accepts
        # (sql, params) elsewhere in this codebase.
        db.execute('delete from disabled_hosts where hostname = ?', (node_name,))
        db.close()
        return True
    except:
        # Original message said "inserting" for a delete and never filled
        # its %s placeholder.
        logging.error("failed deleting node %s from the disabled node list" % node_name)
        return False
def __init__(self, port, backlog=1000, compress=False):
    """Network server: Panda3D queued connection plumbing plus client lists."""
    self.port = port
    self.backlog = backlog      # max pending connections for listen()
    self.compress = compress    # whether to compress the wire protocol
    self.cManager = QueuedConnectionManager()
    self.cListener = QueuedConnectionListener(self.cManager, 0)
    self.cReader = QueuedConnectionReader(self.cManager, 0)
    self.cWriter = ConnectionWriter(self.cManager,0)
    self.db = DataBase()
    # This is for pre-login
    self.tempConnections = []
    # This is for authed clients
    self.activeConnections = []
    # Temp user dict
    self.clients={}
    self.connect(self.port, self.backlog)
    self.startPolling()
def new_db() -> Generator[DataBase, None, None]:
    """Yield a DataBase after wiping all existing tables and on-disk files."""
    stale = DataBase()
    for table_name in stale.get_tables_names():
        stale.delete_table(table_name)
    delete_files(DB_ROOT)
    yield DataBase()
def load_database():
    """Load the 'key_table' rows into the module-level 'data' mapping.

    Each row is stored as data[row[1]] = row[0] -- presumably value-keyed
    lookup; confirm the column order against the schema.  Errors are
    printed and swallowed.
    """
    try:
        DB = DataBase()
        DB.open()
        for array in DB.getAll('key_table'):
            data[array[1]] = array[0]
        DB.close()
    except Exception, e:
        print e
def save(self):
    """Serialize every canvas and its sticky notes into the DB, then write
    the DB to a file chosen via a save dialog."""
    self.db = DataBase()
    for canv in self.canvasi:
        self.db.add_npc(canv.name)
        for stick in canv.stickies:
            # One node per sticky: tags, text, links and position
            node = {canv.stickies[stick].name:{"tags":[], "text":"", "links":{}, "coords":{}}}
            node[canv.stickies[stick].name]["tags"] = [field.get() for field in canv.stickies[stick].entries]
            # Join all text widgets, dropping the trailing newline of each
            node[canv.stickies[stick].name]["text"] = "".join([text.get("1.0", "end-1c") for text in canv.stickies[stick].text])
            node[canv.stickies[stick].name]["links"] = canv.stickies[stick].links
            node[canv.stickies[stick].name]["coords"] = canv.stickies[stick].pos
            self.db.add_node(canv.name, node)
    fname = filedialog.asksaveasfile(parent=self, mode='w', title='Choose a filename', initialdir="./data")
    # NOTE(review): fname is None when the user cancels the dialog -- this
    # would raise AttributeError on fname.name; confirm intended.
    self.db.save(fname.name)
def report_node(name, state, free_slots, total_slots):
    """Persist a node status snapshot, or log it when the DB is unavailable."""
    now = float(time.time())
    db = DataBase(CLUES_DB)
    if not DataBase.db_available:
        # Fallback: leave a parseable trace in the log instead
        logging.info('REPORT NODE %s;%s;%s;%s' % (name, state, free_slots, total_slots))
        return
    db.connect()
    db.execute('insert into report_nodes values (?,?,?,?,?)',
               (now, name, state, free_slots, total_slots))
    db.close()
def report_release_job(manager, nnodes):
    """Record the release of *nnodes* nodes by *manager*, or log it when the
    DB is unavailable."""
    now = float(time.time())
    db = DataBase(CLUES_DB)
    if not DataBase.db_available:
        logging.info('REPORT JOB %s;0;%s' % (manager, nnodes))
        return
    db.connect()
    db.execute('insert into report_jobs values (?,?,?,?)', (now, manager, 0, nnodes))
    db.close()
def report_job_reset():
    """Record a job-accounting reset marker (-1/-1 sentinel row), or log it
    when the DB is unavailable."""
    now = float(time.time())
    db = DataBase(CLUES_DB)
    if not DataBase.db_available:
        logging.info('REPORT JOB ;-1;-1')
        return
    db.connect()
    db.execute('insert into report_jobs values (?,?,?,?)', (now, '', -1, -1))
    db.close()
def report_end_lrms_job(job):
    """Close out an LRMS job: set its StartTime/EndTime in the DB, or log the
    event when the DB is unavailable."""
    now = float(time.time())
    db = DataBase(CLUES_DB)
    if DataBase.db_available:
        db.connect()
        # Parameterized update: the original concatenated the job fields
        # straight into the SQL text (injection-prone and fragile quoting).
        db.execute("update lrms_jobs set StartTime = ?, EndTime = ? "
                   "where JobID = ? and Manager = ?",
                   (job[JOB_TIMESTAMP], now, job[JOB_ID], job[JOB_RM_NAME]))
        db.close()
    else:
        msg = ('REPORT END_LRMS_JOB %s;%s;%s;%s' % (job[JOB_RM_NAME], job[JOB_ID], job[JOB_TIMESTAMP], now))
        logging.info(msg)
def sign_up():
    """Register a new user from a JSON body with "login" and "password".

    400 on a malformed body, 409 when the login already exists.
    """
    payload = request.json
    if not payload or "login" not in payload or "password" not in payload:
        abort(400)
    new_user = DataBase.add_user(payload["login"], payload["password"])
    if new_user is None:
        abort(409)
    return jsonify({"login": new_user[1], "apiKey": new_user[3]}), 201
def __init__(self):
    """Lobby process: headless ShowBase, the login server, the DB and the
    periodic lobby loop task."""
    # Dont need window.
    self.showbase = ShowBase()
    # Start our server up
    print ""
    print "INIT: LOGIN SERVER...\n"
    self.LoginServer = LoginServer(9098, compress=True)
    print "INIT: DATABASE...\n"
    self.db = DataBase()
    # Share the login server's client registry
    self.users = self.LoginServer.clients
    print self.users, "HERE !!!!!!!!!"
    # Poll the lobby every 0.2 seconds
    taskMgr.doMethodLater(0.2, self.lobby_loop, 'Lobby Loop')
def getInstallableApps():
    """Return (FeaturesApp, module, galaxy_module, recipe, requirements)
    tuples for every recipe flagged as an app.

    Returns an empty list when the DB backend is unavailable or on error.
    """
    if not DataBase.db_available:
        return []
    try:
        db = DataBase(Config.RECIPES_DB_FILE)
        db.connect()
        apps = []
        for row in db.select('select * from recipes where isapp = 1'):
            # Columns: 0=name, 1=version, 2=module, 3=recipe,
            # 5=galaxy_module, 7=requirements (4 and 6 unused here)
            apps.append((FeaturesApp.from_str(row[0], row[1]), row[2],
                         row[5], row[3], row[7]))
        return apps
    except Exception:
        return []
def insertRecipe(name, version, module, recipe, desc, requirements, galaxy_module=None, isapp=0):
    """Static method to insert a recipe in the DB.

    Returns the result of the execute call on success, False when the DB
    backend is unavailable or on any error.
    """
    if not DataBase.db_available:
        return False
    try:
        db = DataBase(Config.RECIPES_DB_FILE)
        db.connect()
        # Parameterized query: the original interpolated galaxy_module with
        # %d, which raises TypeError for the default None (and was open to
        # SQL injection through the string fields).
        res = db.execute(
            'insert into recipes values (?, ?, ?, ?, ?, ?, ?, ?)',
            (name, version, module, recipe, isapp,
             galaxy_module, desc, requirements))
        return res
    except Exception:
        return False
def add_event():
    """Create a new event for the user identified by apiKey in the JSON body.

    400 on a malformed body or when the event could not be stored.
    """
    payload = request.json
    if not payload or "apiKey" not in payload or "event" not in payload:
        abort(400)
    created = DataBase.add_event(payload["apiKey"], payload["event"], request.remote_addr)
    if created is None:
        abort(400)
    return jsonify({"event": created[2], "date": created[3], "ip": created[4]}), 201
def get_card(word, imdb_id):
    """Build the vocabulary card for a word.

    Args:
        word (:obj:`str`): the word
        imdb_id (:obj:`int`): title id on *imdb.com*

    Returns:
        card (:obj:`array`): the word card, or None when no source knew it
    """
    logger.info("creating word card")
    postgre = DataBase()
    # Fetch the cached definition once -- the original queried the DB twice
    # (once for the None-check, once for the value).
    cached = postgre.GetDefinition(word)
    if cached is not None:
        logger.info("%s - already in database" % word)
        # SECURITY NOTE: eval() on database content executes arbitrary code
        # if the stored definition is ever tampered with; ast.literal_eval
        # would be safer if the value is always a plain Python literal.
        card = eval(cached)
        sentence = get_sentence(word, imdb_id)
        if sentence:
            card['sentence'] = sentence
        return card
    logger.info("adding information about '%s' to database" % word)
    # Try the dictionary backends in order of preference
    card = wordsapi.get_cards(word)
    if not card:
        card = yadict.get_card(word)
    if not card:
        card = urbandict.get_card(word)
    if card:
        ts = yadict.get_transcription(word)
        sentence = get_sentence(word, imdb_id)
        if ts:
            card['ts'] = ts
        card["translation"] = yadict.get_translations(word)
        # Persist before attaching the per-film sentence
        postgre.AddDefinition(word, str(card))
        if sentence:
            card['sentence'] = sentence
        return card
    return None
def delete_title_from_library_button(self, bot, query, chat_id):
    """Inline-keyboard button handler that deletes a film from the library.

    Args:
        bot (:class:`telegram.Bot`): bot handler
        query (:class:`telegram.CallbackQuery`): value returned by the
            inline keyboard
        chat_id (:obj:`int`): user chat id
    """
    # Button payload is "<action>_<index>" -- take the library index
    index = int(query.data.split("_")[1])
    data = flags[chat_id].get_library()
    logger.info("deleting %s" % data[index][1])
    postgre = DataBase()
    postgre.DeleteFilmFromLibrary(chat_id, data[index][1])
    # Rebuild the per-chat state from the DB after the delete
    flags[chat_id] = Flags()
    flags[chat_id].set_library(postgre.GetUserLibrary(chat_id))
    dic = flags[chat_id].get_library()
    print(dic)
    if dic == {}:
        bot.edit_message_text(text=Settings.EmptyLibraryTxt, chat_id=chat_id, message_id=query.message.message_id)
        return
    # Wrap the index so the cursor stays valid after shrinking the library
    index = index % len(dic)
    text_out = "http://imdb.com/title/tt%s" % dic[index][1]
    # Series get their season/episode appended to the message
    series_info = postgre.GetSeriesInfo(dic[index][1])
    if (series_info):
        text_out += "\n *Season* - %s *Episode* - %s" % (
            series_info['season'], series_info['episode'])
    reply_markup = library_navigate_markup(len(dic), index)
    bot.edit_message_text(text=text_out, chat_id=chat_id, message_id=query.message.message_id, reply_markup=reply_markup, parse_mode=ParseMode.MARKDOWN)
def disable_node(node_name):
    """Add *node_name* to the persistent disabled-hosts list.

    Returns True on success (including when the node was already listed),
    False on any other error.
    """
    try:
        db = DataBase(CLUES_DB)
        db.connect()
        # Parameterized insert: the original concatenated node_name straight
        # into the SQL text (injection-prone).
        db.execute('insert into disabled_hosts (hostname) values (?)', (node_name,))
        db.close()
        return True
    except IntegrityError:
        # The node was already in the list (original comment, translated
        # from Spanish) -- treat as success.
        return True
    except:
        # Original log call never filled its %s placeholder.
        logging.error("failed inserting node %s in the disabled node list" % node_name)
        return False
def setup():
    """Create and fill the DB, dump a baseline copy, then randomize it."""
    load_dotenv()
    # Build the database and populate it with data
    start_db = DataBase(os.getenv('DB_NAME'))
    start_db.create_all()
    start_db.fill_all()
    # Dump a snapshot to compare against later
    start_db.dump_db(os.getenv('DB_DUMP_NAME'))
    # Apply random changes to the live DB
    randomize_entity(start_db)
def __init__(self, window):
    """Game scene: map rendering, the player, NPCs and the first enemy."""
    self.db = DataBase()
    map_file = MAPS.get_path("map.tmx")
    self.running = False
    self.window = window
    self.window_size = window.size
    self.sdl_renderer = window.renderer
    self.map_renderer = TiledRenderer(map_file, self.sdl_renderer)
    self.player = Player(self.sdl_renderer)
    self.all_npc = []
    # Populate all_npc for the starting room
    self.init_npc("debug_room")
    self.doombat = Enemy(self.sdl_renderer, "doombat")
    self.entities = [self.player, self.doombat]
    # Render-layer indices -- presumably set properly later; confirm
    self.player_layer = 0
    self.enemy_layer = 0
def main():
    """Service entry point: consume Kafka messages forever and transcode ads.

    Never returns; on any error it backs off 10 s and re-enters the
    consume loop.
    """
    db = DataBase()
    consumer = Consumer(kafka_group)
    while True:
        try:
            print("ad transcode service: listening to messages", flush=True)
            for msg in consumer.messages(kafka_topic):
                print("ad transcode service: recieved message: " + str(msg), flush=True)
                ADTranscode(msg, db)
        except Exception as e:
            print(str(e))
            print("ad transcode exception in service")
            # Back off before re-entering the consume loop
            time.sleep(10)
def getInstallableApps():
    """ Static method to get the list of avalible apps

    Returns a list of (FeaturesApp, module, galaxy_module, recipe,
    requirements) tuples for every recipe flagged as an app, or [] when the
    DB backend is unavailable or on any error.
    """
    if not DataBase.db_available:
        return []
    else:
        try:
            db = DataBase(Config.RECIPES_DB_FILE)
            db.connect()
            res = []
            result = db.select('select * from recipes where isapp = 1')
            for d in result:
                # Columns: 0=name, 1=version, 2=module, 3=recipe,
                # 5=galaxy_module, 7=requirements (4 and 6 unused here)
                name = d[0]
                version = d[1]
                module = d[2]
                recipe = d[3]
                galaxy_module = d[5]
                requirements = d[7]
                res.append((FeaturesApp.from_str(name, version), module,
                            galaxy_module, recipe, requirements))
            return res
        except Exception:
            return []
def collect():
    """Main collection loop: fan ctrl/data/sql work out to three processes,
    then sleep until the next collection interval.  Never returns.
    """
    db_info = config['db']
    thread_num = config['thread_num']
    miner_type = config['miner_type']
    db = DataBase(**db_info)
    _logger = logger.create_rotate_logger("Collect Info", logging.INFO, "collect.log")
    while True:
        _logger.info("collect start")
        timestamp = datetime.datetime.now()
        update_db_if_needed(db, timestamp)
        # init_db(db, miner_type, timestamp)
        ctrl_info = get_ctrls(db, _logger)
        ctrl_queue = init_ctrl_queue(ctrl_info)
        data_queue = JoinableQueue()
        sql_queue = JoinableQueue()
        # Three-stage pipeline: ctrl -> data -> sql, joined via queues
        ctrl_process = Process(target=CtrlProcess, name="collect ctrl", args=(ctrl_queue, data_queue, timestamp, thread_num))
        data_process = Process(target=DataProcess, name="collect data", args=(data_queue, sql_queue, timestamp, thread_num))
        sql_process = Process(target=SQLProcess, name="collect sql", args=(sql_queue, db_info, timestamp, thread_num))
        ctrl_process.start()
        data_process.start()
        sql_process.start()
        ctrl_process.join()
        data_process.join()
        sql_process.join()
        _logger.info("collect end")
        clean_data_log()
        # Sleep off whatever is left of the configured interval
        duration = (datetime.datetime.now() - timestamp).seconds
        interval = get_collect_interval(db)
        _logger.info('time %d %d', duration, interval)
        if duration < interval:
            time.sleep(interval - duration)
def report_new_lrms_job(job):
    """Record a newly-seen LRMS job, skipping JobIDs already stored; falls
    back to logging when the DB is unavailable."""
    db = DataBase(CLUES_DB)
    if DataBase.db_available:
        db.connect()
        result = db.select("select JobID from lrms_jobs where JobID = '" + job[JOB_ID] + "'")
        if (result is None or len(result) == 0):
            # Columns: Manager, JobID, Nodes, Specs, WholeCluster, QueuedTime,
            # plus two zeroed time fields
            if job[JOB_WHOLE_CLUSTER]:
                t = (job[JOB_RM_NAME], job[JOB_ID], job[JOB_NODES], job[JOB_SPECS], 1, job[JOB_TIMESTAMP])
            else:
                t = (job[JOB_RM_NAME], job[JOB_ID], job[JOB_NODES], job[JOB_SPECS], 0, job[JOB_TIMESTAMP])
            db.execute('insert into lrms_jobs values (?,?,?,?,?,?,0,0)', t)
        else:
            # Log only when the job really is already stored; the original
            # emitted this message unconditionally before the check.
            logging.debug("The jobs with JobID " + job[JOB_ID] + " is yet in the DB, skip it.")
        db.close()
    else:
        msg = ('REPORT NEW_LRMS_JOB %s;%s;%s;%s;%s;%s;0;0' %(job[JOB_RM_NAME], job[JOB_ID], job[JOB_NODES], job[JOB_SPECS], job[JOB_WHOLE_CLUSTER], job[JOB_TIMESTAMP]))
        logging.info(msg)
def excluded_nodes():
    """Read the persistent disabled-hosts list, creating the table on first use.

    NOTE(review): 'exclusion_list' and 'hosts' are computed but the visible
    code never returns or combines them -- confirm a trailing return was
    not lost from this chunk.
    """
    exclusion_list = initly_excluded
    hosts = []
    db = DataBase(CLUES_DB)
    db.connect()
    try:
        if not db.table_exists("disabled_hosts"):
            # The table does not exist yet, so create it
            # (original comment in Spanish)
            db.execute('''create table "disabled_hosts" ("hostname" TEXT PRIMARY KEY)''')
        res = db.select('''select hostname from "disabled_hosts"''')
        hosts = [h[0] for h in res]
    except Exception, e:
        logging.error("failed trying to obtain persistent disabled node list")
class AnalyticsHandler(web.RequestHandler):
    """Tornado handler that serves analytics query results as JSON."""

    def __init__(self, app, request, **kwargs):
        super(AnalyticsHandler, self).__init__(app, request, **kwargs)
        # One DB handle per request handler instance
        self._db = DataBase()

    def check_origin(self, origin):
        # Allow cross-origin requests from any origin.
        return True

    def get(self):
        """Query one stream over a [start, end] time window and write JSON.

        'stream' arrives URL-encoded as a path; the second-to-last path
        segment is the stream identifier.
        """
        stream = unquote(str(self.get_argument("stream"))).split("/")[-2]
        start = float(self.get_argument("start"))
        end = float(self.get_argument("end"))
        r = self._db.query(stream, [start, end])
        self.set_status(200, 'OK')
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(r))
def initial_upload():
    """Seed the local DB with all requests and users fetched from the two
    remote endpoints (urlrequests / urlusers).

    SECURITY NOTE(review): both INSERT statements are built with f-strings
    from remote JSON -- SQL-injection-prone; if execute_query supports
    parameter binding, these should be converted.
    """
    db = DataBase()
    all_requests = json.loads(requests.get(urlrequests).content)
    all_users = json.loads(requests.get(urlusers).content)
    all_users = all_users['data']
    # print(all_requests)
    for r in all_requests:
        # '99' is a hard-coded column value -- presumably a default owner or
        # status id; confirm against the schema.
        insertSt = f"insert into requests {jobsColumn} values({r['id']}, '{r['status']}', '{r['name']}',99, '{json.dumps(r['notifications'])}', '{r['timezone']}','{json.dumps(r['request'])}','{r['request_interval_seconds']}','{r['tolerated_failures'] or 5}','{r['created']}', '{datetime.datetime.now()}');"
        db.execute_query(insertSt)
    for user in all_users:
        insertSt = f"insert into users {usersColumn} values ({user['id']}, '{user['photoUrl']}', '{user['name']}', '{user['label']}', '{user['type']}');"
        db.execute_query(insertSt)
def generate_states(dbfile, init_date, end_date, delay):
    """Replay report_nodes rows into per-interval node-state snapshots.

    Returns (node_names, rows) where each row is [date, (name, state, slots,
    total), ...] sampled every *delay* minutes; on error prints the
    exception and implicitly returns None.
    """
    try:
        db = DataBase(dbfile)
        db.connect()
        # Seed the state map with every node ever reported
        result = db.select('''select distinct(Node) from report_nodes''')
        nodes = {}
        for d in result:
            nodes[d[0]] = (None, None, None, None)
        now = float(time.time())
        # Clamp the window to the present
        if end_date == None or end_date > now:
            end_date = now
        result = db.select('''select * from report_nodes where date <= %s order by date''' % (end_date))
        res = []
        curr_date = init_date
        for d in result:
            if curr_date == None:
                curr_date = d[DATE]
            # Emit a snapshot row for every sample point passed before this event
            while d[DATE] > curr_date:
                row = [curr_date]
                for nodename in nodes.keys():
                    (date, state, slots, total) = nodes[nodename]
                    row.append((nodename, state, slots, total))
                res.append(row)
                curr_date += delay * 60 # delay in minutes
            nodes[d[NODE_NAME]] = (d[DATE], d[STATUS], d[FREE_SLOTS], d[TOTAL_SLOTS])
        # repeat for the last row
        if len(result) > 0:
            while d[DATE] > curr_date:
                row = [curr_date]
                for nodename in nodes.keys():
                    (date, state, slots, total) = nodes[nodename]
                    row.append((nodename, state, slots, total))
                res.append(row)
                curr_date += delay * 60 # delay in minutes
        db.close()
        return (nodes.keys(), res)
    except Exception, e:
        print e
def print_jobs_cvs1(dbfile, init_date, end_date, delay):
    """Print per-interval new-job counts per manager as semicolon-separated
    lines (CSV-ish), sampled every *delay* minutes.

    NOTE(review): several accumulators (job_wait, wait_time, seq_jobs,
    par_jobs, nodes_par_jobs, queued_jobs) are initialized but never used in
    the visible code -- presumably leftovers or used by a lost tail.
    """
    #try:
    queued_jobs = {}
    job_count = {}
    job_wait = {}
    wait_time = {}
    seq_jobs = {}
    par_jobs = {}
    nodes_par_jobs = {}
    db = DataBase(dbfile)
    db.connect()
    # Build the WHERE clause from whichever bounds were supplied
    sql = "select * from report_jobs"
    if end_date is not None or init_date is not None:
        sql = sql + " where "
    if end_date is not None:
        sql = sql + "date <= %f" % end_date
    if end_date is not None and init_date is not None:
        sql = sql + " and "
    if init_date is not None:
        sql = sql + "date >= %f" % init_date
    sql = sql + " order by date"
    res = db.select(sql)
    total = 0
    curr_date = init_date
    for d in res:
        row = range(NUM_JOBS_DATA_FIELDS)
        row[DATE] = d[DATE]
        row[MANAGER] = d[MANAGER]
        row[NEW_JOB] = int(d[NEW_JOB])
        row[RELEASE_JOB] = int(d[RELEASE_JOB])
        if row[NEW_JOB] > 0:
            if not row[MANAGER] in job_count.keys():
                job_count[row[MANAGER]] = 0
            # Flush one output line per elapsed sample interval
            while curr_date <= row[DATE]:
                msg = str(datetime.fromtimestamp(float(curr_date))) + "; "
                for rm in job_count:
                    total += job_count[rm]
                    msg += rm + "; " + str(job_count[rm]) + " "
                    job_count[rm] = 0
                print msg
                curr_date += delay * 60 # delay in minutes
        if row[NEW_JOB] > 0:
            job_count[row[MANAGER]] += 1
    db.close()
def test_simple() -> None:
    """End-to-end smoke test of the DataBase API: create a table, insert,
    delete by key and by criteria, update, query, and duplicate/missing-key
    error handling."""
    db = DataBase()
    assert db.num_tables() == 0
    db.create_table('Students',
                    [DBField('ID', int),
                     DBField('First', str),
                     DBField('Last', str),
                     DBField('Birthday', dt.datetime)
                     ], 'ID')
    assert db.num_tables() == 1
    students = db.get_table('Students')
    # Insert 50 records with sequential IDs and birthdays
    for i in range(50):
        students.insert_record(dict(
            ID=1_000_000 + i,
            First=f'John{i}',
            Last=f'Doe{i}',
            Birthday=dt.datetime(2000, 2, 1) + dt.timedelta(days=i)
        ))
    # Delete by primary key and by various selection criteria
    students.delete_record(1_000_001)
    students.delete_records([SelectionCriteria('ID', '=', 1_000_020)])
    students.delete_records([SelectionCriteria('ID', '<', 1_000_003)])
    students.delete_records([SelectionCriteria('ID', '>', 1_000_033)])
    students.delete_records([
        SelectionCriteria('ID', '>', 1_000_020),
        SelectionCriteria('ID', '<', 1_000_023)
    ])
    students.update_record(1_000_009, dict(First='Jane', Last='Doe'))
    # 50 inserted - 22 deleted = 28 remaining
    assert students.count() == 28
    results = students.query_table([SelectionCriteria('First', '=', 'Jane')])
    assert len(results) == 1
    assert results[0]['First'] == 'Jane'
    with pytest.raises(ValueError):
        # record already exists
        students.insert_record(dict(
            ID=1_000_010,
            First='John',
            Last='Doe',
            Birthday=dt.datetime(2000, 2, 1) + dt.timedelta(days=i)
        ))
    with pytest.raises(ValueError):
        # record was deleted earlier, so deleting again must fail
        students.delete_record(key=1_000_000)
def chekLink(call):
    """Verify the payment link the user supplied and, on confirmation, hand
    out the gift card and record the order; otherwise re-show the check
    button with an error message."""
    db = DataBase()
    markup = types.InlineKeyboardMarkup(row_width=1)
    btn_accept = types.InlineKeyboardButton(text="✅ Сheck", callback_data=call.data)
    markup.add(btn_accept)
    try:
        # Stored link is the third column of the user's latest record;
        # strip surrounding whitespace/newlines before parsing
        link = re.sub("^\s+|\n|\r|\s+$", "", db.getLink(call.from_user.id)[0][2])
        # Expected price is embedded in the callback payload "...:<price>X"
        price = int(call.data.split(":")[2][:-1])
        mess = parser(link, price)
        # mess = parser(link, 50)
        for mes in mess:
            if mes == True:
                # Payment confirmed: swap the keyboard for the gift-card button
                markup = types.InlineKeyboardMarkup(row_width=1)
                btn_accept = types.InlineKeyboardButton(text="🔑 Gift Cards ", callback_data="deleteAll:")
                markup.add(btn_accept)
                mess = "🎉*Congratulations*🎉\n\nTransaction has been confirmed. Click on the button to receive your Gift Card."
                bot.send_message(call.from_user.id, mess, reply_markup = markup, parse_mode="Markdown")
                user_id = call.from_user.id
                try:
                    user_name = "@"+call.from_user.username
                except:
                    user_name = "-"
                shop_name = call.data.split(":")[1]
                db = DataBase()
                db.newOrder(user_id, user_name, shop_name, price)
                return True
            elif mes == mess[-1]:
                # Last status line carries the retry keyboard
                bot.send_message(call.from_user.id, mes, reply_markup = markup, parse_mode="Markdown")
            else:
                bot.send_message(call.from_user.id, mes, parse_mode="Markdown")
                time.sleep(2)
    except:
        # Any failure (missing link, parse error, ...) falls through to a
        # generic "link not found" prompt with the retry button
        bot.send_message(call.from_user.id, "(1/3) Link ❌\n\nThe link you provided was not found, or someone has already sent it. Send a new link and try again", reply_markup = markup, parse_mode="Markdown")
def test_ships():
    """Yield per-component checks comparing every ship in the live DB
    against its counterpart in the dumped baseline DB."""
    current_db = DataBase(os.getenv('DB_NAME'))
    baseline_db = DataBase(os.getenv('DB_DUMP_NAME'))
    for current in current_db.get_all('Ships'):
        # Current row: (name, weapon, hull, engine)
        name, weapon, hull, engine = current
        # Matching baseline row, keyed by the ship name
        _, base_weapon, base_hull, base_engine = baseline_db.get_one('Ships', current[0])
        yield check_weapon, name, weapon, base_weapon
        yield check_hull, name, hull, base_hull
        yield check_engine, name, engine, base_engine
def print_lrms_jobs_info(dbfile, init_date, end_date):
    """Print per-manager LRMS job statistics (count, average duration and
    whole-cluster job count) for jobs started in the given window.

    On error, prints the exception and returns None.
    """
    try:
        db = DataBase(dbfile)
        db.connect()
        now = float(time.time())
        # Clamp the window to the present
        if end_date > now:
            end_date = now
        # Build the WHERE clause from whichever bounds were supplied
        sql = "select * from lrms_jobs"
        if end_date is not None or init_date is not None:
            sql = sql + " where "
        if end_date is not None:
            sql = sql + "StartTime <= %f" % end_date
        if end_date is not None and init_date is not None:
            sql = sql + " and "
        if init_date is not None:
            sql = sql + "StartTime >= %f" % init_date
        res = db.select(sql)
        job_time = {}
        cont = {}
        whole_cluster_jobs = {}
        for d in res:
            # Accumulate total run time per manager (only finished jobs)
            if not d[JOB_RM_NAME] in job_time.keys():
                job_time[d[JOB_RM_NAME]] = 0
            if d[JOB_END_TIME] != None:
                job_time[d[JOB_RM_NAME]] += d[JOB_END_TIME] - d[JOB_START_TIME]
            # Count whole-cluster jobs per manager
            if not d[JOB_RM_NAME] in whole_cluster_jobs.keys():
                whole_cluster_jobs[d[JOB_RM_NAME]] = 0
            if d[JOB_WHOLE_CLUSTER]:
                whole_cluster_jobs[d[JOB_RM_NAME]] += 1
            # Count jobs per manager
            if not d[JOB_RM_NAME] in cont.keys():
                cont[d[JOB_RM_NAME]] = 0
            cont[d[JOB_RM_NAME]] += 1
        for manager in cont.keys():
            avg_job_time = timedelta(seconds=job_time[manager] / float(cont[manager]))
            print "Manager: " + manager
            print "Total LRMS jobs: " + str(cont[manager])
            print "Avg. job time: " + str(avg_job_time)
            print "Total whole cluster jobs: " + str(whole_cluster_jobs[manager])
        db.close()
    except Exception, e:
        print e
def look_up(self, word):
    """Look *word* up in every configured dictionary and return the combined
    formatted definitions ('' when nothing is found or no config exists).

    NOTE(review): cPickle.load on the config file executes arbitrary code if
    the file is ever attacker-controlled -- confirm its provenance.
    """
    result = ''
    if os.path.exists(self.config_file):
        # Config is a pickled list of (title, path) dictionary entries
        dicts = None
        with open(self.config_file, 'rb') as f:
            dicts = cPickle.load(f)
        for title, path in dicts:
            db = DataBase()
            db.open(path)
            definition = db.look_up(word)
            db.close()
            if definition:
                # Blank line between entries from different dictionaries
                if result:
                    result += '\n'
                result += "<b>[%s]</b>\n<quote>%s</quote>\n" % (title, definition)
    return result
def print_jobs_cvs(dbfile, init_date, end_date, delay):
    """Print, for every *delay*-minute sample point, the total node demand
    and the number of simultaneously active jobs, semicolon-separated."""
    #try:
    db = DataBase(dbfile)
    db.connect()
    now = float(time.time())
    # Clamp the window to the present
    if end_date > now:
        end_date = now
    sql = "select * from lrms_jobs where QueuedTime NOT NULL and StartTime NOT NULL and QueuedTime < %f and EndTime NOT NULL and EndTime > %f order by QueuedTime asc" % (end_date, init_date)
    res = db.select(sql)
    active_jobs = []
    curr_date = init_date
    for d in res:
        start_time = d[JOB_QUEUE_TIME]
        # Skip jobs that waited more than 10 days in the queue --
        # presumably treated as outliers/bad data; confirm.
        diff = datetime.fromtimestamp(d[JOB_START_TIME]) - datetime.fromtimestamp(d[JOB_QUEUE_TIME])
        if diff.days > 10:
            continue
        # Emit one line per sample point up to this job's queue time
        while curr_date <= start_time:
            # Retire jobs that ended before this sample point
            tmp_active_jobs = []
            tmp_active_jobs.extend(active_jobs)
            for job in tmp_active_jobs:
                if job[JOB_END_TIME] < curr_date:
                    active_jobs.remove(job)
            msg = str(datetime.fromtimestamp(float(curr_date))) + "; "
            total=0
            for elem in active_jobs:
                nodes = elem[JOB_NODES]
                # Specs flag doubles the node demand -- confirm semantics
                if elem[JOB_SPECS]:
                    nodes*=2
                total += nodes
            msg += str(total) + ";"
            msg += str(len(active_jobs))
            print msg
            curr_date += delay * 60 # delay in minutes
        active_jobs.append(d)
    db.close()
def load_jobs_from_db(dbfile):
    """Load all report_jobs rows, date-ordered, into typed row lists.

    NOTE(review): 'table' is filled but never returned by the visible code
    -- confirm a trailing 'return table' was not lost from this chunk.
    """
    table = []
    try:
        db = DataBase(dbfile)
        db.connect()
        res = db.select('''select * from report_jobs order by date''')
        for d in res:
            # Pre-size the row, then fill by field index
            row = range(NUM_JOBS_DATA_FIELDS)
            row[DATE] = datetime.fromtimestamp(float(d[DATE]))
            row[MANAGER] = d[MANAGER]
            row[NEW_JOB] = int(d[NEW_JOB])
            row[RELEASE_JOB] = int(d[RELEASE_JOB])
            table.append(row)
        db.close()
    except Exception, e:
        print e
def load_nodes_from_db(dbfile):
    """Load all report_nodes rows, date-ordered, into typed row lists.

    NOTE(review): 'table' is filled but never returned by the visible code
    -- confirm a trailing 'return table' was not lost from this chunk.
    """
    table = []
    try:
        db = DataBase(dbfile)
        db.connect()
        res = db.select('''select * from report_nodes order by date''')
        for d in res:
            # Pre-size the row, then fill by field index
            row = range(NUM_NODES_DATA_FIELDS)
            row[DATE] = datetime.fromtimestamp(float(d[DATE]))
            row[NODE_NAME] = d[NODE_NAME]
            row[STATUS] = int(d[STATUS])
            row[FREE_SLOTS] = int(d[FREE_SLOTS])
            row[TOTAL_SLOTS] = int(d[TOTAL_SLOTS])
            table.append(row)
        db.close()
    except Exception, e:
        print e