def __init__(self, initial_vars=None) -> None:
    self.total_self_progress = 0  # if > 0 output progress during run (as opposed to batch file progress)
    self.the_command = None
    self.fixed_command = None
    DBManager.__init__(self)
    IndexYamlReaderBase.__init__(self, config_vars)
    self.path_searcher = utils.SearchPaths(config_vars, "__SEARCH_PATHS__")
    self.url_translator = connectionBase.translate_url
    self.init_default_vars(initial_vars)
    # noinspection PyUnresolvedReferences
    self.read_defaults_file(super().__thisclass__.__name__)
    # initialize the search paths helper with the current directory and dir where instl is now
    self.path_searcher.add_search_path(Path.cwd())
    self.path_searcher.add_search_path(Path(config_vars["__ARGV__"][0]).resolve())
    self.path_searcher.add_search_path(config_vars["__INSTL_DATA_FOLDER__"].Path())
    self.batch_accum = PythonBatchCommandAccum()
    self.dl_tool = CUrlHelper()
    self.out_file_realpath = None
    self.internal_progress = 0  # progress of preparing installer NOT of the installation
    self.num_digits_repo_rev_hierarchy = None
    self.num_digits_per_folder_repo_rev_hierarchy = None
    self.update_mode = False
    self.python_batch_names = PythonBatchCommandBase.get_derived_class_names()
def init_from_cmd_line_options(self, cmd_line_options_obj):
    """ turn command line options into variables """
    if "__MAIN_COMMAND__" in config_vars:
        self.the_command = str(config_vars["__MAIN_COMMAND__"])
        self.fixed_command = self.the_command.replace('-', '_')
    # to do: in python3.8, with the new sqlite backup function, the memory database
    # can be written to disk if needed
    if getattr(sys, 'frozen', False) or self.the_command in self.commands_that_need_memory_db:
        config_vars['__MAIN_DB_FILE__'] = ':memory:'
    DBManager.set_refresh_db_file(self.the_command in self.commands_that_need_to_refresh_db_file)
    if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
        config_vars["__HELP_SUBJECT__"] = cmd_line_options_obj.subject
    else:
        config_vars["__HELP_SUBJECT__"] = ""
    if cmd_line_options_obj.which_revision:
        config_vars["__WHICH_REVISION__"] = cmd_line_options_obj.which_revision[0]
    if cmd_line_options_obj.define:
        individual_definitions = cmd_line_options_obj.define[0].split(",")
        for definition in individual_definitions:
            name, value = definition.split("=")
            config_vars[name] = value
    self.get_default_out_file()
def init_from_cmd_line_options(self, cmd_line_options_obj):
    """ turn command line options into variables """
    if "__MAIN_COMMAND__" in config_vars:
        self.the_command = str(config_vars["__MAIN_COMMAND__"])
        self.fixed_command = self.the_command.replace('-', '_')
    DBManager.set_refresh_db_file(self.the_command in self.commands_that_need_to_refresh_db_file)
    if hasattr(cmd_line_options_obj, "subject") and cmd_line_options_obj.subject is not None:
        config_vars["__HELP_SUBJECT__"] = cmd_line_options_obj.subject
    else:
        config_vars["__HELP_SUBJECT__"] = ""
    if cmd_line_options_obj.which_revision:
        config_vars["__WHICH_REVISION__"] = cmd_line_options_obj.which_revision[0]
    if cmd_line_options_obj.define:
        individual_definitions = cmd_line_options_obj.define[0].split(",")
        for definition in individual_definitions:
            name, value = definition.split("=")
            config_vars[name] = value
    self.get_default_out_file()
class WhoisStore(object):
    """Class for storing metadata about schools that we can check before we go
    directly to the XML (and other types) data store."""

    def __init__(self, dbManager=None):
        # Call methods for creating environments and DB object
        # Setup our DBManager object
        if dbManager is None:
            self.dbManager = DBManager()
        else:
            self.dbManager = dbManager
        # Try and get our current whois store
        self.whois = self.dbManager.get("whois")
        if self.whois is None:
            self.dbManager.put("whois", {})
            self.whois = {}

    def _getEduWHOIS(self, domain, whoisServer="whois.educause.net", port=43):
        # Make sure we're only looking up the root domain, not the entire domain name
        print "domain: " + domain
        domainSplit = domain.split(".")
        host = domainSplit[-2] + "." + domainSplit[-1]
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((whoisServer, port))
        s.send(host + "\r\n")
        response = ""
        while True:
            data = s.recv(4096)
            response += data
            if data == '':
                break
        s.close()
        responseSplit = response.split("\n")
        try:
            registrantIndex = responseSplit.index("Registrant:")
            result = responseSplit[registrantIndex + 1].strip()
        except ValueError:
            return None
        # HEINOUS, but the WHOIS entry doesn't help us here
        if domain == "umich.edu":
            result = "University of Michigan"
        return result

    def getSchoolName(self, hostname):
        print "hostname: " + hostname
        try:
            return self.whois[hostname]
        except KeyError:
            schoolName = self._getEduWHOIS(hostname)
            self.whois[hostname] = schoolName
            self.dbManager.put("whois", self.whois)
            return schoolName
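# --- Hypothetical usage sketch, not part of the original source ---
# Assumes a DBManager with dict-like get/put persistence, which is all
# WhoisStore relies on; a tiny in-memory stand-in is used here instead of
# the project's real class. Requires network access for the WHOIS lookup.
class _FakeDBManager(object):
    def __init__(self):
        self._store = {}

    def get(self, key):
        return self._store.get(key)

    def put(self, key, value):
        self._store[key] = value


store = WhoisStore(dbManager=_FakeDBManager())
print store.getSchoolName("umich.edu")  # resolved via WHOIS, then cached in the store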
def __init__(self, cities):
    super(MeetupStatisticsFetcher, self).__init__()
    DBManager.init(1)
    self.cursor = DBManager.get_cursor()
    self.cities = cities
    self.logger = utilities.logger_with_name("StatisticsFetcher")
    self.query, self.title = "", ""
    self.results = dict()
def start(self):
    db = DBManager(self.filename)
    self.combo['values'] = ()
    for row in db.get_tables_names():
        self.combo['values'] += row
    self.combo.current(0)
    cols = db.get_columns_names(self.combo.get())
    curs = db.get_rows(self.combo.get())
    self.tree_update(curs, cols)
def save_table_to_db(table):
    db_mgr = DBManager()
    array = []
    for row in table:
        fr = int(row['fr'])
        to = int(row['to'])
        re = row['result']
        if isinstance(re, unicode):
            # re.replace(u"\u2018", "'").replace(u"\u2019", "'")
            re = str(re.encode(encoding='utf-8', errors='replace'))
        print('type of re in db saving is: ' + str(type(re)))
        re = re.lower()
        ide = row['identifier']
        if isinstance(ide, unicode):
            ide = str(ide.encode(encoding='utf-8', errors='replace'))
        leads_to = None
        try:
            leads_to = row['leads_to']
        except KeyError:
            pass
        array.append({
            'identifier': ide,
            'fr': fr,
            'to': to,
            're': re,
            'table': row['table_name'],
            'leads_to_table': leads_to
        })
    db_mgr.fuzion_tables.add_many(array)
def save(self, actor_name, actor_role):
    db_mgr = DBManager()
    db_mgr.table_primary_stats.save_character(character_name=actor_name,
                                              role=actor_role,
                                              intelligence=self.intelligence,
                                              reflexes=self.reflexes,
                                              technique=self.technique,
                                              dexterity=self.dexterity,
                                              presense=self.presense,
                                              willpower=self.willpower,
                                              constitution=self.constitution,
                                              strength=self.strength,
                                              body=self.body,
                                              move=self.move)
def __init__(self):
    '''
    Initializes the protobuf server by setting up a database manager object
    for the server to communicate with.

    :return: ProtobufServer object
    '''
    self.db = DBManager()
def run(self):
    self.database = DBManager('client')
    self.notify('init_db')
    try:
        asyncio.run(self.async_start())
    except Exception as e:
        logger.error(e, exc_info=True)
        # import ipdb; ipdb.set_trace()
        self.notify('fail', msg=str(e))
def run_sql(self):
    query = self.entry.get()
    db = DBManager(self.filename)
    try:
        curs = db.query(query)
        cols = list(map(lambda x: x[0], curs.description))
        self.tree_update(curs, cols)
    except Exception as e:
        self.error_window = Toplevel(self.master)
        self.error_window.grab_set()
        self.error_window.title('Что-то пошло не так :(')  # "Something went wrong :("
        self.error_window.minsize(320, 48)
        self.error_window.resizable(0, 0)
        label = ttk.Label(self.error_window, text=e)
        label.pack(side="top", fill="both", padx=5, pady=5)
        ok = ttk.Button(self.error_window, text='Ок',  # "OK"
                        command=self.error_window.destroy)
        ok.pack(side="right", padx=5, pady=5)
def __init__(self, **kwargs):
    if self.STANDINGS is None:
        raise Exception('You must set the standings dictionary first')
    self.name = kwargs.get('name')
    # self.teams = kwargs.get('teams')
    self.wins = 0
    self.losses = 0
    self.ties = 0
    self.db = DBManager()
    self.teams = self.sort_by_rec(kwargs.get('teams'))
    self.fill_in_record()
def load(self, name):
    db_mgr = DBManager()
    table = None
    if db_mgr.fuzion_tables.count_options(name) > 0:
        table = db_mgr.fuzion_tables.get_table(name)
    else:
        table = utilities.get_aws_table(name)
        utilities.save_table_to_db(table)
        table = db_mgr.fuzion_tables.get_table(name)
    if table is not None:
        for row in table:
            self.add_option(row.fr, row.to, row.re,
                            leads_to=row.leads_to_table,
                            identifier=row.identifier)
def download_video(update, context):
    db = DBManager()
    url = update.message.text
    chat_id = update.message.chat_id
    quality = db.get_chat_quality(chat_id)
    username = str(update.message.from_user.username)
    if is_url(url) is False:
        update.message.reply_text('Enter Url please')
        return
    file_id = db.get_url_file_id(url, quality)
    if file_id:
        update.message.reply_audio(file_id)
        db.insert_user_request(url, username)
        context.bot.send_message(chat_id=owner_chat_id,
                                 text=TELEGRAM_LOG.format(username, url))
        return
    try:
        meta = downloader.download_video(url, quality)
    except youtube_dl.utils.DownloadError as e:
        update.message.reply_text(str(e))
        return
    audio_file = open(meta.file_name, 'rb')
    cover = open(meta.cover, 'rb')
    response = update.message.reply_audio(
        audio_file,
        title=meta.meta.get('title', None),
        performer=meta.meta.get('uploader', None),
        duration=meta.meta.get('duration', None),
        caption=meta.meta.get('title', None),
        thumb=cover)
    context.bot.send_message(chat_id=owner_chat_id,
                             text=TELEGRAM_LOG.format(username, url))
    db.insert_file_id(url, response['audio']['file_id'], quality)
    db.insert_user_request(url, username)
    os.remove(meta.file_name)
    os.remove(meta.cover)
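# --- Hypothetical wiring sketch, not part of the original source ---
# The (update, context) signature matches python-telegram-bot's callback-context
# API (v12-style); a handler like download_video would typically be registered
# roughly as below. The token string is a placeholder.
from telegram.ext import Updater, MessageHandler, Filters

updater = Updater("BOT_TOKEN", use_context=True)
updater.dispatcher.add_handler(MessageHandler(Filters.text, download_video))
updater.start_polling()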
def tree_update(self, cursor, columns):
    db = DBManager(self.filename)
    for i in self.tree.get_children():
        self.tree.delete(i)
    self.tree['show'] = 'headings'
    self.tree['columns'] = ()
    self.tree['columns'] = tuple(columns)
    for col in columns:
        self.tree.column(col, stretch=False, width=100)
        self.tree.heading(col, text=col)
    for row in cursor:
        self.tree.insert('', 'end', values=row)
def load(self, actor_name, actor_role):
    db_mgr = DBManager()
    stats = db_mgr.table_primary_stats.get_character(actor_role, actor_name)
    self.intelligence = stats.intelligence
    self.reflexes = stats.reflexes
    self.technique = stats.technique
    self.dexterity = stats.dexterity
    self.presense = stats.presense
    self.willpower = stats.willpower
    self.constitution = stats.constitution
    self.strength = stats.strength
    self.body = stats.body
    self.move = stats.move
class Engine:
    def __init__(self):
        self.hashnet = Hashnet()
        self.hashnet.load()
        self.db = DBManager(**db_params)

    def match(self, target, candidates, threshold=10):
        hash_matrix = get_hash_matrix(candidates)
        hashcode, features = target
        hamming_dists = hamming_dist(hashcode, hash_matrix)
        indices = np.argpartition(hamming_dists, threshold)[:threshold]
        shortlist = [candidates[i] for i in indices]
        feature_matrix = get_feature_matrix(shortlist)
        dists = cosine_dist(features, feature_matrix)
        winner = shortlist[dists.argmin()]
        distance = dists.min()
        return winner, distance

    def search(self, target_image, page_size):
        tick = datetime.now()
        target_features = self.hashnet.extract_features(target_image)
        session = self.db.Session()
        current_page = 0
        queryset = session.query(Image).filter(Image.hash != None)
        optimal_dist = None
        winner = None
        while True:
            query = queryset.limit(page_size).offset(current_page * page_size)
            candidates = query.all()
            if not len(candidates):
                break
            current_page += 1
            matched, distance = self.match(target_features, candidates)
            if optimal_dist is None or distance < optimal_dist:
                optimal_dist = distance
                winner = matched
        session.close()
        tock = datetime.now()
        print("\nSearching completed in {}".format(tock - tick), flush=True)
        if winner is None:
            print("Nothing was found during the search, most likely because "
                  "there are no images in the database", flush=True)
            return None, None
        print("Most similar image is {}\nwith cosine distance={}".format(
            winner, optimal_dist), flush=True)
        return winner, 1 - optimal_dist
def run(self):
    DBManager.get_connection().jconn.setAutoCommit(False)
    while self.should_run:
        try:
            jobs = self.job_queue.get(True, 5)
        except:
            self.logger.debug("No item in the queue. Waiting for something to do...")
            continue
        self.logger.info("Executing jobs on the database (approx. " + str(self.job_queue.qsize()) + " remaining)")
        for job in jobs:
            match = re.search('UPSERT (.*) VALUES .*', job.query_string)
            self.logger.info("    Job table: " + match.group(1) if match else "finishing group...")
            self.logger.info("    Job size: " + str(get_size(job.values_list) / 1024.0) + " KB")
            utilities.try_execution(
                lambda: DBManager.get_cursor().executemany(job.query_string, job.values_list),
                "Executing query: " + job.query_string + ", Last tuple: " + str(job.values_list[-1]),
                self.logger)
        DBManager.get_connection().commit()
        self.logger.info("Finished executing jobs on the database")
    self.logger.info("PersisterThread stopped")
def __init__(self):
    self._api = VKApi(VK_USER, VK_PASSWORD, version=API_VERSION)
    self._user = self.get_user()
    self._user_id = self._user['id']
    self._db = DBManager()
    self._commands = {
        'cancel': self.cancel,
        'publish': self.publish,
        'delayed_publish': self.delayed_publish
    }
    print "[INFO] Logged in as %s %s (UID: %s)" % (
        self._user['first_name'], self._user['last_name'], self._user_id)
def __init__(self, dbManager=None):
    # Call methods for creating environments and DB object
    # Setup our DBManager object
    if dbManager is None:
        self.dbManager = DBManager()
    else:
        self.dbManager = dbManager
    # Try and get our current whois store
    self.whois = self.dbManager.get("whois")
    if self.whois is None:
        self.dbManager.put("whois", {})
        self.whois = {}
def save_changes(self):
    db = DBManager(self.filename)
    table = self.combo.get()
    db.query("DELETE FROM {}".format(table))
    cols = str(self.tree['columns'])[1:-1]
    columns = "(" + cols + ")"
    values = ''
    for i in self.tree.get_children():
        vals = str(self.tree.item(i)['values'])[1:-1]
        values += "(" + vals + "), "
    query = "INSERT INTO {} {} VALUES {}".format(table, columns, values[:-2])
    db.query(query)
def update():
    # Start logging
    init_logging()
    # Create dbmanager and httpclient
    db_client = DBManager(db_file, db_logger_name)
    http_client = HttpClient(host, url_paths, http_logger_name)
    # Create db if not existing
    if db_client.is_empty():
        db_client.create_db()
    # print(db_client.fetch_products_from_db())
    for product_group, url_path in url_paths.items():
        html_file = http_client.fetch_html_file(host, url_path)
        json_file = http_client.parse_html_file(html_file)
        db_client.add_products(product_group, json_file)
def load(self, tablename):
    self.name = tablename
    print(tablename)
    db_mgr = DBManager()
    if db_mgr.fuzion_tables.count_options(tablename) > 0:
        pass
    else:
        t = utilities.get_aws_table(tablename)
        utilities.save_table_to_db(t)
    t = db_mgr.fuzion_tables.get_table(tablename)
    for row in t:
        option = TraitsTableOption()
        option.fr = int(row.fr)
        option.to = int(row.to)
        option.re = str(row.re)
        option.identifier = str(row.identifier)
        leads_to = ""
        if row.leads_to_table is not None:
            leads_to = row.leads_to_table
        option.leads_to = leads_to
        self.options.append(option)
def getAll(self):
    db = DBManager()
    db.connect()
    units = db.find_all()
    return units
try:
    if not alotconfig:
        alotconfig = configfiles[0]
        settings.write_default_config(alotconfig)
    settings.read_config(alotconfig)
    settings.read_notmuch_config(notmuchconfig)
except (ConfigError, OSError, IOError), e:
    sys.exit(e)

# store options given by config switches to the settingsManager:
if args['colour-mode']:
    settings.set('colourmode', args['colour-mode'])

# get ourselves a database manager
dbman = DBManager(path=args['mailindex-path'], ro=args['read-only'])

# determine what to do
try:
    if args.subCommand == 'search':
        query = ' '.join(args.subOptions.args)
        cmdstring = 'search %s %s' % (args.subOptions.as_argparse_opts(), query)
        cmd = commands.commandfactory(cmdstring, 'global')
    elif args.subCommand == 'compose':
        cmdstring = 'compose %s' % args.subOptions.as_argparse_opts()
        cmd = commands.commandfactory(cmdstring, 'global')
    else:
        default_commandline = settings.get('initial_command')
        cmd = commands.commandfactory(default_commandline, 'global')
except CommandParseError, e:
def cmb_update(self, _event):
    db = DBManager(self.filename)
    cols = db.get_columns_names(self.combo.get())
    curs = db.get_rows(self.combo.get())
    self.tree_update(curs, cols)
class Wrapper:
    os = None
    db = None

    def __init__(self):
        self.os = OpenstackBridge()
        self.db = DBManager()

    def __generate_user_data(self, name, email, enabled, expire):
        user = User()
        user.name = name
        user.email = email
        user.enabled = enabled
        if is_int(expire):
            expire = str(expire) + "d"
        user.expiration = (timestring.Range("next " + expire)).end
        project_name = (user.name).title() + "'s project"
        # must be called through self, otherwise name mangling leaves
        # __generate_password unresolvable at call time
        password = self.__generate_password()
        user.project_name = project_name
        user.password = password
        return user

    def __generate_password(self):
        wordfile = xp.locate_wordfile()
        mywords = xp.generate_wordlist(wordfile=wordfile, min_length=4, max_length=5)
        new_pass = xp.generate_xkcdpassword(mywords, delimiter=".", numwords=4)
        return new_pass

    # __get_user_data
    # Receives an ID, which can be a name, email or INT, and
    # returns the data associated.
    def __get_user_data(self, obj):
        db = self.db
        data = None
        if is_int(obj):
            data = db.select_by('id', obj)
        elif obj.find('@') >= 0:
            data = db.select_by('email', obj)
        else:
            data = db.select_by('name', obj)
        if data is None:
            sys.exit(0)
        return data

    def __db_add_user(self, user):
        db = self.db
        db.insert(user)

    def __create_user(self, user):
        print
        warnings.filterwarnings("ignore")
        INFO(3)
        self.os.register_user(user)
        INFO(4)
        self.os.create_network(user)
        user.history.register()
        self.__db_add_user(user)
        if user.enabled is False:
            self.os.update_user({'user_id': user.user_id,
                                 'project_id': user.project_id,
                                 'enabled': False})
        NOTIFY(5)

    def __update_user(self, id, dict):
        db = self.db
        u = self.__get_user_data(id)
        user = User()
        user.load(u)
        dict['project_id'] = user.project_id
        dict['user_id'] = user.user_id
        if 'name' in dict:
            user.name = dict['name']
        if 'email' in dict:
            user.email = dict['email']
        if 'enabled' in dict:
            if isinstance(dict['enabled'], unicode):
                try:
                    v = ast.literal_eval(dict['enabled'].title())
                    setattr(user, 'enabled', v)
                    dict['enabled'] = v
                except:
                    sys.exit(0)
            else:
                user.enabled = dict['enabled']
            user.history.register()
        if 'expiration' in dict:
            if user.enabled:
                exp = timestring.Date(dict['expiration'])
                user.expiration = exp
            else:
                MSG("user " + user.name + " is not enabled")
                return
        # self.os.update_user(dict)
        self.db.update(user)

    def __confirmation(self):
        yes = set(['yes', 'y', 'ye'])
        no = set(['no', 'n'])
        add = ''
        while (add not in yes) and (add not in no):
            add = GET_INPUT(2)
            if (add not in yes) and (add not in no):
                ERROR(1)
                sys.exit()
            if add in yes:
                break
            else:
                ERROR(2)
                sys.exit()

    def __line_highlight(self, line, color):
        line[0] = color(line[0])
        line[-1] = color(line[-1]) + NORMAL()

    def __line_color(self, line, color):
        for i in range(0, len(line)):
            line[i] = FRRED(line[i])

    # ADD
    # Generates user information given name and email.
    # Gets confirmation if needed and adds user to the
    # database.
    def add(self, name, email, enabled, expire, yes):
        user = self.__generate_user_data(name, email, enabled, expire)
        show_full_info(user)
        if not yes:
            self.__confirmation()
        self.__create_user(user)

    # SHOW
    # Displays user information retrieved from the database.
    # Receives an id, which can be a name, an email or an INT.
    def show(self, id):
        u = User()
        load = self.__get_user_data(id)
        u.load(load)
        p = self.os.get_project(u)
        show_project(u, p)
        show_project(u)

    # DELETE
    # Deletes one or multiple users from the database.
    # Receives an id, which can be a name, an email or an INT.
    # TODO: Delete Openstack credentials.
    def delete(self, list):
        for id in list:
            u = self.__get_user_data(id)
            # self.db.delete(u)

    # MODIFY
    # Modifies users information, both from database and Openstack.
    # Receives an id, which can be a name, an email or an INT, and
    # a dictionary with fields to modify.
    def modify(self, id, dict):
        self.__update_user(id, dict)

    # MIGRATE
    # Takes users who are in Openstack's database and copies them
    # over to the Alice database. Ignores duplicates, services and admin.
    def migrate(self):
        db = self.db
        all_users = self.os.get_user()
        services = ['ceilometer', 'nova', 'neutron', 'glance', 'keystone', 'admin']
        for user in all_users:
            if user.name not in services:
                try:
                    found = db.select_by('email', user.email)
                    if found is None:
                        u = User()
                        u.user_id = user.id
                        u.project_id = user.default_project_id
                        u.email = user.email
                        u.enabled = user.enabled
                        u.name = user.name
                        u.expiration = (timestring.Range("next 30d")).end
                        u.history.register()
                        db.insert(u)
                except:
                    continue

    # LIST
    # Retrieves users given a filter from the database and displays
    # them in a table manner.
    def list(self, highlight, filter):
        db = self.db
        fetch = None
        # List filter select
        if filter is not None and filter != "disabled":
            fetch = db.find_enabled(True)
        elif filter == "disabled":
            fetch = db.find_enabled(False)
        else:
            fetch = db.select_all()
        t = PrettyTable(['ID', 'Name', 'Email', 'Status', 'Expires in'])
        t.borders = False
        t.vrules = 2
        for row in fetch:
            u = User()
            u.load(row)
            if u.expiration is None and u.enabled is True:
                exp = u.history.last_seen() + "30d"
                self.db.add_expiration(u.id, exp)
                u.expiration = exp
            if u.enabled is True:
                elapsed = (u.history.time_left()).elapse
                # String too long
                try:
                    elapsed = elapsed[:elapsed.index("hour") + 5]
                except:
                    elapsed = elapsed[:elapsed.index("min") + 7]
                # On hold, expired or active
                state = DateParser(u.history.time_left()).state
                v = [u.id, u.name, u.email, state.title(), elapsed]
                # Enables highlight
                if highlight is True:
                    if state == 'expired':
                        self.__line_highlight(v, BGRED)
                    if state == 'hold':
                        self.__line_highlight(v, BGYELLOW)
                else:
                    if state == 'expired':
                        self.__line_color(v, FRRED)
                    if state == 'hold':
                        self.__line_color(v, FRYELLOW)
                if str(filter) == state or filter is None or filter == "enabled":
                    t.add_row(v)
            else:
                state = 'Disabled'
                t.add_row([FRDIM(u.id), FRDIM(u.name), FRDIM(u.email),
                           FRDIM(state), FRDIM("---")])
        print t
def __init__(self):
    self.os = OpenstackBridge()
    self.db = DBManager()
def main():
    if len(sys.argv) > 1:
        db = DBManager()
        command = sys.argv[1]
        if command == "do-everything":
            if len(sys.argv) > 2:
                path = sys.argv[2]
            else:
                path = input("Please enter the path to the directory to load: ")
            db.destroy()
            db = DBManager()
            validate = input("Do you want to validate the CSV files? This takes longer and has probably already been done. (Y/N): ")
            if validate.lower() == 'y' or validate.lower() == "yes":
                validateFolderCSV(path)
            loadFolder(db, path)
            loadGoldenSet(db)
            print("\nDatabase populated!\n")
            print("Generating search crossover report. Will be known as 'crossover-report.csv'")
            report = reports.generateReportCrossover(db)
            report = parse.reportToCSV(report)
            with open("crossover-report.csv", "w") as out:
                out.write(report)
            print("Generating search count by year report. Will be known as 'year-report.csv'")
            report = reports.generateReportByYear(db)
            report = parse.reportToCSV(report)
            with open("year-report.csv", "w") as out:
                out.write(report)
            print("Generating author count by search report. Will be known as 'author-report.csv'")
            report = reports.generateAuthorReport(db)
            report = parse.reportToCSV(report)
            with open("author-report.csv", "w") as out:
                out.write(report)
        elif command == "report-crossover":
            report = reports.generateReportCrossover(db)
            report = parse.reportToCSV(report)
            print(report)
        elif command == "report-by-year":
            report = reports.generateReportByYear(db)
            report = parse.reportToCSV(report)
            print(report)
        elif command == "report-by-authors":
            report = reports.generateAuthorReport(db)
            report = parse.reportToCSV(report)
            print(report)
        elif command == "compile-folder":
            if len(sys.argv) > 2:
                path = sys.argv[2]
            else:
                path = input("Please enter the path to the folder to compile: ")
            while not isdir(path):
                path = input("Error: The input path must be a directory.\nPlease enter another path: ")
            if len(sys.argv) > 3:
                outputFile = sys.argv[3]
            else:
                outputFile = input("Please enter the name of the output file: ")
            filePaths = getFilePaths(path)
            if len(filePaths) > 0:
                contents = parse.compileFolder(filePaths)
                outFile = open(outputFile, 'w')
                outFile.write(contents)
                outFile.close()
            else:
                print("The directory you entered was empty. So nothing happened.")
        elif command == "validateCSV":
            # For a given directory path, traverses through each file and
            # ensures that they each follow valid CSV format
            path = sys.argv[2]
            validateFolderCSV(path)
        elif command == "load":
            # Loads an entire folder of CSV files and the golden set
            # into the database
            path = sys.argv[2]
            loadFolder(db, path)
            loadGoldenSet(db)
        elif command == "shell":
            # Run the shell
            shell.run(db)
        else:
            print("Unsupported command. Please see README for supported operations.")
        db.shutdown()
    else:
        print("No commands entered. Please see README for supported operations.")
def set_max_quality(update, context):
    db = DBManager()
    db.set_chat_settings(update.message.chat_id, 'max')
    update.message.reply_text("Max quality is set")
def start(update, context):
    """Send a message when the command /start is issued."""
    db = DBManager()
    db.create_new_user(update.message.chat_id)
    update.message.reply_text(
        'Hi! This is a youtube downloader bot. Enter a link to download video.')
    presense = self.stats.presense
    strength = self.stats.strength
    constitution = self.stats.constitution
    willpower = self.stats.willpower
    body = self.stats.body
    move = self.stats.move
    packed_stats = {'int': intelligence, 'ref': reflexes, 'tech': technique,
                    'dex': dexterity, 'pre': presense, 'str': strength,
                    'con': constitution, 'will': willpower, 'body': body,
                    'move': move}
    models.upload_to_aws(name=name, role=role, packed_stats=packed_stats)
    db_mgr = DBManager()
    db_mgr.table_primary_stats.save_character(character_name=name, role=role,
                                              intelligence=intelligence,
                                              reflexes=reflexes,
                                              technique=technique,
                                              dexterity=dexterity,
                                              presense=presense,
                                              strength=strength,
                                              constitution=constitution,
                                              willpower=willpower,
                                              body=body, move=move)'''

    view = View(
        Item('character_name', style='custom', show_label=False),
        Item('stats', style='custom'),
        Item('save', show_label=False),
        Item('load', show_label=False)
        # Item('upload', show_label=False)
    )


if __name__ == '__main__':
    s = StandaloneContainer()
    db_mgr = DBManager()
    s.configure_traits()
class User():
    '''Used to represent one User in the app.

    There is a class wide STANDINGS attribute which holds the records for each team.
    This is used to sort teams based on their records and retain the same order
    across all instances of User.
    '''
    STANDINGS = None

    def __init__(self, **kwargs):
        if self.STANDINGS is None:
            raise Exception('You must set the standings dictionary first')
        self.name = kwargs.get('name')
        # self.teams = kwargs.get('teams')
        self.wins = 0
        self.losses = 0
        self.ties = 0
        self.db = DBManager()
        self.teams = self.sort_by_rec(kwargs.get('teams'))
        self.fill_in_record()

    def __repr__(self):
        return 'User({})'.format(self.name)

    @classmethod
    def set_standings(cls, standings):
        cls.STANDINGS = standings

    def set_name(self, name):
        self.name = name

    def set_teams(self, teams):
        self.teams = teams

    def fill_in_record(self):
        for team in self.teams:
            win, loss, tie = self.STANDINGS.get(team[0])
            self.wins += win
            self.losses += loss
            self.ties += tie
        self.wins = int(self.wins)
        self.losses = int(self.losses)
        self.ties = int(self.ties)

    def get_win_percentage(self):
        try:
            return self.wins / sum((self.wins, self.losses, self.ties))
        except ZeroDivisionError:
            return 0.0

    def get_record_totals(self):
        '''Zip win, loss, and tie together and sum to get a quick total
        of all teams scores'''
        win, loss, tie = (0 for i in xrange(3))
        for team in self.teams:
            # zip win, loss, tie together and sum
            # them to get a quick total
            win, loss, tie = (sum(i) for i in zip(
                (win, loss, tie), self.db.get_record(team)))
        return win, loss, tie

    def sort_by_rec(self, teams):
        def _cmp(team):
            w, l, t = self.STANDINGS.get(team)
            try:
                return w / sum((w, l, t))
            except ZeroDivisionError:
                return 0.0
        teams = sorted(teams, key=_cmp, reverse=True)
        return zip(teams, [self.STANDINGS.get(t) for t in teams])
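# --- Hypothetical usage sketch, not part of the original source ---
# STANDINGS maps a team name to its (wins, losses, ties) record and must be set
# before any User is constructed. The team names and records below are made up,
# and User() instantiates the project's DBManager internally, so that module
# must be importable for this to run.
User.set_standings({'Lions': (3, 1, 0), 'Bears': (1, 3, 0)})
u = User(name='example', teams=['Lions', 'Bears'])
print u.get_win_percentage()  # aggregate win percentage across the user's teams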
def saveValue(self, data):
    db = DBManager()
    db.connect()
    data['waterunit'] = str(self.unit)
    data['totalunit'] = str(self.totalUnit)
    return db.save(data)
class ProtobufServer(obj.BetaChatAppServicer):
    '''
    ProtobufServer wraps the autogenerated protobuf code for server-side functions.
    This server implements the handling of the rpc functions specified in service.proto.
    Each function is passed a request argument, which is a protobuf message object
    (e.g. a CMessage or User) defined in service.proto. The server communicates with
    the database and returns a protobuf message object or list of these objects
    (e.g. Response, Group, User) so that the protobuf client wrapper can understand
    the server's response.
    '''

    def __init__(self):
        '''
        Initializes the protobuf server by setting up a database manager object
        for the server to communicate with.

        :return: ProtobufServer object
        '''
        self.db = DBManager()

    @add_logging
    def rpc_send_message(self, request, context):
        '''
        Sends the specified message in the message object to the specified user
        in the message object by inserting this message into the user table in
        the database.

        :param request: CMessage protobuf object.
        :return: protobuf Response object
        '''
        try:
            to_id = request.to_id
            from_id = self.db.get_user_id(request.from_name)
            msg = request.msg
            self.db.insert_message(to_id, from_id, msg)
        except Exception as e:
            return obj.Response(errno=1, msg=e)
        return obj.Response(errno=0, msg="success!\n")

    @add_logging
    @list_to_protobuf(obj.CMessage)
    def rpc_get_messages(self, request, context):
        '''
        Retrieves all the messages stored in the messages database table under
        this user's name.

        :param request: User protobuf object.
        :return: protobuf Message object stream on success, or empty list on error
        '''
        try:
            msgs = self.db.get_messages(request.u_id, request.checkpoint)
        except Exception as e:
            return []
        return msgs

    @add_logging
    def rpc_create_conversation(self, request, context):
        '''
        Creates a new conversation between two users by inserting a new
        "virtual group" into the database, and returns the group object
        corresponding to the new conversation.

        :param request: UserPair protobuf object.
        :return: Group protobuf object on success or error string
        '''
        try:
            u1 = self.db.get_user_id(request.username1)
            u2 = self.db.get_user_id(request.username2)
            gid = self.db.get_or_create_vgid(u1, u2)
            return obj.Group(g_id=gid)
        except Exception as e:
            return obj.Response(errno=1, msg=str(e))

    @add_logging
    def rpc_create_group(self, request, context):
        '''
        Creates a new group with 0 members by inserting a new group into the
        database. The group must have a unique identifier, or else an error
        is returned.

        :param request: Group protobuf object.
        :return: Group protobuf object on success, empty Group (with no name) on error
        '''
        g_name = request.g_name
        try:
            self.db.create_group(g_name)
            g_id = self.db.get_group_id(g_name)
        except Exception as e:
            return obj.Group(g_id=0, g_name="")
        return obj.Group(g_id=g_id, g_name=g_name)

    @add_logging
    def rpc_create_account(self, request, context):
        '''
        Creates a new user account. The user must have a unique identifier,
        or else an error is returned.

        :param request: User protobuf object.
        :return: User protobuf object on success, empty User (with no name) on error
        '''
        username = request.username
        try:
            self.db.create_account(username)
            u_id = self.db.get_user_id(username)
        except Exception as e:
            return obj.User(u_id=0, username="")
        return obj.User(u_id=u_id, username=username)

    @add_logging
    def rpc_remove_account(self, request, context):
        '''
        Removes the account corresponding to the specified user.

        :param request: User protobuf object.
        :return: protobuf Response object indicating success or error
        '''
        try:
            self.db.remove_account(request.username)
        except Exception as e:
            return obj.Response(errno=1, msg=e)
        return obj.Response(errno=0, msg="success!\n")

    @add_logging
    def rpc_edit_group_name(self, request, context):
        '''
        Modify the group name of the specified group.

        :param request: Group protobuf object.
        :return: protobuf Response object indicating success or error
        '''
        try:
            self.db.edit_group_name(request.g_name, request.new_name)
        except Exception as e:
            return obj.Response(errno=1, msg=e)
        return obj.Response(errno=0, msg="success!\n")

    @add_logging
    def rpc_remove_group_member(self, request, context):
        '''
        Remove a member from the specified group.

        :param request: Group protobuf object (with "edit_member_name" field specified)
        :return: protobuf Response object indicating success or error
        '''
        try:
            self.db.remove_group_member(request.g_name, request.edit_member_name)
        except Exception as e:
            return obj.Response(errno=1, msg=e)
        return obj.Response(errno=0, msg="success!\n")

    @add_logging
    def rpc_add_group_member(self, request, context):
        '''
        Add a member to the specified group.

        :param request: Group protobuf object (with "edit_member_name" field specified)
        :return: protobuf Response object indicating success or error
        '''
        try:
            self.db.add_group_member(request.g_name, request.edit_member_name)
        except Exception as e:
            return obj.Response(errno=1, msg=e)
        return obj.Response(errno=0, msg="success!\n")

    @add_logging
    @list_to_protobuf(obj.User)
    def rpc_list_group_members(self, request, context):
        '''
        List members in the specified group.

        :param request: Group protobuf object (with "edit_member_name" field specified)
        :return: stream of protobuf User objects (NULL User returned if error)
        '''
        try:
            users = self.db.get_group_members(request.g_name)
        except Exception as e:
            return [{'username': '******'}]
        return users

    @add_logging
    @list_to_protobuf(obj.Group)
    def rpc_list_groups(self, request, context):
        '''
        List groups matching the specified pattern.

        :param request: Pattern protobuf object
        :return: stream of protobuf Group objects (NULL Group returned if error)
        '''
        try:
            groups = self.db.get_groups(request.pattern)
        except Exception as e:
            return [{'g_id': 0, 'g_name': 'NULL'}]
        return groups

    @add_logging
    @list_to_protobuf(obj.User)
    def rpc_list_accounts(self, request, context):
        '''
        List users matching the specified pattern.

        :param request: Pattern protobuf object
        :return: stream of protobuf User objects (NULL User returned if error)
        '''
        try:
            users = self.db.get_accounts(request.pattern)
        except Exception as e:
            return [{'username': '******'}]
        return users
def __init__(self):
    self.bot = TeleBot(self.token)
    logger.setLevel(logging.INFO)  # Outputs debug messages to console.
    self.db = DBManager()
    self.handler = Handler(self.bot)
    ssh_private_key = '/home/ethan/.ssh/mac_id_rsa'
else:
    ssh_private_key = '/Users/ethan/.ssh/id_rsa'

server = SSHTunnelForwarder(
    ("121.41.12.158", 22),
    ssh_username='******',
    ssh_pkey=ssh_private_key,
    remote_bind_address=(db_params['host'], 3306)
)
server.start()
db_params['host'] = server.local_bind_host
db_params['port'] = server.local_bind_port

db = DBManager(**db_params)
storage = Storage(**oss_params)
hashnet = Hashnet()
hashnet.load()

print("Scanning database for images not being processed...")
session = db.Session()
results = session.query(Image).filter(
    or_(
        Image.features == None,
        Image.hash == None
    )
).all()
n = len(results)
print("Scanning complete! {} in total.\n".format(n))
def __init__(self, master):
    # Main window
    self.master = master
    self.master.title('Simple DB viewer')
    self.master.minsize(width=640, height=480)

    menubar = Menu(self.master)
    menu = Menu(self.master, tearoff=0)
    menu.add_command(label='Создать базу данных')    # "Create database"
    menu.add_command(label='Открыть базу данных')    # "Open database"
    menu.add_separator()
    menu.add_command(label='Выход')                  # "Exit"
    menubar.add_cascade(label='Файл', menu=menu)     # "File"
    menu = Menu(self.master, tearoff=0)
    menu.add_command(label='Записать изменения')     # "Save changes"
    menu.add_command(label='Удалить изменения')      # "Discard changes"
    menubar.add_cascade(label='База данных', menu=menu)  # "Database"
    self.master.config(menu=menubar)

    tree_frame = ttk.Frame(self.master)
    btns_frame = ttk.Frame(self.master)

    # Connect to DB
    self.db = DBManager('db-name')

    # Combobox with select of table
    self.combo = ttk.Combobox(btns_frame)
    self.combo['values'] = ()
    for row in self.db.get_tables_names():
        self.combo['values'] += row
    self.combo.current(0)
    self.combo.bind('<<ComboboxSelected>>', self._combo_update)

    self.tree = ttk.Treeview(tree_frame, selectmode='extended')
    tree_x_scroll = ttk.Scrollbar(tree_frame, orient='hor', command=self.tree.xview)
    tree_y_scroll = ttk.Scrollbar(tree_frame, orient='vert', command=self.tree.yview)
    self.tree.configure(yscrollcommand=tree_y_scroll.set)
    self.tree.configure(xscrollcommand=tree_x_scroll.set)
    tree_y_scroll.pack(side='right', fill='y')
    self.tree.pack(side='top', fill='both', expand=True)
    tree_x_scroll.pack(side='bottom', fill='x')
    self._combo_update()

    # Buttons
    add = ttk.Button(btns_frame, text='Добавить запись')    # "Add record"
    rm = ttk.Button(btns_frame, text='Удалить запись')      # "Delete record"
    run_sql = ttk.Button(btns_frame, text='Выполнить SQL')  # "Run SQL"

    # Packs all elements
    tree_frame.pack(side='top', fill='both', expand=True)
    btns_frame.pack(side='bottom', fill='both')
    self.combo.pack(side='left', padx=5, pady=5)
    add.pack(side='left', padx=5, pady=5)
    rm.pack(side='left', padx=5, pady=5)
    run_sql.pack(side='left', padx=5, pady=5)
def __init__(self):
    self.hashnet = Hashnet()
    self.hashnet.load()
    self.db = DBManager(**db_params)