class AuthResource(object):
    """Falcon resource handling POST login requests against the user DB."""

    def __init__(self):
        self.database = Database()
        self.logger = logging.getLogger("AuthResource")

    def on_post(self, req, resp):
        """Authenticate a user and respond with a fresh auth document.

        Expects a JSON body (validated by AUTH_SCHEMA) with "username" and
        "password". On success the saved auth doc is returned with its id
        duplicated under "token"; on failure raises falcon.HTTPUnauthorized.
        """
        raw_json = req.context['doc']
        AUTH_SCHEMA.validate(raw_json)
        username = raw_json.get("username")
        password = raw_json.get("password")
        if self.is_valid_login(username, password):
            # lazy %-args: formatting is skipped when the level is disabled
            self.logger.info("Successful login by user %s", username)
            resp.body = self.create_auth_doc(username)
            # expose the generated document id as the auth token
            resp.body["token"] = resp.body["_id"]
        else:
            self.logger.warning("Failed login for user %s", username)
            # bug fix: user-facing typo "credentails" -> "credentials"
            raise falcon.HTTPUnauthorized(
                "Invalid credentials",
                "Failed to authenticate due to invalid username or password")

    def is_valid_login(self, username, password):
        """Return True when *username* exists and *password* hashes to the
        stored encrypted password using the stored salt."""
        user = self.database.get_doc(DB_USER, username, default=None)
        if user is None:
            return False
        hashed_password = hash(password, user[make_private("password_salt")])
        return user[make_private("encrypted_password")] == hashed_password

    def create_auth_doc(self, username):
        """Create, persist and return a new auth document for *username*."""
        self.logger.info("Creating auth doc for user %s", username)
        doc = {
            "_id": generate_auth_token(),
            # expiry window comes from config; presumably a timedelta -- confirm
            "username": username,
            "expires": datetime.datetime.now() + CONFIG["auth"]["expiry"]
        }
        return self.database.save_doc(DB_AUTH, doc)
def get_person(person_id):
    """Return a JSON payload for the person with *person_id*, or abort 500.

    NOTE(review): the broad handler hides the real failure; consider logging
    the exception before aborting.
    """
    try:
        db = Database()
        person = db.get_person(person_id)
        return jsonify({'person': person})
    except Exception:
        # bug fix: was a bare except, which also trapped SystemExit and
        # KeyboardInterrupt; those now propagate
        abort(500)
def get_leaderboard(numResults=None):
    """Return leaderboard entries sorted by descending score.

    When *numResults* is truthy, limit the result set to that many rows.
    """
    common_args = (COLLECTION, {}, "score", -1)
    if numResults:
        return Database.find(*(common_args + (numResults,)))
    return Database.find(*common_args)
def __init__(self, eui):
    """Load gateway state for *eui* (a hex string) from the lora_network DB.

    Sets self.exist to True and populates the gateway attributes when a
    matching row is found; otherwise leaves self.exist False.
    """
    self.eui = eui
    self.exist = False
    db = Database("lora_network")
    if db is not None:
        # int(eui, 16) both validates the hex string and normalises it for
        # the numeric comparison in the query
        result = db.query(
            "SELECT * FROM gateways WHERE eui=" + str(int(self.eui, 16)))
        if result is not None:
            self.exist = True
            row = result[0]
            # attribute names in SELECT * column order, starting at column 1
            # (column 0 is the eui key itself and is not stored)
            column_names = (
                "region", "maxTxPower_dBm", "allowGpsToSetPosition", "time",
                "latitude", "longitude", "altitude", "ntwkMaxDelayUp_ms",
                "ntwkMaxDelayDn_ms", "uppacketsreceived",
                "gooduppacketsreceived", "uppacketsforwarded",
                "uppacketsacknowedgedratio", "downpacketsreceived",
                "packetstransmitted", "lastuppacketid", "dspVersion",
                "fpgaVersion", "halVersion",
            )
            for offset, attr in enumerate(column_names, start=1):
                setattr(self, attr, row[offset])
class Team():
    """Loads team records from tbl_teams through the shared Database wrapper."""

    def __init__(self):
        # accumulated result rows; NOTE(review): load() never appends to this,
        # so it always returns the empty list -- confirm intended behaviour
        self.data = []
        self.db = Database()
        self.db.connect()

    def load(self, tid):
        """Fetch the team row with ID *tid*, printing debug output per field.

        Returns self.data, which is currently never populated (see __init__).
        """
        sql = ('SELECT * '
               'FROM tbl_teams t '
               'WHERE t.ID = %s')
        print('Query defined')
        rs = self.db.query(sql, (tid, ))
        print('Query executed')
        if (rs.with_rows):
            records = rs.fetchall()
            print('Data retrieved')
            # debug dump: row index, row type, then each column value
            for index, item in enumerate(records):
                print(str(index))
                print(str(type(item)))
                for key, value in enumerate(item):
                    print(str(value))
                    # self.data.index = self.data.item[index]
        return self.data
class ProfileThread(threading.Thread): def __init__(self, thread_name, profiles_queue, visited_profiles_queue): threading.Thread.__init__(self, name=thread_name) self.api = TwitterApi() self.db = Database() self.profiles_queue = profiles_queue self.visited_profiles_queue = visited_profiles_queue def run(self): while self.profiles_queue.qsize() >= 0: if self.profiles_queue.qsize() == 0: time.sleep(120) continue uid = self.profiles_queue.get() if self.db.is_existed(uid, "profile"): self.profiles_queue.task_done() print "skip ",uid continue print "Profiles...I am alive", uid user_profile = self.api.get_user_profile(uid) if user_profile == None: self.db.record_failure(failed_proile=uid) self.profiles_queue.task_done() else: self.db.insert_profile(user_profile) self.visited_profiles_queue.put(uid) #self.db.update_profile_progress(self.profiles_queue, self.visited_profiles_queue) self.profiles_queue.task_done() print "%s Profiles Finished:\t\t %d" % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), self.visited_profiles_queue.qsize()) print "%s Profiles Left:\t\t %d" % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), self.profiles_queue.qsize()) time.sleep(6)
class FollowingThread(threading.Thread): def __init__(self, thread_name, followings_queue, profiles_queue, visited_followings_queue, visited_profiles_queue): threading.Thread.__init__(self, name=thread_name) self.api = TwitterApi() self.db = Database() self.followings_queue = followings_queue self.profiles_queue = profiles_queue self.visited_followings_queue = visited_followings_queue self.visited_profiles_queue = visited_profiles_queue def run(self): while self.followings_queue.qsize() > 0: uid = self.followings_queue.get() if self.db.is_existed(uid, "following_ids"): self.followings_queue.task_done() print "skip ", uid continue if type(uid) is types.IntType: print "Followings...I am alive ", uid followings = self.api.get_user_followings(uid=uid) else: followings = self.api.get_user_followings(sname=uid) # download followings ids if followings == None: self.db.record_failure(failed_following=uid) # time.sleep(61) self.followings_queue.task_done() continue self.db.insert_following(uid, followings) self.visited_followings_queue.put(uid) # add ids to task queues followings_for_profiles_queue = self.exclude_processed_profiles(followings) followings_for_followings_queue = self.exclude_processed_followings(followings) for id in followings_for_profiles_queue: self.profiles_queue.put(id) for id in followings_for_followings_queue: self.followings_queue.put(id) #self.db.update_following_progress(self.followings_queue, self.visited_followings_queue) self.followings_queue.task_done() print "%s Followings Finished:\t %d" % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), self.visited_followings_queue.qsize()) print "%s Followings Left:\t\t %d" % (datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), self.followings_queue.qsize()) # time.sleep(61) def exclude_processed_profiles(self, followings): profiles_set = set(self.visited_profiles_queue.queue).union(set(self.profiles_queue.queue)) return set(followings).difference(profiles_set) def 
exclude_processed_followings(self, followings): followings_set = set(self.visited_followings_queue.queue).union(set(self.followings_queue.queue)) return set(followings).difference(followings_set)
def store(self, episode: Episode):
    """
    Stores the given episode in its respective store directory and renames
    it to its store filename according to the configurator assigned with
    the organizer. The episode's state is set to STORED only after the
    file move completes.

    :param episode: episode to be stored.
    """
    downloaded_dir = self.config.episode_downloaded_dir(episode)
    video_file = videofiles.find_episode_file(downloaded_dir)
    # create the store directory
    store_dir = self.config.episode_store_dir(episode)
    os.makedirs(store_dir, exist_ok=True)
    # build episode path from its store directory, filename and the video
    # file extension
    filename = self.config.episode_filename(episode)
    extension = os.path.splitext(video_file)[1]
    complete_filename = filename + extension
    store_path = os.path.join(store_dir, complete_filename)
    video_path = os.path.join(downloaded_dir, video_file)
    # TODO: implement the moving in a separate thread
    # the move should be done in a separate thread because it will work
    # as a copy (very slow) if the destination folder is not in the same
    # filesystem
    self._move_episode_file(video_path, store_path)
    # episode must be only marked as STORED after finishing moving the
    # episode file
    database = Database(self.database_file)
    database.set_state(episode, EpisodeState.STORED)
def create_tables(): return #TODO remove database = Database() connection, metadata = database.connect() print(Offer.__table__) print(models.Base.metadata.create_all(connection))
def setUp(self):
    """Initialise the database asynchronously before each test.

    Database.init invokes _response on the io_loop; the test blocks in
    self.wait() until the callback calls self.stop().
    """
    super(DatabaseTestCase, self).setUp()
    def _response(db):
        # NOTE(review): the callback receives a callable and instantiates
        # it -- presumably a db factory; confirm against Database.init
        self.db = db()
        self.stop()
    Database.init(self.io_loop, _response)
    self.wait()
def __init__(self):
    """Set up the periodic worker thread and preload the task list."""
    threading.Thread.__init__(self)
    # event used to signal the thread to stop
    self._finished = threading.Event()
    # seconds between runs
    self._interval = 15.0
    DB = Database(TASK_MANAGER_NAME)
    self.COLL = DB.use_coll(TASK_COLL)
    # snapshot of all tasks at construction time
    self.tasks_list = list(self.COLL.find())
def mput_colonys(colonys):
    """Bulk-insert colony records, batching 100 rows per INSERT statement.

    All entries in *colonys* must share the same tablename/items layout.
    Ids are assigned sequentially from the table's current max id + 1.
    """
    table = colonys[0].tablename
    sql = "INSERT INTO %s" % table
    sql += " (%s)" % ", ".join(colonys[0].items)
    sql += " VALUES "
    colony_id = get_maxid(table) + 1
    vals = []
    for colony in colonys:
        vals.append("(%d,%d,%d,%d,'%s','%s','%s','%s')" % (
            colony_id, colony.exp_id, colony.col, colony.row,
            colony.location, colony.areas, colony.masss, colony.cmasss))
        colony_id += 1
    sqls = []
    capa = 100  # capacity: rows per INSERT statement
    # slicing by step fixes two defects in the original batching:
    #  1) `n/capa` is float division under Python 3 (range() would raise)
    #  2) when len(vals) was an exact multiple of capa, an extra INSERT
    #     with an empty VALUES list (invalid SQL) was emitted
    for start in range(0, len(vals), capa):
        sqls.append(sql + ", ".join(vals[start:start + capa]))
    db = Database()
    db.execute_sqls(table, sqls)
def mput_growths(growths):
    """Bulk-insert growth records, batching 100 rows per INSERT statement.

    All entries in *growths* must share the same tablename/items layout.
    Ids are assigned sequentially from the table's current max id + 1.
    """
    table = growths[0].tablename
    sql = "INSERT INTO %s" % table
    sql += " (%s)" % ", ".join(growths[0].items)
    sql += " VALUES "
    growth_id = get_maxid(table) + 1
    vals = []
    for growth in growths:
        vals.append("(%d,%d,%d,%d,%s,%f,%f,%f)" % (
            growth_id, growth.exp_id, growth.col, growth.row,
            growth.con, growth.ltg, growth.mgr, growth.spg))
        growth_id += 1
    sqls = []
    capa = 100  # capacity: rows per INSERT statement
    # slicing by step fixes two defects in the original batching:
    #  1) `n/capa` is float division under Python 3 (range() would raise)
    #  2) when len(vals) was an exact multiple of capa, an extra INSERT
    #     with an empty VALUES list (invalid SQL) was emitted
    for start in range(0, len(vals), capa):
        sqls.append(sql + ", ".join(vals[start:start + capa]))
    db = Database()
    db.execute_sqls(table, sqls)
def get(self, id): db = Database() db.commit_statement('''select * from playlists where id = ?''', [int(id)]) row = db.fetchone() if row == None: raise DogError, 'Could not get playlist with id=' + id return Playlist(id, row['name'], db)
def get_by_name(self, name): db = Database() db.commit_statement('''select * from playlists where name = ?''', [name]) row = db.fetchone() if row == None: raise DogError, 'Could not get playlist with id=' + id return Playlist(row['id'], row['name'], db)
def start(self):
    """
    Run a simple daemon for now: every 15 seconds, record each visible
    process (optionally filtered by the configured process names).
    """
    monitor_all_processes = True
    db = Database()
    conf = Config('../tests/pidscribe.ini')
    while True:
        for process in self.processes():
            if not monitor_all_processes:
                # restrict to processes whose name appears in the config
                if not [p for p in conf.processes if p == process.name()]:
                    continue
            try:
                db.record_process(LoadedProcess(process))
            except ProcessError:
                # process vanished or is unreadable; skip it
                pass
        print('Sleeping...')
        sleep(15)
def __init__(self):
    """Initialise storage, discover and activate 'Protocols' plugins, then
    start this daemon thread."""
    threading.Thread.__init__(self)
    init_keyval_storage(conf.keyval_db)
    _Database.__init__(self)
    self.dbus_service = None
    self.values={}
    # activated plugin objects, in activation order
    self.protocols=[]
    self.setDaemon(True)
    # Initialize and activate all plugins of 'Protocols' category
    # NOTE(review): the manager is stored in a module-level global so other
    # code can reach it -- confirm that is intentional
    global manager
    manager = PluginManager(categories_filter={ "Protocols": protocols})
    manager.setPluginPlaces(conf.plugin_dirs)
    manager.collectPlugins()
    activated_plugins = []
    failed_plugins= []
    plugins = {plugin.name : plugin for plugin in manager.getPluginsOfCategory('Protocols')}
    # activate only the plugins listed in the config, in config order
    for plugin_name in conf.enabled_plugins:
        if plugin_name in plugins:
            try:
                plugin = plugins[plugin_name]
                plugin.plugin_object.activate(conf.plugin_conf[plugin.name], globals(), self)
                self.protocols.append(plugin)
                activated_plugins.append(plugin.name)
            except Exception as e:
                failed_plugins.append(plugin.name)
                # in debug mode fail fast; otherwise log and keep going
                if conf.command == 'debug':
                    raise
                else:
                    logger.info('%s plugin error: %s'%(plugin_name, str(e)) )
    if activated_plugins:
        logger.info('Activated plugins: %s'%', '.join(activated_plugins))
    if failed_plugins:
        logger.info('Failed to activate plugins: %s'%', '.join(failed_plugins))
    self.start()
def do_refresh_series(series_id_str, config, debug): try: thetvdb = TheTvDb(config, debug) database = Database(config, debug) try: series_id = int(series_id_str) except: print "Argument is not a valid series id: %s" % (series_id_str, ) return 1 series = database.get_series(series_id) if series is not None: new_series = thetvdb.get_series_info(series.id) new_series.watch = series.watch database.clear_series(series.id) database.clear_all_episodes(series.id) database.add_series(new_series) episodes = get_episodes(thetvdb, database, new_series) else: new_series = thetvdb.get_series_info(series_id) series = new_series database.add_series(new_series) episodes = get_episodes(thetvdb, database, new_series) print "Metadata for series '%s' and its episodes has been cleared from the local cache and reloaded from TheTvDb.com." % (series.title, ) return 0 except: traceback.print_exc() return 11
def __init__(self, path):
    """Open the snippet database at *path*, then fall back to the configured
    default database or the first known database if one exists."""
    SnippetManagerBase.__init__(self)
    self._BD = Database(path)
    # re-fetch the snippets from this new database
    self._Snippets = self.getAllSnippets()
    # read, if present, the name of the database to load by default
    defaultBdName = self._Configs.defaultBdName
    if defaultBdName :
        # get the path index of the database to load
        # NOTE(review): getPathBD here vs getPathDB below -- two differently
        # spelled helpers; confirm both exist and are not a typo
        pathBD = self.getPathBD( self.getIndexBdName(defaultBdName))
        # create the instance for the catalog in question
        self._BD = Database(pathBD)
        # instance created correctly
        self._estado = True
    elif self._AllPathDBs:
        # otherwise load the first database found in the list of databases
        self._BD = Database( self.getPathDB(0))
        # dictionary with all the Snippet object instances
        # NOTE(review): assigns self.Snippets (no underscore) while the
        # branch above assigns self._Snippets -- confirm which is intended
        self.Snippets = self.getAllSnippets()
        # instance created correctly
        self._estado = True
def render_POST(self, request):
    """Handle the book-edit form POST: sanitize fields, validate, persist,
    then redirect to the book's page (Twisted async response)."""
    error = ''
    user = cgi.escape(request.getCookie('user'))
    title = cgi.escape(request.args['title'][0])
    try:
        booktype = cgi.escape(request.args['type'][0])
    except (KeyError, IndexError):
        # was a bare except; only a missing 'type' field is expected here
        booktype = ''
        error = 'Select Fiction or Non-fiction'
    rating = cgi.escape(request.args['rating'][0])
    author = cgi.escape(request.args['author'][0])
    numpages = cgi.escape(request.args['numpages'][0])
    yearpub = cgi.escape(request.args['yearpub'][0])
    yearread = cgi.escape(request.args['yearread'][0])
    if not title:
        # note: a missing title deliberately overrides the booktype error
        error = 'Book must have a title'
    if error:
        # re-render the form with the entered values and the error message
        return self.write_form(title, booktype, rating, author, numpages,
                               yearpub, yearread, error)
    db = Database()
    db.modBook(self.idnum, user, title, booktype, rating, author, numpages,
               yearpub, yearread)
    request.redirect('/books/' + self.idnum)
    request.finish()
    return server.NOT_DONE_YET
def main():
    """Application entry point: build the Qt app, show the main window,
    run the event loop, then flush pending model changes and exit."""
    try:
        app = QApplication(sys.argv)
        #app.setStyle('cleanlooks')
        app.setStyleSheet(qdarkstyle.load_stylesheet())
        db = Database('sample.db')
        icon = QIcon('app.ico')
        app.setWindowIcon(icon)
        # set the default font for the app
        app.setFont(MEDIUM_FONT)
        app.setStyle(BUTTON_STYLE)
        main_view = MainView()
        main_view.showMaximized()
        main_view.show()
        # blocks here until the last window closes
        app.exec_()
        # clean up and exit code
        for model in main_view.models:
            model.submitAll() # commit all pending changes to all models
        db.close()
        app.quit()
        sys.exit(0)
        #os._exit(0)
    except SystemExit:
        # raised by sys.exit() above; treated as the normal shutdown path
        print("Closing Window...")
    except Exception as e:
        # top-level boundary: report anything unexpected instead of crashing
        print(str(e))
def check_sources(project_name): '''check the sources status''' project_db = Database(project_name) project_db.create_colls() sources_nb = project_db.sources.count() error_nb = len([n["status"][-1] for n in project_db.sources.find() if n["status"][-1] is False]) ok_nb = len([n["status"][-1] for n in project_db.sources.find() if n["status"][-1] is True]) http_error = len([n["code"][-1] for n in project_db.sources.find() if n["code"][-1] == 400]) content_error = len([n["code"][-1] for n in project_db.sources.find() if n["code"][-1] == 404]) forbidden_error = len([n["code"][-1] for n in project_db.sources.find() if n["code"][-1] == 403]) extraction_error = len([n["code"][-1] for n in project_db.sources.find() if n["code"][-1] == 700]) others = len([n["code"][-1] for n in project_db.sources.find() if n["code"][-1] not in [100, 700, 400, 403, 404]]) print "Error nb: %d sources on %d total" %(error_nb, sources_nb) print "Error type:" print "- %d network errors (impossible to acess to the website)" %(http_error+forbidden_error) print "- %d errors because page is not HTML (PDF or Video, or img ...)" %content_error print "- %d errors on extracting the text" %extraction_error print "- %d errors undefined" %others print "\n" print "Details:" for n in project_db.sources.find(): if n["code"][-1] != 100 and n["code"][-1] not in [700, 400, 403, 404]: print "\t-", n["url"] print n["code"][-1],"\t", n["msg"][-1]
def test_database(self):
    """Smoke test: a connection can be opened and closed with the configured
    credentials; any DatabaseError fails the test with its message."""
    try:
        db=Database(self.config['Database']['host'],self.config['Database']['dbname'],self.config['Database']['user'],self.config['Database']['pass'])
        db.close()
        # reached only when connect/close raised nothing
        self.assertTrue(True)
    except DatabaseError, e:
        # fail, carrying the driver's error text in the assertion message
        self.assertFalse(True,str(e))
def breed():
    """Replace the culled portion of the population with offspring bred
    from the surviving (top-ranked) individuals."""
    survivor_count = Constants.POPULATION_SIZE - Constants.KILL_SIZE
    ranked = Database.get_all_individuals_sorted()[:survivor_count]
    survivor_params = [individual['parameters'] for individual in ranked]
    for _ in range(Constants.KILL_SIZE):
        parent_a, parent_b = random.sample(survivor_params, 2)
        child_params = genetic.combine_and_mutate(parent_a, parent_b)
        Database.add_individual_to_current_generation(child_params)
class SystemInfo(object):
    # Collects DMI system identity fields and records warranty rows in the DB.

    def __init__(self):
        # DMI system record may be keyed 0x0100 or 0x0001 depending on vendor
        if '0x0100' in system():
            key='0x0100'
        else:
            key='0x0001'
        self.SKU=system()[key]['data']['SKU Number']
        self.serial=system()[key]['data']['Serial Number']
        self.product_name=system()[key]['data']['Product Name']
        self.vendor=system()[key]['data']['Manufacturer']
        self.database=Database()

    def sethostinfo(self,warranty):
        """Insert a warranty row for this host unless an identical
        hostname/serial/vendor row already exists."""
        sel_sqlcode='''SELECT hostname FROM warranty WHERE hostname=%s and serialnum=%s and vendor=%s'''
        self.database.curs.execute(sel_sqlcode,(warranty['hostname'], warranty['serial'],warranty['vendor']))
        if len(self.database.curs.fetchall()) < 1:
            ins_sqlcode='''INSERT INTO warranty (hostname,SKU,system_type,serialnum,vendor) VALUES(%s,%s,%s,%s,%s)'''
            try:
                self.database.curs.execute(ins_sqlcode,(warranty['hostname'],warranty['SKU'], warranty['type'],warranty['serial'], warranty['vendor']))
            except Exception, e:
                self.database.rollback(e)
            else:
                # commit only when the insert raised nothing
                # NOTE(review): the else attachment (try vs if) was inferred
                # from flattened source -- confirm against the original file
                self.database.finish()
def get_by_name(self, name):
    """Return the Playlist with the given name; raise ValueError when absent."""
    db = Database()
    db.commit_statement("""select * from playlists where name = ?""", [name])
    row = db.fetchone()
    if row is None:  # idiom fix: was "row == None"
        # bug fix: the original interpolated str(id) -- the *builtin* --
        # producing "id=<built-in function id>"; report the looked-up name
        raise ValueError("Could not get playlist with name=" + str(name))
    return Playlist(row["id"], row["name"], db)
def populate_current_generation_if_empty(): if not Database.current_generation_is_empty(): return print "Adding new random individuals to generation." for i in xrange(0, Constants.POPULATION_SIZE): newIndividualParameters = genetic.random_individual_parameters() Database.add_individual_to_current_generation(newIndividualParameters)
def write_form(self, username, firstyear=None, lastyear=None):
    """Render the book-stats template for *username* over [firstyear, lastyear].

    Both year bounds default to the current year at call time. Bug fix: the
    original evaluated str(date.today().year) once at definition time, so a
    long-running process kept serving a stale default year after New Year.
    """
    current_year = str(date.today().year)
    if firstyear is None:
        firstyear = current_year
    if lastyear is None:
        lastyear = current_year
    db = Database()
    avgRating = db.getAvgRating(username, firstyear, lastyear)
    totalPages = db.getTotalPages(username, firstyear, lastyear)
    avgPubYear = db.getAvgPubYear(username, firstyear, lastyear)
    mytemplate = Template(filename='templates/bookstats.html')
    return str(mytemplate.render(firstyear=firstyear, lastyear=lastyear,
                                 avgRating=avgRating, totalPages=totalPages,
                                 avgPubYear=avgPubYear))
def get(self, id):
    """Return the Playlist with the given id; raise ValueError when absent."""
    db = Database()
    db.commit_statement("""select * from playlists where id = ?""", [int(id)])
    row = db.fetchone()
    if row is None:  # idiom fix: was "row == None"
        raise ValueError("Could not get playlist with id=" + str(id))
    return Playlist(id, row["name"], db)
def main():
    """Scrape DRKS clinical-trial pages sequentially and store valid studies.

    NOTE(review): `count` is read from an enclosing/module scope not visible
    in this chunk -- confirm it is defined before main() runs.
    """
    conf = Config()
    db = Database(conf.address, conf.username, conf.password, conf.database_name)
    valid_studies_counter = 0
    for i in range(0, count):
        try:
            # DRKS trial ids are zero-padded to 8 digits
            scraped_url = "http://drks-neu.uniklinik-freiburg.de/drks_web/navigate.do?navigationId=trial.HTML&TRIAL_ID=DRKS" + str(
                i).zfill(8)
            request = requests.get(scraped_url, stream=True)
            request.encoding = 'utf-8'
            my_scraper = BeautifulSoup(request.text.encode('utf8'), "lxml")
            if not (my_scraper.find('ul', class_="error") or my_scraper.find('ul', class_="errors")):
                # if true webpage contains valid data of study
                Trials(my_scraper, db)
                valid_studies_counter += 1
                # NOTE(review): progress-write placement inside this branch
                # was inferred from flattened source -- confirm
                sys.stdout.write(
                    "\r Processing " + str(i) + "/" + str(count) + " => " + str(
                        i * 100 / count) + " percent\t\t" + str(valid_studies_counter) + " studies found")
                sys.stdout.flush()
        except AttributeError:
            print("\n[-] attribute error at " + str(i))
        except Exception:
            # broad catch keeps the scrape loop alive on any page failure
            print("\n[-] unknown error at " + str(i))
    db.close_connection()
class App_backend:
    """Backend facade for the bingo app: wires the database, game logic and
    ticket generation together for the UI layer."""

    def __init__(self, directory: str, db_name="bingo.db") -> None:
        logger.info(directory)
        self.directory = directory
        self.db_name = db_name
        self.db = Database(directory, db_name)
        self.game = Game(db=self.db)
        self.game_id = None  # set by create_game()

    def create_game(self) -> None:
        """Create a new bingo game row and remember its id."""
        self.game_id = self.db.create_bingo_game()

    def create_ticket(self, entry: dict) -> int:
        """Generate ticket sheet(s) for *entry* and persist them.

        Returns the new ticket id, or 0 when generation produced nothing.
        """
        cs = Create_sheet(self.game_id, entry)
        ticket_data = cs.create_tickets()
        ticket_id = 0
        if ticket_data:
            name = ticket_data["name"]
            combinations = ticket_data["numbers"][0]
            amount = ticket_data["amount"]
            path = ticket_data["path"]
            ticket_id = self.db.create_bingo_sheet(self.game_id, path, name,
                                                   amount, combinations)
        return ticket_id

    def get_all_games(self) -> list:
        """Return every game as a dict with id, combination and creation time."""
        games = self.db.get_all_games()
        return [{
            "game_id": game_id,
            "combinations": combination,
            "created_datetime": c_datetime,
        } for (game_id, combination, c_datetime) in games]

    def open_game(self) -> tuple:
        """Return (ticket_count, tickets) for the current game, each ticket
        flattened into a dict for the UI."""
        tickets = []
        num_of_tickets, ticket_data = self.db.open_tickets_from_game(
            self.game_id)
        for ticket_datum in ticket_data:
            (
                ticket_id,
                game_id,
                path,
                name,
                amount,
                combinations,
                created_datetime,
            ) = ticket_datum
            tickets.append({
                "ticket_id": ticket_id,
                "game_id": game_id,
                "path": path,
                "name": name,
                "amount": amount,
                "combinations": combinations,
                "created_datetime": created_datetime,
            })
        return num_of_tickets, tickets

    def open_ticket(self, filepath: str) -> None:
        """Open the rendered ticket file in the default browser."""
        webbrowser.open(f"file://{filepath}")

    def generate_winning_combination(self, game_id: int) -> None:
        """Draw a winning combination for *game_id* and persist it."""
        combination = self.game.generate_winning_combination(game_id)
        self.game.add_combination_to_database(game_id, combination)

    def get_combination_from_game(self) -> tuple:
        """Return the stored winning combination for the current game."""
        combination = self.db.get_combination(self.game_id)
        return combination[0][0]

    def reset_app(self) -> None:
        """Best-effort removal of the database file and tickets directory.

        Bug fix: the bare ``except: pass`` also swallowed KeyboardInterrupt
        and SystemExit; narrowed to OSError, the only expected failure from
        these filesystem calls, while keeping best-effort semantics.
        """
        try:
            if platform.system() == "Windows":
                path = os.getenv("APPDATA")
                os.remove(f"{path}\\{self.db_name}")
                shutil.rmtree(f"{path}\\tickets")
            else:
                os.remove(f"{self.directory}/{self.db_name}")
                shutil.rmtree(f"{self.directory}/tickets")
        except OSError:
            pass
from database import Database

# module-level database handle shared by all effect/skill lookups
db = Database()

class Effect: #class to interact with effect of skills, attributes and items
    def __init__(self, id, initiator, bystander, battleflow):
        #transfer Player, Monster and Battleflow class objects to Effect class so it can be referred to dynamically
        self.id = id
        self.initiator = initiator
        self.bystander = bystander
        self.battleflow = battleflow

    def instant(self):
        #instants are activations that happen immediately after casting
        pass

    def persist(self):
        #persists are activations that happen in a different phase after casting
        pass

    def deactivation(self):
        #deactivations are activations that happen before the effect class object is deleted from Battleflow
        pass

class Skill(Effect): #class to interact with skill objects
    def __init__(self, id, initiator, bystander, battleflow):
        super().__init__(id, initiator, bystander, battleflow)
        #returns True for rows that fulfill the criterias and False for others
        self.match = db.SkillDatabase['id'] == self.id
        # NOTE(review): this compares the 'name' column against self.id and
        # stores a boolean mask, not a name -- looks like a copy/paste slip;
        # confirm whether something like SkillDatabase['name'][self.match]
        # was intended
        self.name = db.SkillDatabase['name'] == self.id
from database import Database
from Tkinter import *
import sqlite3
import tkMessageBox

global User_database
global IPentry

try:
    User_database = Database("FAKE_DRIVB_DB")
    User_database.create_table("Users", "IP string, Authorization string")
except sqlite3.OperationalError:
    # table already exists; nothing to create
    pass

def input_new():
    """Replace any existing row for the entered IP, then re-add the user."""
    User_database.delete_by_column("Users", "IP", IPentry.get())
    add_user(event=1)

def check_ip(ip):
    """Return True when *ip* is a valid dotted-quad IPv4 address, else False.

    Bug fix: the original fell off the end of the function without returning
    the computed result, so every input (including valid addresses) yielded
    None instead of a boolean.
    """
    x = True
    if '.' in ip:
        ip = ip.split(".")
        if len(ip) != 4:
            return False
        try:
            for byte in ip:
                x = x and (0 <= int(byte) <= 255)
        except ValueError:
            x = False
        return x
    # no dots at all cannot be a dotted-quad address
    return False
(opts, args) = p.parse_args() if opts.readdb == "NONE": print("Database not defined") sys.exit(0) else: db_name = opts.readdb """ Get database connection """ user = "******" passwd = "citoKUKU123" db = Database(db_name,user,passwd) db.getConnection() """ Read input id for the stock """ this_string = input("Enter item description:") """ Select item_description and qunatity from sku_tbl and stock """ sql_string = "SELECT stock_id, item_description, quantity, stock_reference FROM stock WHERE \ item_description LIKE '%"+str(this_string)+"%'" print sql_string #sys.exit(0) cursor = db.selectSQL(sql_string)
''' Esse arquivo é o primeiro arquivo a ser executado, já que trata-se de uma tela de login. Se tudo der certo, o arquivo main.py será chamado ''' # importando a classe Database do arquivo database e também o responsável por controlar senhas from database import Database import main import getpass # instanciando o banco de dados utilizado lá no arquivo database.py db=Database('company') def tela_login(): print('\n') print(' ############################################################################') print(' ############### CRUD-MYSQL ###############') print(' ############### -- Tela Login -- ###############') print(' ############################################################################') print(' Para iniciar selecione uma das opções abaixo:') option=int(input(' 1. Login\n 2. Novo usuário\n --> ')) if option == 1: login() elif option == 2: cria_usuario() else: print(' Opção inválida!') tela_login() # função para realizar o login def login():
import json
import requests
from database import Database
# header =
db = Database()
db.connect()

def handle_json_list(dict_json_list):
    # Flatten each reelgood JSON item into a flat record and insert it into
    # the 'reelgood' table.
    for item in dict_json_list:
        record ={}
        for (key , value) in item.items():
            if key == 'sources':
                # streaming availability flags, one column per service
                if 'amazon_prime' in value:
                    record["reelgood_is_amazon"] = 1
                if 'hulu_plus' in value:
                    record["reelgood_is_hulu"] = 1
                if 'netflix' in value:
                    record["reelgood_is_netflix"] = 1
            else:
                # NOTE(review): else attachment (outer if vs innermost if)
                # inferred from flattened source -- confirm
                record["reelgood_" + key] = value
        db.insert_dict(record , 'reelgood')
        print(record)

def request_api(sources , current_num):
    # NOTE: this definition continues beyond this chunk; the header dict is
    # truncated here.
    header = {"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
              "accept-encoding": "gzip, deflate, br",
              "accept-language": "en-US,en;q=0.9,zh-TW;q=0.8,zh;q=0.7",
              "cache-control": "max-age=0",
def getPostDate(imageName):
    """Return the upload time of *imageName* as an integer Unix timestamp."""
    row = Database.query(
        'select strftime("%s", uploadtime) from uploads where image=?',
        [ imageName ],
        one=True)
    return int(row[0])
def save_to_mongo(self):
    """Persist this blog's JSON representation into the 'blogs' collection."""
    document = self.json()
    Database.insert(collection='blogs', data=document)
class Datasource(object):
    # Serves a Mojo-format tiled image volume over HTTP: table of contents,
    # tile images, volume file listings, colormap and segment info, all
    # routed by regexes keyed on the query prefix.

    def __init__(self, mojo_dir, tmp_dir, query, input_format, output_format, sub_dir):
        # mojo_dir: root of the on-disk Mojo volume; tmp_dir holds tiles that
        # were updated after the original export
        self.__mojo_dir = mojo_dir
        self.__mojo_tmp_dir = tmp_dir
        self.__query = query
        self.__input_format = input_format
        self.__output_format = output_format
        self.__sub_dir = sub_dir
        # self.__out_dir = out_dir
        # example of the parsed info dict:
        # {'numBytesPerVoxel': '4', 'numVoxelsPerTileZ': '1', 'numVoxelsX': '1024',
        #  'numVoxelsPerTileY': '512', 'numVoxelsPerTileX': '512',
        #  'dxgiFormat': 'R32_UInt', 'numTilesX': '2', 'numTilesY': '2',
        #  'numTilesZ': '20', 'fileExtension': 'hdf5', 'numTilesW': '2',
        #  'numVoxelsZ': '20', 'numVoxelsY': '1024', 'isSigned': 'false'}
        self.__info = None
        # this is required to connect the deepzoom protocol to mojo zoom
        self.__max_deepzoom_level = 0.0
        self.__max_mojozoom_level = 0.0
        # Actual z-stack and max 3D z-stack
        self.__max_z_tiles = 0
        self.__zSample_max = 80
        self.__has_colormap = False
        self.__colormap = None
        self.__database = None
        self.__volume = None
        # file system regex
        self.__info_regex = re.compile('.*tiledVolumeDescription.xml$')
        self.__colormap_file_regex = re.compile('.*colorMap.hdf5')
        self.__segmentinfo_file_regex = re.compile('.*segmentInfo.db$')
        # handler regex
        self.__query_toc_regex = re.compile('^/' + self.__query + '/contents$')
        self.__query_tilesource_regex = re.compile('^/' + self.__query + '/\d+/$')
        self.__query_tile_regex = re.compile('^/' + self.__query + '/\d+/\d+/\d+_\d+.'
                                             + self.__output_format + '$')
        self.__query_volume_regex = re.compile('^/' + self.__query + '/volume/\d+/&.RZ$')
        self.__query_colormap_regex = re.compile('^/' + self.__query + '/colormap$')
        self.__query_segmentinfo_regex = re.compile('^/' + self.__query + '/segmentinfo$')
        self.__query_id_tile_index_regex = re.compile('^/' + self.__query + '/id_tile_index/\d+$')
        self.__setup()

    def get_info(self):
        # Parsed tiledVolumeDescription attributes (None before setup).
        return self.__info

    def get_input_format(self):
        # On-disk tile file extension, e.g. 'hdf5'.
        return self.__input_format

    def get_max_zoomlevel(self):
        return self.__max_mojozoom_level

    def __setup(self):
        """Scan the mojo directory and load volume info, colormap and DB."""
        # parse the mojo directory
        root = os.path.join(self.__mojo_dir, self.__sub_dir)
        files = os.listdir(root)
        for f in files:
            f_with_dir = os.path.join(root, f)
            ###########
            uf_with_dir = f_with_dir.replace(os.sep, '/')
            ###########
            # info file
            if self.__info_regex.match(uf_with_dir):
                tree = ET.parse(f_with_dir)
                xml_root = tree.getroot()
                self.__info = xml_root.attrib
                # set the max deepzoom zoom level
                self.__max_deepzoom_level = int(
                    math.log(int(self.__info['numVoxelsX']), 2))
                # set the max mojo zoom level
                self.__max_mojozoom_level = int(
                    math.ceil(
                        math.log(
                            float(self.__info['numVoxelsPerTileX']) /
                            float(self.__info['numVoxelsX']), 0.5)))
                # get the max number of Z tiles
                self.__max_z_tiles = int(self.__info['numTilesZ'])
                # get the file format
                self.__input_format = str(self.__info['fileExtension'])
                # width and height
                self._width = int(self.__info['numVoxelsX'])
                self._height = int(self.__info['numVoxelsY'])
                self._xtiles = int(self.__info['numTilesX'])
                self._ytiles = int(self.__info['numTilesY'])
                self._voxPerTileX = int(self.__info['numVoxelsPerTileX'])
                self._voxPerTileY = int(self.__info['numVoxelsPerTileY'])
            # colormap
            elif self.__colormap_file_regex.match(f_with_dir):
                hdf5_file = h5py.File(f_with_dir, 'r')
                list_of_names = []
                hdf5_file.visit(list_of_names.append)
                self.__has_colormap = True
                # [()] reads the whole dataset into memory
                self.__colormap = hdf5_file[list_of_names[0]][()]
                #print('This is a check of color data in datasource.py line 120')
                #print(list_of_names[0])
                #print(self.__colormap)
            # segmentinfo database
            elif self.__segmentinfo_file_regex.match(f_with_dir):
                old_db_file = os.path.join(root, f)
                # new_db_file = old_db_file.replace(self.__mojo_dir, self.__out_dir+'/')
                # os.mkdir(self.__out_dir+'/ids')
                # print 'Copied DB from', old_db_file, 'to', new_db_file
                # shutil.copy(old_db_file, new_db_file)
                print('Connecting to DB')
                self.__database = Database(old_db_file)
                # grab existing merge table
                self.__database._merge_table = self.__database.get_merge_table(
                )
                self.__database._lock_table = self.__database.get_lock_table()

    def reconfigure(self):
        # Re-scan the directory (e.g. after tiles or DB changed on disk).
        self.__setup()

    def get_info_xml(self):
        # DeepZoom descriptor XML built from the parsed volume info.
        xml_info = '<?xml version="1.0" encoding="UTF-8"?>\n'
        xml_info += '<Image xmlns="http://schemas.microsoft.com/deepzoom/2008" TileSize="' + self.__info[
            'numVoxelsPerTileX'] + '" Overlap="0" Format="' + self.__output_format + '"><Size Width="' + self.__info[
            'numVoxelsX'] + '" Height="' + self.__info[
            'numVoxelsY'] + '"/></Image>'
        return xml_info

    def get_tile(self, file):
        # Intentionally empty here -- presumably overridden elsewhere to load
        # and encode a tile; confirm against subclasses.
        pass

    def get_database(self):
        return self.__database

    def get_volume(self, zoomlevel):
        # List every tile file at *zoomlevel*, preferring updated tiles from
        # the tmp dir over the originals.
        w_path = os.path.join(self.__mojo_dir, self.__sub_dir, 'tiles',
                              'w=' + str(zoomlevel).zfill(8))
        w_path_tmp = os.path.join(self.__mojo_tmp_dir, self.__sub_dir, 'tiles',
                                  'w=' + str(zoomlevel).zfill(8))
        dirs = sorted(os.listdir(w_path))
        tile_files = []
        for d in dirs:
            if d.startswith('.'):
                continue
            files = os.listdir(os.path.join(w_path, d))
            for f in files:
                if f.startswith('.'):
                    continue
                # check if we have an updated version for this tile
                if os.path.exists(os.path.join(w_path_tmp, d, f)):
                    tile_files.append(os.path.join(w_path_tmp, d, f))
                else:
                    tile_files.append(os.path.join(w_path, d, f))
        return tile_files

    def handle(self, request):
        '''
        React to a HTTP request.
        '''
        content_type = 'text/html'
        content = None
        # table of contents
        if self.__query_toc_regex.match(request.uri):
            content = {}
            content['zSample_max'] = self.__zSample_max
            content['max_z_tiles'] = self.__max_z_tiles
            content['colormap'] = str(self.__has_colormap).lower()
            content['width'] = self.__info['numVoxelsX']
            content['height'] = self.__info['numVoxelsY']
            content['zoomlevel_count'] = self.__info['numTilesW']
            content = json.dumps(content)
        # tile
        elif self.__query_tile_regex.match(request.uri):
            # URI shape: .../<slice>/<zoom>/<x>_<y>.<ext>
            request_splitted = request.uri.split('/')
            tile_x_y = request_splitted[-1].split('.')[0]
            tile_x, tile_y = tile_x_y.split('_')
            zoomlevel = int(request_splitted[-2])
            slice_number = request_splitted[-3]
            # prefer an updated tile from the tmp dir when one exists
            updated_tile_file = os.path.join(
                self.__mojo_tmp_dir, self.__sub_dir, 'tiles',
                'w=' + str(zoomlevel).zfill(8),
                'z=' + slice_number.zfill(8),
                'y=' + tile_y.zfill(8) + ',' + 'x=' + tile_x.zfill(8) + '.'
                + self.__input_format)
            # print(updated_tile_file)
            if os.path.exists(updated_tile_file):
                content, content_type = self.get_tile(updated_tile_file)
                return content, content_type
            tile_file = os.path.join(
                self.__mojo_dir, self.__sub_dir, 'tiles',
                'w=' + str(zoomlevel).zfill(8),
                'z=' + slice_number.zfill(8),
                'y=' + tile_y.zfill(8) + ',' + 'x=' + tile_x.zfill(8) + '.'
                + self.__input_format)
            # print(tile_file)
            if os.path.exists(tile_file):
                content, content_type = self.get_tile(tile_file)
        # volume
        elif self.__query_volume_regex.match(request.uri):
            request_splitted = request.uri.split('/')
            zoomlevel = int(request_splitted[-2])
            content, content_type = self.get_volume(zoomlevel)
        # # tile source info
        # elif self.__query_tilesource_regex.match(request.uri):
        #     content = self.get_info_xml()
        #
        # # segment info
        # elif self.__query_segmentinfo_regex.match(request.uri):
        #     content = json.dumps(self.__database.get_segment_info())
        #
        # # tile index
        # elif self.__query_id_tile_index_regex.match(request.uri):
        #     request_splitted = request.uri.split('/')
        #     tile_id = request_splitted[-1]
        #     content = json.dumps(self.__database.get_id_tile_index(tile_id))
        # Only colormap for segmentation
        elif self.__has_colormap and self.__query_colormap_regex.match(
                request.uri):
            content = json.dumps(self.__colormap.tolist())
        # content stays None for unmatched URIs
        return content, content_type
def from_mongo(cls, id):
    """Load a blog by its public ``id`` field and build a class instance.

    Note: queries the ``id`` key (not Mongo's ``_id``).
    """
    record = Database.find_one(collection='blogs', query={'id': id})
    return cls(
        author=record['author'],
        title=record['title'],
        description=record['description'],
        id=record['id'],
    )
from configparser import ConfigParser
from flask import Flask, request
from linebot import LineBotApi, WebhookParser
from database import Database
from avril.models import create_all
from avril.controllers import conversation_history_bp
from examples.echo import EchoBot, MultiTurnEchoBot
# NOTE(review): create_all and MultiTurnEchoBot are imported but not used in
# this visible span — presumably referenced further down; confirm before removing.

# load config
config = ConfigParser()
config.read("./config.ini")

# create db
db = Database(config["DATABASE"]["connection_string"])

# create and configure app
app = Flask(__name__)
# The bot instance is attached to the Flask app so request handlers can reach it.
app.bot = EchoBot(
    line_api=LineBotApi(config["LINE_API"]["channel_access_token"]),
    line_parser=WebhookParser(config["LINE_API"]["channel_secret"]),
    db_session_maker=db.session,
    logger=app.logger)
app.register_blueprint(conversation_history_bp)


@app.route("/bot/webhook_handler", methods=["POST"])
def handle_webhook():
    """Accept a LINE webhook POST and hand the raw payload to the bot queue."""
    # put webhook request data to queue
    app.bot.enqueue_webhook(
        request.data.decode("utf-8"),
        request.headers.get("X-Line-Signature")
    )
    # NOTE(review): no return statement is visible here — a Flask view returning
    # None raises at runtime; the return may have been cut off — confirm.
def __setup(self):
    """Scan the mojo data directory and initialize tile/volume metadata.

    Walks the files under ``<mojo_dir>/<sub_dir>`` and, depending on which
    filename regex matches, loads: the XML info file (tile geometry and zoom
    levels), an optional HDF5 colormap, or the segment-info SQLite database.
    """
    # parse the mojo directory
    root = os.path.join(self.__mojo_dir, self.__sub_dir)
    files = os.listdir(root)
    for f in files:
        f_with_dir = os.path.join(root, f)
        ###########
        # Normalize separators so the regexes (written with '/') match on
        # every platform.
        uf_with_dir = f_with_dir.replace(os.sep, '/')
        ###########
        # info file
        if self.__info_regex.match(uf_with_dir):
            tree = ET.parse(f_with_dir)
            xml_root = tree.getroot()
            # All metadata lives in attributes of the XML root element.
            self.__info = xml_root.attrib
            # set the max deepzoom zoom level
            self.__max_deepzoom_level = int(
                math.log(int(self.__info['numVoxelsX']), 2))
            # set the max mojo zoom level
            # log base 0.5 == number of halvings from full width down to one
            # tile's width.
            self.__max_mojozoom_level = int(
                math.ceil(
                    math.log(
                        float(self.__info['numVoxelsPerTileX']) /
                        float(self.__info['numVoxelsX']), 0.5)))
            # get the max number of Z tiles
            self.__max_z_tiles = int(self.__info['numTilesZ'])
            # get the file format
            self.__input_format = str(self.__info['fileExtension'])
            # width and height
            self._width = int(self.__info['numVoxelsX'])
            self._height = int(self.__info['numVoxelsY'])
            self._xtiles = int(self.__info['numTilesX'])
            self._ytiles = int(self.__info['numTilesY'])
            self._voxPerTileX = int(self.__info['numVoxelsPerTileX'])
            self._voxPerTileY = int(self.__info['numVoxelsPerTileY'])
        # colormap
        elif self.__colormap_file_regex.match(f_with_dir):
            hdf5_file = h5py.File(f_with_dir, 'r')
            # visit() collects every dataset/group name; the first one is
            # assumed to be the colormap dataset — TODO confirm file layout.
            list_of_names = []
            hdf5_file.visit(list_of_names.append)
            self.__has_colormap = True
            self.__colormap = hdf5_file[list_of_names[0]][()]
            #print('This is a check of color data in datasource.py line 120')
            #print(list_of_names[0])
            #print(self.__colormap)
        # segmentinfo database
        elif self.__segmentinfo_file_regex.match(f_with_dir):
            old_db_file = os.path.join(root, f)
            # new_db_file = old_db_file.replace(self.__mojo_dir, self.__out_dir+'/')
            # os.mkdir(self.__out_dir+'/ids')
            # print 'Copied DB from', old_db_file, 'to', new_db_file
            # shutil.copy(old_db_file, new_db_file)
            print('Connecting to DB')
            self.__database = Database(old_db_file)
            # grab existing merge table
            self.__database._merge_table = self.__database.get_merge_table(
            )
            self.__database._lock_table = self.__database.get_lock_table()
def setUp(self):
    """Point DATABASE_URL at the throwaway test database and create tables."""
    os.environ['DATABASE_URL'] = (
        "dbname='diary_db_test' user='******' host='localhost' "
        "password='******'"
    )
    self.db = Database()
    self.db.create_users_table()
    self.db.create_entries_table()
class Article:
    """Scrapes web pages and extracts article text using per-domain rules.

    Learned "rules" (HTML fragments considered boilerplate) are stored in
    text files under ``<module dir>/Data/<domain>.txt``. A database
    connection is optional and currently unused by the extraction path.
    """

    # Absolute directory of this module; rule files live under <here>/Data.
    path_to_dir = os.path.dirname(os.path.abspath(__file__))

    def __init__(self, args=None):
        """Create the article scraper.

        args: optional dict with ``args['database']['user'|'pass'|
        'database_name']``; when given, a Database connection is opened.
        """
        Article.init_data()
        # Database is not in use at the time...
        if args is not None:
            username = args['database']['user']
            password = args['database']['pass']
            database = args['database']['database_name']
            # BUG FIX: original passed the undefined name `user` here
            # (NameError); the value read from args is `username`.
            self.db = Database(username, password, database)
            self.db_is_usable = True
        else:
            print('Cannot make a database connection...')
            self.db_is_usable = False

    @classmethod
    def init_data(cls):
        """Ensure the Data directory exists (no-op if already present)."""
        try:
            os.mkdir(Article.path_to_dir + '/Data')
        except OSError:
            # Already exists (FileExistsError) or not creatable; best-effort.
            pass

    @classmethod
    def get_domain(cls, url):
        """Return the network-location part (host[:port]) of *url*."""
        parsed_url = urlparse(url)
        return str(parsed_url.netloc)

    @classmethod
    def get_page(cls, url):
        """Return a stripped, whitespace-collapsed version of the page body."""
        content = (requests.get(url)).text
        soup = BeautifulSoup(content, 'html5lib')
        # Extract every tag that's unimportant for text extraction.
        for tag_name in ('script', 'noscript', 'img', 'video', 'audio'):
            for s in soup(tag_name):
                s.extract()
        # Remove comments
        comments = soup.findAll(text=lambda text: isinstance(text, Comment))
        for comment in comments:
            comment.extract()
        # Remove newline/tab whitespace from the serialized body.
        html = str(soup.body)
        return "".join(
            re.sub(r"[\n\t]*", "", line) for line in html.split("\n"))

    @classmethod
    def save_rule(cls, file_name, rule):
        """Append *rule* (an HTML fragment string) to the rule file."""
        with open(file_name, 'a') as fw:
            fw.write(rule)

    @classmethod
    def get_rules(cls, file_name):
        """Load the rule file and return its top-level HTML fragments."""
        file_content = "".join(Article.get_file(file_name))
        if file_content == "":
            return []
        response = [
            str(line)
            for line in BeautifulSoup(file_content, "html5lib").findAll()
        ]  # Array of rules
        # Drop fragments contained in other fragments.
        # NOTE(review): mutating `response` while iterating it skips elements,
        # and x.find(x) == 0 means self-comparisons also match; preserved as-is
        # because callers may depend on the current (lossy) behavior.
        for x in response:
            for y in response:
                if x.find(y) > -1:
                    response.remove(y)
        return response

    @classmethod
    def get_selector(cls, element):
        """Build a crude CSS-ish selector (name + #id + .class) for *element*."""
        # It works.. Don't mess with it
        elem_id = ''
        elem_class = ''
        elem_selector = element.name
        try:
            elem_id = '#' + element.attrs['id']
        except Exception:
            # No usable id attribute; leave elem_id empty.
            pass
        try:
            elem_class = [
                '.' + class_name for class_name in element.attrs['class']
            ][0]
        except Exception:
            # No usable class attribute; leave elem_class empty.
            pass
        if elem_id != '':
            elem_selector += elem_id
            if elem_class != '':
                elem_selector += elem_class
        elif elem_class != '':
            elem_selector += elem_class
        return elem_selector

    @classmethod
    def check_lines(cls, html, rules):
        """Return the page fragments that do not resemble any known rule."""
        response = []
        html = BeautifulSoup(html, 'html5lib').findAll()
        selectors = []
        # Build selector chains: each entry is [chain, latest element name].
        for element in html:
            element_selector = Article.get_selector(element)
            children = element.children
            for child in children:
                found_selector = False
                for line in selectors:
                    selector = line[0]
                    latest_element = line[1]
                    if element_selector == latest_element:
                        line[0] += str(' > ' + Article.get_selector(child))
                        line[1] = element_selector
                        print(selectors)
                        found_selector = True
                        break
                if not found_selector:
                    # markup: ['selector', 'latest element']
                    selectors.append([element_selector, element_selector])
                    print(selectors)
        for tag in html:
            if tag.text == '':
                # NOTE(review): `del tag` only unbinds the loop variable —
                # it does not remove the tag from the soup or from `html`.
                del tag
            else:
                # Fuzzy-match the tag against the known boilerplate rules.
                if get_close_matches(str(tag), rules, cutoff=0.6):
                    del tag
                else:
                    response.append(str(tag))
        # Same containment-dedup pass (and caveats) as in get_rules().
        for x in response:
            for y in response:
                if x.find(y) > -1:
                    response.remove(y)
        return response

    @classmethod
    def get_file(cls, file_name):
        """Return the lines of *file_name* (created empty if missing)."""
        # Make a relative path absolute under the module directory.
        # NOTE(review): using the directory path as a regex pattern is fragile
        # (regex metacharacters in the path would break this) — confirm.
        if not re.search(r"%s" % (Article.path_to_dir, ), file_name):
            file_name = "%s/%s" % (
                Article.path_to_dir,
                file_name,
            )
        # Create the file if it doesn't already exist.
        if not os.path.isfile(file_name):
            with open(file_name, 'w+') as fw:
                fw.write("")
        with open(file_name, 'r+') as fr:
            file_content = fr.readlines()
        # Strip trailing newlines.
        return [re.sub(r"\n", "", line) for line in file_content]

    def get_article(self, url):
        """Return the main text fragments retrieved from *url*."""
        domain = Article.get_domain(url)
        #TODO: Create table learning
        if self.db_is_usable:
            # NOTE(review): SQL built by string interpolation — vulnerable to
            # injection and missing quoting; should use a parameterized query.
            query = "SELECT * FROM learning WHERE base_url = %s" % (domain, )
            response = self.db.fetch(query)
        file_name = 'Data/' + domain + '.txt'
        rules = Article.get_rules(file_name)
        html = Article.get_page(url)
        print("Checking url:", url)
        if rules:
            return Article.check_lines(html, rules)
        else:
            # No rules yet: seed the rule file with the whole page.
            Article.save_rule(file_name, str(html))
            return Article.check_lines(html, rules)
def find_by_author_id(cls, author_id):
    """Return every blog written by *author_id* as class instances."""
    cursor = Database.find(collection='blogs', query={'author_id': author_id})
    return [cls(**document) for document in cursor]
PeerIdInvalid
from configs import Config
from database import Database

## --- Sub Configs --- ##
BOT_USERNAME = Config.BOT_USERNAME
BOT_TOKEN = Config.BOT_TOKEN
API_ID = Config.API_ID
API_HASH = Config.API_HASH
DB_CHANNEL = Config.DB_CHANNEL
ABOUT_BOT_TEXT = Config.ABOUT_BOT_TEXT
ABOUT_DEV_TEXT = Config.ABOUT_DEV_TEXT
HOME_TEXT = Config.HOME_TEXT
BOT_OWNER = Config.BOT_OWNER
FORWARD_AS_COPY = Config.FORWARD_AS_COPY

db = Database(Config.DATABASE_URL, BOT_USERNAME)
# Tracks in-flight broadcast progress, keyed by broadcast id.
broadcast_ids = {}

Bot = Client(BOT_USERNAME,
             bot_token=BOT_TOKEN,
             api_id=API_ID,
             api_hash=API_HASH)


async def send_msg(user_id, message):
    """Forward *message* to *user_id*; return (status_code, error_text).

    200/None on success; 400 with a per-user error line on known failures.
    """
    try:
        await message.forward(chat_id=user_id)
        return 200, None
    except FloodWait as e:
        # Telegram rate limit: wait the mandated time, then retry.
        await asyncio.sleep(e.x)
        # NOTE(review): this recursive retry is not awaited — it returns a
        # coroutine object instead of the (status, error) tuple; it likely
        # should be `return await send_msg(user_id, message)`. Confirm.
        return send_msg(user_id, message)
    except InputUserDeactivated:
        return 400, f"{user_id} : deactivated\n"
    except UserIsBlocked:
        return 400, f"{user_id} : blocked the bot\n"
    # NOTE(review): PeerIdInvalid is imported above, so a matching except
    # clause presumably follows beyond this visible span.
def from_mongo(cls, blog_id):
    """Fetch the blog document whose ``_id`` is *blog_id* and wrap it."""
    document = Database.find_one(collection='blogs', query={'_id': blog_id})
    return cls(**document)
from flask import Flask, render_template, request, redirect, url_for, session
from database import Database

# Numeric status codes to role names (Russian UI strings are runtime data —
# left untranslated).
STATUS_DICT = {
    3: 'админ',
    2: 'мастер',
    1: 'пользователь',
    0: 'не назначен',
    -1: 'забанен'
}
# Field names required when submitting a spell.
REQ_SPELL_LABELS = {
    'spell_title', 'spell_cost', 'learning_const', 'description'
}

# NOTE(review): hardcoded database credentials and secret key — should come
# from configuration or environment variables.
db = Database('ivan', 'strongsqlpassword')
app = Flask(__name__)
app.secret_key = 'very secret and reliable secret key'

# TODO: add decorators to check login on user-only pages


@app.route('/', methods=['GET', 'POST'])
def login():
    """Login page: redirect if already logged in, else validate the form.

    NOTE(review): the body appears truncated here — validation of the
    credentials presumably continues beyond this visible span.
    """
    if 'loggedin' in session:
        return redirect(url_for('home'))
    msg = ''
    if request.method == 'POST' and 'username' in request.form and 'password' in request.form:
        username = request.form['username']
        password = request.form['password']
import discord_logging
import praw
import prawcore
from datetime import datetime

log = discord_logging.init_logging()

from database import Comment, User, Submission, Database

database = Database()
reddit = praw.Reddit("Watchful1")

# Backfill: every stored user with no creation date yet.
users = database.session.query(User).filter_by(created=None).all()
log.info(f"Processing {len(users)} users")
processed = 0
deleted = 0
for user in users:
    processed += 1
    try:
        r_user = reddit.redditor(user.name)
        user_created = datetime.utcfromtimestamp(r_user.created_utc)
        user.created = user_created
    except prawcore.exceptions.NotFound:
        # Account no longer exists on reddit.
        user.is_deleted = True
        deleted += 1
    except AttributeError:
        # Redditor object lacks created_utc (suspended/shadowbanned account).
        user.is_deleted = True
        deleted += 1
    # Progress log every 100 users.
    if processed % 100 == 0:
        log.info(f"{processed}/{len(users)} : {deleted}")
# NOTE(review): no session.commit() is visible in this span — presumably it
# follows beyond the chunk; confirm the updates are persisted.
def find_all_blogs(cls):
    """Return every blog document in the collection as a class instance."""
    documents = Database.find(collection='blogs', query={})
    return [cls(**document) for document in documents]
def scrap():
    """Set the pt-BR time locale, open the module-level database, and scrape."""
    global db
    locale.setlocale(locale.LC_TIME, 'pt_BR.utf8')
    db = Database()
    _scrap()
from utils import load_data, load_template, add_dic, build_response
from database import Database
from database import Note
import database
from urllib.parse import *

db = Database('banco')


def index(request):
    """Handle the index request; on POST, parse the form body into params.

    NOTE(review): the body appears truncated here — rendering of the notes
    presumably continues beyond this visible span.
    """
    # Builds a list of <li>'s for each note
    # If curious: https://docs.python.org/3/tutorial/datastructures.html#list-comprehensions
    # The request string always starts with the request type (e.g. GET, POST)
    if request.startswith('POST'):
        request = request.replace('\r', '')  # Remove unwanted characters
        # Header and body are always separated by two line breaks
        partes = request.split('\n\n')
        corpo = partes[1]
        params = {}
        # Fill the params dict with the information from the request body.
        # The dict will contain two values: the title and the description.
        # Later it may be worth extracting a function that takes the request
        # and returns the parameters, to decouple this logic.
        # Hint: use the string split method and the unquote_plus function
        for chave_valor in corpo.split('&'):
            chave_valor = unquote_plus(chave_valor)
            split2 = chave_valor.split('=')
            params[split2[0]] = split2[1]
import string
import os

from database import Database

app = Flask(__name__)
CORS(app)

# Setting Switch: pick the address the DB client should talk to.
deploy = True
development_address = "127.0.0.1"
deployment_address = "3.24.141.26"
server_address = deployment_address if deploy else development_address

# Setting up DB.
# NOTE(review): hardcoded credentials in source — move to config/env.
db_address = 'mongodb://' + server_address + ':27017/'
db = Database(db_address, "hyperlynk", "OnePurpleParrot")
db.connect()
db.init()


def generate_device_id(length=16):
    """Return a random alphanumeric device id of *length* characters.

    NOTE(review): uses `random`, which is not cryptographically secure — if
    device ids must be unguessable, switch to the `secrets` module.
    """
    return ''.join(
        random.choice(string.ascii_letters + string.digits)
        for i in range(length))


def response(msg='', payload=None):
    """Build the generic JSON reply: {'Message': msg, 'Payload': payload}.

    BUG FIX: the original used a mutable default argument (`payload={}`),
    which is shared across calls; a None sentinel is equivalent and safe.
    """
    if payload is None:
        payload = {}
    return jsonify({'Message': str(msg), 'Payload': payload})
def sqlCompiler(usrInput, db=None):
    """Parse a restricted SQL-like command string and apply it to *db*.

    Returns (db, message) tuples on success paths; some error paths return a
    bare string or a 3-tuple instead — callers must handle all shapes.
    NOTE(review): the bare `except: pass` clauses silently swallow all
    failures, making many commands return None on error.
    """
    sanitizedUsrInput = usrInput.upper()
    # dataBase = db
    #Removes unnecessary '', "", and ()
    # NOTE(review): '\s' in a non-raw string is a deprecated escape; the
    # pattern should be a raw string r'[^A-Za-z\s,*1234567890<>=]'.
    sanitizedUsrInput = re.sub(re.compile('[^A-Za-z\s,*1234567890<>=]'), "",
                               sanitizedUsrInput)
    # Splits the input in a list of 4 (so you can select the first 3 to determine what to do) i.e. create database/table etc
    args = sanitizedUsrInput.split(" ", 3)
    #The main principle is that you split the usrInput and see what combination of 1st, 2nd etc. elements matches the SQL syntax
    if args[0] == "CREATE":
        if args[1] == "DATABASE":
            try:
                if args[2] != "":
                    db = Database(args[2], load=False)
                    return (db, "Successfully created the db")  #,"Created database {}".format(args[2])
                else:
                    return ("Please provide a name for your database")
            except:
                pass
        elif args[1] == "TABLE":
            if args[2] != "":
                #removes the (now unnecessary) first 2 elements
                args.pop(0)
                args.pop(0)
                #takes the name of the table
                tableName = args.pop(0)
                args2 = ''.join(args)
                args2 = args2.split(',')
                #args2 now contains elements in the form 'fieldname datatype'
                newstr = ""
                fieldNames = []
                dataTypes = []
                #loops through args2 and splits the fieldnames and the datatypes in 2 different lists
                for x in args2:
                    newstr += x
                    args3 = newstr.split(' ')
                    fieldNames.append(args3[1])
                    dataTypes.append(args3[2])
                    newstr = ""
                #loops through datatypes and converts the varchar and int to their appropriate 'str' and 'int' counterparts
                for item in dataTypes:
                    if item == 'VARCHAR':
                        index = dataTypes.index(item)
                        dataTypes.remove(item)
                        dataTypes.insert(index, str)
                    elif item == 'INT':
                        index = dataTypes.index(item)
                        dataTypes.remove(item)
                        dataTypes.insert(index, int)
                    else:
                        pass
                #Sets as PK the FIRST column
                db.create_table(tableName, fieldNames, dataTypes,
                                fieldNames[0])
                return (db, "Successfully created the table")
                # return(db,"Successfully created the table")
            else:
                pass
        elif args[1] == "INDEX":
            if args[2] != "":
                try:
                    args.pop(0)
                    args.pop(0)
                    indexName = args.pop(0)
                    args2 = ''.join(args)
                    args2 = args2.replace("ON ", "")
                    args3 = args2.split(" ")
                    tbName = args3[0]
                    db.create_index(tbName, indexName)
                    return (db, "Successfully created the index")
                except:
                    pass
            else:
                return (usrInput, " Is a wrong format")
    elif args[0] == "DROP":
        if args[1] == "DATABASE":
            if args[2] != "":
                #Again,you remove the first 2 elements and save the database name so you can use it to delete the db later
                try:
                    args.pop(0)
                    args.pop(0)
                    dbName = args.pop(0)
                    db.drop_db()
                    return (db, "Successfully dropped the database")
                except:
                    pass
            else:
                return ("Wrong format")
        elif args[1] == "TABLE":
            if args[2] != "":
                #You save the name, you drop the table
                try:
                    args.pop(0)
                    args.pop(0)
                    tbName = args.pop(0)
                    db.drop_table(tbName)
                    return (db, "Successfully dropped the table")
                except:
                    pass
            else:
                pass
    elif args[0] == "SELECT":
        if args[1] == "*":
            #TODO maybe implement select with conditions, right now only supports the select all (*)
            args.pop(0)
            args.pop(0)
            args.pop(0)
            tbName = args.pop(0)
            # db.select prints its result, so capture stdout to get the text.
            old_stdout = sys.stdout
            result = StringIO()
            sys.stdout = result
            db.select(tbName, '*')
            sys.stdout = old_stdout
            result_string = result.getvalue()
            #print(result_string)
            return (db, result_string)
        else:
            try:
                args.pop(0)
                leftTableName = args.pop(0)
                leftTableName = leftTableName.replace(",", "")
                rightTableName = args.pop(0)
                rightTableName = rightTableName.replace(",", "")
                newstr = args[0]
                args2 = newstr.split(" ")
                condition = args2[-1]
                # Does not work...
                # NOTE(review): success message says "index" but this branch
                # performs an inner join — likely a copy-paste slip.
                db.inner_join(leftTableName, rightTableName, condition)
                return (db, "Successfully created the index")
            except:
                pass
    elif args[0] == "UPDATE":
        if args[1] != "":
            try:
                tbName = args[1]
                args.pop(0)
                args.pop(0)
                args.pop(0)
                newstr = ""
                newstr += args[0]
                temp = newstr.split("WHERE ")
                condition = temp[1]
                columnsAndValues = temp[0]
                listOfcolmnsAndVals = columnsAndValues.split(',')
                newstr = ""
                setColumns = []
                setValues = []
                args3 = []
                for i in listOfcolmnsAndVals:
                    newstr += i
                    newstr = newstr.replace(" ", "")
                    args3 = newstr.split('=')
                    setColumns.append(args3[0])
                    setValues.append(args3[1])
                    newstr = ""
                # Doesn't work for multiple values i.e. cant use the lists i have created.
                # Also, to change the values the condition must be the same column as the value you want to change
                # For example, if you want to change the personid from 10 to 15 the condition would be 'where personid > 2'
                # Still dont know how that works...
                db.update(tbName, setValues[0], setColumns[0], condition)
                return (db, "Successfully updated the row(s)")
            except:
                pass
    elif args[0] == "INSERT":
        if args[1] == "INTO":
            try:
                args.pop(0)
                args.pop(0)
                #You take the table name
                tbName = args.pop(0)
                if tbName != "":
                    newstr = ""
                    newstr += args[0]
                    #All the values you want to insert you split them in a list so
                    #Right now the only supported way (by the database.py base code) is to insert a value in every column
                    newstr = newstr.replace("VALUES ", "")
                    newstr = newstr.replace(" ", "")
                    values = []
                    values = newstr.split(',')
                    db.insert(tbName, values)
                    return (db, "Successfully inserted the data")
            except:
                pass
        else:
            return (db, "Please provide a name for your table")
    elif args[0] == "DELETE":
        if args[2] != "":
            try:
                args.pop(0)
                args.pop(0)
                tbName = args.pop(0)
                #You take the name of the table you want to delete rows from
                conditions = args[0]
                conditions = conditions.replace("WHERE ", "")
                #You sanitize the conditions sting (the function takes input as 'id>10') thats why you take the "conditions" part as a hole string
                db.delete(tbName, conditions)
                return (db, "Successfully deleted the row(s)")
                #TODO if i use the equal ("=") in the condition part, it breaks... (no idea why)
            except:
                pass
    elif args[0] == "USE":
        if args[1] == "DATABASE":
            #In order to select something from a database you have to create a new db
            #No way to just select from an existing db, hence the need to "select" the desired db
            #It uses the same create function from database.py BUT the "load" flag is TRUE
            args.pop(0)
            args.pop(0)
            dbName = args.pop(0)
            if dbName != "":
                # return(db,type(dbName))
                # return(db,dbName)
                db = Database(dbName, load=True)
                return (db, "Loaded the database")
            else:
                return (db, "Wrong format")
    else:
        return (db, usrInput, " Is a wrong format")
def get_all_instances():
    """Return every stored document from this model's collection."""
    collection = LabModel.__collection_id
    return Database.get_all_instances(collection)
stringFormat[sender.text().lower()], self.inputComponents['key'].currentText()) print(user_input) k = db.run(user_input) print(k) self.resultComponent[1].setFontWeight(100) self.resultComponent[1].append("Command => {0}".format(user_input)) self.resultComponent[1].setFontWeight(1) for row in k: result = "" for info in row: result += "{0}={1}\t\t".format(str(info), str(row[info])) self.resultComponent[1].append(result) self.resultComponent[1].append('\n') pass except Exception as e: self.resultComponent[1].append(e.__str__()) pass def closeEvent(self, event): db.quit_db() if __name__ == '__main__': app = QApplication(sys.argv) db = Database("20171648-양기현-assignment3.dat") ex = ScoreDB(db) sys.exit(app.exec_())
def push(self):
    """Persist this object's JSON representation into its collection."""
    payload = self.jsonify()
    Database.insert(collection=self.__collection_id, data=payload)
def delete_one(lab_number):
    """Delete the lab document whose labNumber equals *lab_number*."""
    selector = {'labNumber': lab_number}
    Database.delete_one(LabModel.__collection_id, query=selector)
def delete_one(cart_number):
    """Delete the cart document whose cartNumber equals *cart_number*."""
    selector = {'cartNumber': cart_number}
    Database.delete_one(CartModel.__collection_id, query=selector)
def pull(cls, lab_number):
    """Fetch the lab with the given number and build an instance from it."""
    record = Database.find_one(
        collection=cls.__collection_id,
        query={'labNumber': lab_number},
    )
    return cls(
        lab_number=record['labNumber'],
        device_quantity=record['deviceQuantity'],
    )