def __init__(self, path):
    """Open the ContentDb at *path* and verify its table layout."""
    # Base Db is initialised with an empty table set; the real layout
    # comes from getSchema() below.
    Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path)
    self.foreign_keys = True
    self.schema = self.getSchema()
    self.checkTables()
    self.site_ids = {}  # cache — presumably site address -> numeric id; confirm with callers
    self.sites = {}     # cache — presumably site address -> site object; confirm with callers
def executeFlow(self):
    """Fetch the oldest pending submission, evaluate it and record the result.

    Side effects: marks the submission 'processing', downloads its file,
    runs the evaluator, sets self.final_result and calls updateDatabase().
    Returns early (doing nothing) when no submission is pending.
    """
    self.db = Db()
    query_get_submission = (
        "SELECT submission_id, file_name, question_id FROM submission "
        "WHERE status='pending' ORDER BY time_of_submission ASC LIMIT 1"
    )
    results = self.db.executeQuery(query_get_submission)
    if results == ():
        return  # nothing pending
    self.sub_id = str(results[0][0])
    file_name = results[0][1]
    question_id = results[0][2]
    # sub_id originates from the database itself, but parameterized
    # queries would still be preferable to concatenation.
    query_update_status = "UPDATE submission SET status='processing' WHERE submission_id =" + self.sub_id
    self.db.executeQuery(query_update_status)
    submission_file = File(self.sub_id, file_name)
    submission_file.downloadFile()
    evaluate = Evaluator(submission_file, question_id)
    result = evaluate.executeSource()
    # Bug fix: the original used two independent `if`s, so the
    # "compilation error" case ([0]) also fell into the counting branch
    # and final_result was overwritten with "0 out of 1 are correct".
    if result == [0]:
        self.final_result = "compilation error"
    elif result == [1]:
        self.final_result = "runtime error"
    else:
        count_total = len(result)
        count_success = sum(1 for success in result if success == 2)
        self.final_result = str(count_success) + " out of " + str(count_total) + " are correct "
    self.updateDatabase()
def main():
    """Crawl Tmall category listings and insert parsed items into the database."""
    db = Db(dbconfig)
    print(initial_url)
    home_page = get_one_page(initial_url)
    urllist = parse_home_page(home_page)
    print(urllist)
    for url in urllist:
        # Category id embedded in the listing URL ("...cat=123&...").
        category = re.findall(r'cat=(.*?)&', url)[0]
        for sorttype in sort:
            sortid = sort2id[sorttype]
            sort_nums = 0
            for i in range(10):  # at most 10 result pages; s= is the item offset (60 per page)
                # realurl = '{0}sort={1}&s={2}&{3}'.format(baseurl, sorttype, 60*i, tailurl)
                realurl = '{0}sort={1}&s={2}&{3}'.format(baseurl, sorttype, i*60, url)
                print(realurl)
                itempage = get_one_page(realurl)
                # print(itempage)
                soup = BeautifulSoup(itempage, 'lxml')
                itemlist = soup.findAll(name="div", attrs={"class": re.compile(r"product (\s\w+)?")})
                # print(itemlist)
                print(sort_nums)
                for item in itemlist:
                    # print(item)
                    result = parse_one_page(item, category, sort_nums, sortid)
                    db.insert('website_tmall_inter', result)
                    sort_nums = sort_nums + 1
                # Randomized delay between page fetches.
                time.sleep(randint(25, 45))
                if len(itemlist) < 60:  # short page -> last page of this sort order
                    break
def gorev_sil(id):
    """Delete the task ("gorev") row whose "Id" matches, then redirect home."""
    sql = """ delete from "gorev" where "Id" = %s """
    db = Db()
    db.execute(sql, (id,))
    return redirect("/")
def testDb(self):
    """Create a throwaway Db twice (absolute and relative path) and check its tables."""
    from Db import Db
    for db_path in [os.path.abspath("%s/test/zeronet.db" % config.data_dir), "%s/test/zeronet.db" % config.data_dir]:
        print "Creating db using %s..." % db_path,
        # Minimal schema: one json mapping and one table with a unique index.
        schema = {
            "db_name": "TestDb",
            "db_file": "%s/test/zeronet.db" % config.data_dir,
            "map": {
                "data.json": {
                    "to_table": {
                        "test": "test"
                    }
                }
            },
            "tables": {
                "test": {
                    "cols": [
                        ["test_id", "INTEGER"],
                        ["title", "TEXT"],
                    ],
                    "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                    "schema_changed": 1426195822
                }
            }
        }
        if os.path.isfile("%s/test/zeronet.db" % config.data_dir):
            os.unlink("%s/test/zeronet.db" % config.data_dir)
        db = Db(schema, "%s/test/zeronet.db" % config.data_dir)
        db.checkTables()
        db.close()
        # Cleanup
        os.unlink("%s/test/zeronet.db" % config.data_dir)
        os.rmdir("%s/test/" % config.data_dir)
class LldpCtl(lldpy.Watcher):
    """ A custom class to hook into llpdctl utility. """

    def __init__(self, interface):
        super(LldpCtl, self).__init__()
        # Until LLDP tells us otherwise, the location is this host's name.
        self.location = socket.gethostname().upper()
        self.interface = interface
        self.db = Db()
        self.is_alive = True

    def on_add(self, local, remote):
        """A neighbour appeared: derive location from its topology record."""
        if local.interface_name != self.interface:
            return
        topology = self.db.get_topology(remote.chassis_name)
        self.location = topology[-1][3].upper()

    def on_delete(self, local, remote):
        """Neighbour vanished: fall back to this host's own name."""
        if local.interface_name != self.interface:
            return
        self.location = socket.gethostname().upper()

    def on_update(self, local, remote):
        """Neighbour changed: refresh location from its topology record."""
        if local.interface_name != self.interface:
            return
        topology = self.db.get_topology(remote.chassis_name)
        self.location = topology[-1][3].upper()

    def get_location(self):
        """Return the most recently derived location string."""
        return self.location
def setID(self):
    """Set an id for the session."""
    existing = self.parent.sessions
    db = Db()
    if existing:
        # Continue from the last session's id.
        self.ID = existing[-1].getID() + 1
    else:
        # First session: continue from the last recorded folio.
        self.ID = db.getFolio() + 1
def index():
    """Render the task list page from every row of the "gorev" table."""
    sql = """ select * from "gorev" """
    rows = Db().read_data(sql)
    return render_template("index.html", data=rows)
def speichereEvents():
    """Persist all managed events: insert the new ones, update the known ones."""
    if not Db.initialisiert:
        Db.init()
    for event in EventManager.events:
        if event.id is not None:
            Db.updateEvent(Db.conn, event)
        else:
            # A fresh event receives its id from the insert.
            event.id = Db.addEvent(Db.conn, event)
    Db.conn.commit()
def ladeEvents():
    """Load today's events from the database and draw each of them."""
    from TimeManager import TimeManager
    if not Db.initialisiert:
        Db.init()
    geladene = Db.erhalteAlleEventsAm(Db.conn, TimeManager.aktuellesDatum.datum)
    if len(geladene) > 0:
        EventManager.events = geladene
    for event in EventManager.events:
        event.zeichne()
def collector(self):
    """Collect and return all data to be recorded on the database."""
    # Normalize a few order attributes onto self: falsy -> 0, True -> 1,
    # anything else kept as-is.
    items = {
        "factura": self.orderTotal.getInvoice(),
        "descuento": self.orderTotal.getDcto()[0],
        "descuentop": self.orderTotal.getDcto()[1],
        "descuentoa": self.orderTotal.getDcto()[2],
        "Np": self.np,
        "Llevar": self.llevar
    }
    for key, value in items.items():
        if not value or value is False:
            setattr(self, key, 0)
        else:
            if value is True:
                value = 1
            setattr(self, key, value)
    # Ticket header fields come from the stored "TICKET" configuration group.
    db = Db()
    headerConfig = db.getConfigGroup("TICKET")
    data = {
        "imagen": headerConfig["imagen_ticket"],
        "titulo": headerConfig["titulo"],
        "direccion": headerConfig["direccion"],
        "regimen": headerConfig["regimen_fiscal"],
        "RFC": headerConfig["RFC"],
        "nombreFiscal": headerConfig["nombre"],
        "telLocal": headerConfig["telefono"],
        "folio": self.getID(),
        "nombre": self.nameField.getText(),
        "llevar": self.Llevar,  # Capitalized because different
        "pagado": self.Np,  # Capitalized because different
        "sexo": self.getSex(),
        "edad": self.getAge(),
        "notas": self.inputField.getText(),
        "factura": self.factura,
        "total": self.orderTotal.getTotal(nodcto=True),
        "subtotal": self.orderTotal.getSubtotal(),
        "iva": self.orderTotal.getVat(),
        "descuento": self.descuento,
        "descuentoa": self.descuentoa,
        "descuentop": self.descuentop,
        "cupon": self.orderTotal.getDcto()[3],
        "paga": self.paga,
        "cambio": self.cambio,
        "cancelado": self.cancelado,
        "productos": self.holder.getOrder().getItems(),
        "fecha": self.date,
        "hora": self.hour,
        "facRfc": self.invoiceRfc,
        "facTelefono": self.invoiceTel,
        "facEmail": self.invoiceEmail,
        "facNombre": self.invoiceName,
        "facUso": self.invoiceUse
    }
    return data
def speichereZeiten():
    """Persist all tracked times: insert the new entries, update the known ones."""
    if not Db.initialisiert:
        Db.init()
    for zeit in TimeManager.zeiten:
        if zeit.id is not None:
            Db.updateZeit(Db.conn, zeit)
        else:
            # A fresh entry receives its id from the insert.
            zeit.id = Db.addZeit(Db.conn, zeit)
    Db.conn.commit()
def removeEvent(event):
    """Remove *event* from the database and the manager, unlinking its neighbours."""
    if not Db.initialisiert:
        Db.init()
    # Detach the previous/next event references in both directions.
    nachfolger = event.eventDanach
    if nachfolger is not None:
        nachfolger.eventDavor = None
    vorgaenger = event.eventDavor
    if vorgaenger is not None:
        vorgaenger.eventDanach = None
    Db.entferneEvent(Db.conn, event)
    EventManager.events.remove(event)
def process_scroll(process_no, lists, page_rows):
    """Parse this worker's slice of scroll hits (access-log lines) into MySQL.

    *process_no* selects which page_rows-sized slice of *lists* this worker
    handles. Returns a "<pid> done " marker string when the slice finishes.
    """
    dbconfig = {
        'host': '127.0.0.1',
        'port': 3306,
        'user': '******',
        'password': '******',
        'db': 'tracking',
        'charset': 'utf8'
    }
    db_class = Db(dbconfig)
    try:
        for line in lists[process_no * page_rows:(process_no + 1) * page_rows]:
            try:
                if '_source' not in line:
                    continue
                data = line['_source']['message']
                data_list = data.split(' ')
                if not data_list:
                    continue
                if not isinstance(data_list, list):
                    continue
                # Fields 12+ are rejoined into the (space-separated) user agent.
                user_agent = ''
                for index in range(12, len(data_list)):
                    user_agent = user_agent + (' ' if user_agent else '') + data_list[index]
                try:
                    # Field 4 looks like "[dd/Mon/yyyy:HH:MM:SS"; strip the bracket.
                    visit_time = str(datetime_now.datetime.strptime(data_list[4][1:], "%d/%b/%Y:%H:%M:%S"))
                except Exception as e:
                    print(e)
                    visit_time = ''
                sub_pos = user_agent.find('"', 2) if user_agent.find('"', 2) else len(user_agent)
                referer = data_list[11] if data_list[11] else ''
                # try:
                #     if 0 < len(urllib.parse.unquote_plus(data_list[2] + data_list[7]).split('?')):
                #         short_url = short_url[0]
                # except Exception as e:
                #     print(e)
                #     short_url = ''
                insert_data = {'ip': data_list[0],
                               'website_url': urllib.parse.unquote_plus(data_list[2] + data_list[7]),
                               'user_agent': escape(user_agent[1:sub_pos]),
                               'visit_time': visit_time,
                               # 'short_url': short_url,
                               'referer': referer[1:-1]}
                insert_data['created_time'] = datetime_now.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                insert_data['md5_hash'] = hashlib.md5(
                    (insert_data['ip'] + insert_data['user_agent'][0:255]).encode(
                        'utf8')).hexdigest()  # hash only the first 255 chars of the UA
                # insert_data['unique_hash'] = hashlib.md5(
                #     (insert_data['ip'] + insert_data['user_agent'][0:255]).encode('utf8') + short_url).hexdigest()
                table_name = 'statistics_data'
                print(insert_data)
                id = db_class.insert(table_name, insert_data)
                print(id)
            except Exception as e:
                # Per-line failures are logged and skipped; the slice continues.
                print(e)
        return str(os.getpid()) + ' done '
    except Exception as e:
        print(e)
def test_viewer_exists_in_db(self):
    """A viewer created via create_viewer must be reported by viewer_exists."""
    twitchusername = "******"
    channel = "TestChannel"
    conn = Db()
    conn.create_viewer(twitchusername, channel, -1)
    found = conn.viewer_exists(twitchusername, channel)
    self.cleanup()
    self.assertEqual(found, True)
def openDb(self, check=True):
    """Open the site's database described by dbschema.json; optionally verify it."""
    schema = self.loadJson("dbschema.json")
    db_path = self.getPath(schema["db_file"])
    if check and (not os.path.isfile(db_path) or os.path.getsize(db_path) == 0):
        self.rebuildDb()  # Not exits or null
    self.db = Db(schema, db_path)
    if check and not self.db_checked:
        if self.db.checkTables():
            # Some table layouts changed; re-import without dropping the file.
            self.rebuildDb(delete_db=False)  # Todo only update the changed table datas
def test():
    """Compare the company-list types returned by Prudence and by Db directly."""
    # NOTE(review): this calls self.assertEqual but takes no `self` parameter —
    # probably a method whose signature lost `self`; confirm against the class.
    p = Prudence('ad', 'bb', 'localhost')
    user = p.get_raw_user()
    User = p.get_user()
    c = p.get_companies()
    d = Db('localhost')
    c1 = d.getCompanies()
    print type(c)
    print type(c[0])
    self.assertEqual(type(c), type(c1), 'i tipi esterni no')
    self.assertEqual(type(c[0]), type(c1[0]), 'i tipi interni no')
def test():
    """Compare the company-list types returned by Prudence and by Db directly."""
    # NOTE(review): duplicate of the other test(); also uses self.assertEqual
    # without a `self` parameter — confirm against the enclosing class.
    p = Prudence('ad','bb','localhost')
    user = p.get_raw_user()
    User = p.get_user()
    c = p.get_companies()
    d = Db('localhost')
    c1 =d.getCompanies()
    print type(c)
    print type(c[0])
    self.assertEqual(type(c),type(c1),'i tipi esterni no')
    self.assertEqual(type(c[0]),type(c1[0]),'i tipi interni no')
def cleanup():
    """Close every open Db and delete generated artifacts from the data dirs."""
    Db.dbCloseAll()
    for dir_path in [config.data_dir, config.data_dir + "-temp"]:
        if not os.path.isdir(dir_path):
            continue
        for file_name in os.listdir(dir_path):
            # Only known generated extensions are removed.
            ext = file_name.rsplit(".", 1)[-1]
            if ext not in ["csr", "pem", "srl", "db", "json", "tmp"]:
                continue
            file_path = dir_path + "/" + file_name
            if os.path.isfile(file_path):
                os.unlink(file_path)
def __init__(self, path):
    """Open content.db at *path*; on a load failure delete the file and recreate it."""
    Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path)
    self.foreign_keys = True
    try:
        self.schema = self.getSchema()
        self.checkTables()
    except Exception, err:  # Python 2 except syntax — file predates py3
        self.log.error("Error loading content.db: %s, rebuilding..." % Debug.formatException(err))
        self.close()
        os.unlink(path)  # Remove and try again
        self.schema = self.getSchema()
        self.checkTables()
def gorev_duzenle(id):
    """Load a single task by Id and render the edit page for it."""
    sql = """ select * from "gorev" where "Id" = %s """
    row = Db().read_first_data(sql, (id,))
    # Columns: Id, GorevAdi, TamamlandiMi.
    data = {
        "id": row[0],
        "gorev_adi": row[1],
        "tamamlandimi": row[2],
    }
    return render_template("duzenle.html", data=data)
def getWordList():
    """Return the unique, lower-cased lyrical themes from the Artist table.

    Themes are comma-separated in the column; parenthesised qualifiers
    such as " (early)" are stripped before de-duplication.
    """
    # NOTE(review): database credentials hard-coded in source.
    db = Db('loe3', 'root', 'youwonder.')
    wordlist = db.database('MetalArchives').table('Artist').select(
        'distinct lyricalThemes').get()
    uniqueWords = []
    for row in wordlist:
        for word in row[0].split(','):
            # Bug fix: raw string — '\s' and '\(' in a normal literal are
            # invalid escape sequences (SyntaxWarning on modern Python).
            testWord = re.sub(r'\s\(.*?\)', '', word).strip().lower()
            if testWord not in uniqueWords:
                uniqueWords.append(testWord)
    return uniqueWords
def on_message(self, body, message): try: print "GOT MESSAGE %r " % message logger.info("Began consume message :: got body :: %r " % body) db = Db(logger) result = db.process_sdp_requests(json.loads(body)) logger.info("message terminate result :: %r " % result.status_code) if result.status_code == 200: message.ack() message.ack() except Exception, e: message.requeue() logger.info("Exception on message consume:: %r " % e)
def gorev_update():
    """Update a task's description and completion flag from the posted form."""
    id = request.form.get('gorev_id')
    gorev_aciklama = request.form.get('gorev_aciklama')
    # Checkbox posts "on" when ticked; anything else means not completed.
    tamamlandimi = request.form.get('tamamlandimi') == "on"
    db = Db()
    sql = """ update "gorev" set "GorevAdi" = %s, "TamamlandiMi" = %s where "Id" = %s """
    db.execute(sql, (gorev_aciklama, tamamlandimi, id))
    return redirect("/")
def test(self, spec_filename):
    """Verify every fixed-width data row for *spec_filename* exists exactly once in the DB."""
    # Converters applied to each sliced field, keyed by the spec's data type.
    function_map = {
        "TEXT": str,
        "INTEGER": int,
        "BOOLEAN": int,
        "DATE": str
    }
    properties = Properties.getProperties()
    db = Db(properties)
    data_files = os.listdir("data")
    # Group data files by their table prefix (text before the first "_").
    file_map = {}
    for data_file in data_files:
        file_info = data_file.split("_")
        if not file_info[0] in file_map:
            file_map[file_info[0]] = []
        file_map[file_info[0]].append(data_file)
    with open("specs/" + spec_filename, "rt") as spec_file:
        filename_info = spec_filename.split(".")
        file = csv.reader(spec_file, delimiter=",")
        next(file)  # Skip first row
        columns = []
        for column in file:
            columns.append({
                "name": column[0],
                "width": int(column[1]),
                "data_type": column[2]
            })
        if filename_info[0] in file_map:
            for data_filename in file_map[filename_info[0]]:
                with open("data/" + data_filename, "rt") as data_file:
                    for line in data_file:
                        # Rebuild a WHERE clause matching every fixed-width field.
                        sql = "SELECT count(*) as total FROM {} WHERE ".format(
                            filename_info[0])
                        index = 0
                        fields = []
                        for column in columns:
                            fields.append(column["name"] + " = '" + str(
                                function_map[column["data_type"]]
                                (line[index:(index + column["width"])])) + "'")
                            index += column["width"]
                        sql += " AND ".join(fields)
                        row = db.select(sql)[0]
                        if row["total"] != 1:
                            raise Exception(
                                "Line: {} was not inserted".format(
                                    line.strip()))
def run(self):
    """ Add a key to the KEYS table, if the keys does not already exist. """
    self.db = Db()
    self.db.connect()
    # Drain the queue until asked to stop, then flush once more and disconnect.
    while True:
        if not self.stopped():
            self.empty_queue()
            continue
        self.empty_queue()
        self.db.disconnect()
        break
    return
def printTicket(self, cancelled=False):
    """Simplified ticket printer.

    Prints the current order's ticket, records it in the database and
    closes this session. Does nothing for an empty (zero-total) order.
    """
    if self.orderTotal.getTotal() > 0:
        self.setTime()
        ticket = Ticket.Ticket(self.collector(), self, cancelled=cancelled)
        # if ticket.exec_():
        #     pass
        printer = Printer.Print()
        printer.Print(ticket)
        printer = None  # release the printer object
        ticket.setParent(None)
        db = Db()
        db.recordTicket(self.collector())
        # Ticket recorded; remove this session from its parent.
        self.parent.deleteSession(self, self.parent.sessionIndex(self))
async def first_keyboard(message):
    """Handle private-chat keyboard actions: delete API keys or return to the main menu."""
    if message.chat.type == 'private':
        if message.text == '❌Delete API keys':
            h = message.from_user.id
            a = Db(db)
            a.destroy(h)  # remove this user's stored API keys
            await message.answer("Okey, deletion successful")
        elif message.text == '↩Back to main menu':
            markups = types.ReplyKeyboardMarkup(resize_keyboard=True)
            item11 = types.KeyboardButton("🧮Calculate entry volume")
            item21 = types.KeyboardButton("⚙Configure API Key⚙")
            markups.add(item11, item21)  # add the buttons to the keyboard
            await message.answer(
                f"Hi, {message.from_user.full_name}!\nThis bot is designed to calculate the volume of entry into a trade on BinanceFutures. If you want to start, click on the '🧮Calculate entry volume' button and then follow the instructions.\nWe wish you a profitable trade💰!",
                reply_markup=markups)
async def sec_key(Secmessage, state: FSMContext):
    """Store the user's secret API key (the last 64 chars of the message text)."""
    try:
        secKey = Secmessage
        skey = (secKey["text"][-64:])  # key assumed to be the final 64 characters — TODO confirm
        h = Secmessage.from_user.id
        a = Db(db)
        a.insert_secret_key(skey, h)
        data = await state.get_data()
        answer1 = data.get("answer1")  # NOTE(review): answer1/answer2 are never used here
        answer2 = secKey
        await Secmessage.answer("Okey, Keys added successfully!")
        await state.finish()
    except Exception:
        await Secmessage.answer("Error. Delete keys and try again")
        await state.finish()
def test_get_user(self):
    """get_user must return the stored user with its creation timestamp intact."""
    username = "******"
    dbconn = Db()
    # Drop microseconds so the round-tripped timestamp compares equal —
    # presumably the column stores second precision; confirm the schema.
    now = datetime.now()
    now = now.replace(microsecond=0)
    formatted_date = now.strftime('%Y-%m-%d %H:%M:%S')
    user = User(username, "userpassword", "testchannel", "testbot",
                "testbotOAuth", "channelToken", formatted_date, formatted_date)
    dbconn.create_user(user)
    exists = dbconn.get_user(user.username)
    self.assertEqual(exists.created, now)
    self.cleanup()
def testCheckTables(self):
    """checkTables must create the built-in tables plus those declared in the schema."""
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": "%s/zeronet.db" % config.data_dir,
        "map": {
            "data.json": {
                "to_table": {
                    "test": "test"
                }
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }
    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db(schema, db_path)
    db.checkTables()
    db.close()

    # Verify tables
    assert os.path.isfile(db_path)
    db = Db(schema, db_path)
    tables = [row["name"] for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'")]
    assert "keyvalue" in tables  # To store simple key -> value
    assert "json" in tables  # Json file path registry
    assert "test" in tables  # The table defined in dbschema.json

    # Verify test table
    cols = [col["name"] for col in db.execute("PRAGMA table_info(test)")]
    assert "test_id" in cols
    assert "title" in cols
    db.close()

    # Cleanup
    os.unlink(db_path)
def search(text, table, column): resByAllWordsArr = [] # [[][][]] justMmm = [] for word in word_cleaner(text): # TODO: or 2 or 3 spaces temp = Db().search_by_word_with_like(DB_NAME, table, column, word) resByAllWordsArr.append(temp) # append добавляет мссив в первую ячейку justMmm += temp r = [] for resByWordArr in list( set(justMmm) ): #list(set(resAllWords)) - все вопросы которые сматчились в поиске предыдущем, ни не повторяются for resArr in resByAllWordsArr: # [] for machedQuestion in resArr: if (machedQuestion == resByWordArr): temp_hash = hashlib.md5( str(machedQuestion[0]).encode("utf-8")).digest() firstStepForThisItem = True for q in r: # Done q1, q2, q3=q# todo: how q['hash'] if q['hash'] == temp_hash: # or q1==temp_hash or q2==temp_hash: firstStepForThisItem = False q['matchedCount'] += 1 # t=any ( tt ) if (firstStepForThisItem): # count ==0 r.append({ 'hash': temp_hash, 'question': machedQuestion, 'matchedCount': 1 }) # else: return r
def start(bot, update):
    """Handle an incoming Telegram message: /start greeting or a search query."""
    # A search query just arrived from the user.
    # More on the update object: https://core.telegram.org/bots/api#update
    # logging.warning(update.message.chat.username)
    print(update.message.chat.username, flush=True)
    if str(update.message.text).startswith("/start"):
        update.message.reply_text(
            "Привет! \r\nТы можешь написать вопрос про *Каи*. :) \r\nА я скину тебе лучшие. \r\nБаза была набрана в основном из 'Подслушано Каи' и офтильтрована нами. \r\nС наилучшими пожеланиями, команда 'Бот Номер Один'. "
        )
        return
    results = search(update.message.text, T_Question_Answer, QUESTION)  # TODO: change the database
    # Top three matches, best (highest matchedCount) first.
    sort = sorted(results, key=lambda k: k['matchedCount'])[:-4:-1]
    fma = For_more_answers()
    fma.message_id_from_usersText = (update.message.message_id)
    # Sends only the question/answer pairs.
    for item in sort:
        keyboard = [[
            InlineKeyboardButton("Показать ответ:",
                                 callback_data=item['question'][0])
        ]]
        reply = InlineKeyboardMarkup(keyboard)
        t = item['question'][1]
        t = update.message.reply_text(str(t), reply_markup=reply)
        fma.messages.append(str(item['question'][0]) + ',' + str(t.message_id))
    gg = fma.Compress_for_recieve()
    keyboard = [[InlineKeyboardButton("Показать еще!", callback_data=gg)]
                ]  # TODO: link to the message
    reply = InlineKeyboardMarkup(keyboard)
    update.message.reply_text("____У нас есть еще:)_____", reply_markup=reply)
    # NOTE(review): SQL built by concatenating user-controlled message text —
    # SQL injection risk; switch to parameterized queries if Db.Execute
    # supports them.
    t = Db().Execute(
        DB_NAME, "INSERT INTO " + T_TELEGRAM_MESSAGES +
        "(message_id, Text, User) VALUES(" + str(update.message.message_id) +
        ", '" + update.message.text + "', '" +
        str(update.message.chat).replace("'", '"') + "' )"
    )
    # bot.sendMessage(chat_id=update.message.chat_id, text=str(t), reply_markup=reply)
def getTestDb(self):
    """Yield a fresh benchmark Db over a throwaway file, then clean it up."""
    from Db import Db
    path = "%s/benchmark.db" % config.data_dir
    if os.path.isfile(path):
        os.unlink(path)
    # Schema maps every json file (".*") into one indexed test table.
    schema = {
        "db_name": "TestDb",
        "db_file": path,
        "maps": {
            ".*": {
                "to_table": {
                    "test": "test"
                }
            }
        },
        "tables": {
            "test": {
                "cols": [["test_id", "INTEGER"], ["title", "TEXT"],
                         ["json_id", "INTEGER REFERENCES json (json_id)"]],
                "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"],
                "schema_changed": 1426195822
            }
        }
    }
    db = Db.Db(schema, path)
    yield db
    # Generator resumed: caller is done with the db.
    db.close()
    if os.path.isfile(path):
        os.unlink(path)
def db(request):
    """Pytest fixture: a checked Db over a throwaway zeronet.db, removed on teardown."""
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": "%s/zeronet.db" % config.data_dir,
        "maps": {
            "data.json": {
                # One json file feeds two tables; the second mapping
                # restricts which columns get imported.
                "to_table": [
                    "test",
                    {"node": "test", "table": "test_importfilter", "import_cols": ["test_id", "title"]}
                ]
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                    ["json_id", "INTEGER REFERENCES json (json_id)"]
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            },
            "test_importfilter": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                    ["json_id", "INTEGER REFERENCES json (json_id)"]
                ],
                "indexes": ["CREATE UNIQUE INDEX test_importfilter_id ON test_importfilter(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }
    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db(schema, db_path)
    db.checkTables()

    def stop():
        # Teardown: close the connection and delete the file.
        db.close()
        os.unlink(db_path)

    request.addfinalizer(stop)
    return db
def test(self, spec_filename):
    """Verify every fixed-width data row for *spec_filename* exists exactly once in the DB."""
    # Converters applied to each sliced field, keyed by the spec's data type.
    function_map = {
        "TEXT": str,
        "INTEGER": int,
        "BOOLEAN": int,
        "DATE": str
    }
    properties = Properties.getProperties()
    db = Db(properties)
    data_files = os.listdir("data")
    # Group data files by their table prefix (text before the first "_").
    file_map = {}
    for data_file in data_files:
        file_info = data_file.split("_")
        if not file_info[0] in file_map:
            file_map[file_info[0]] = []
        file_map[file_info[0]].append(data_file)
    with open("specs/" + spec_filename, "rt") as spec_file:
        filename_info = spec_filename.split(".")
        file = csv.reader(spec_file, delimiter=",")
        next(file)  # Skip first row
        columns = []
        for column in file:
            columns.append({"name": column[0], "width": int(column[1]), "data_type": column[2]})
        if filename_info[0] in file_map:
            for data_filename in file_map[filename_info[0]]:
                with open("data/" + data_filename, "rt") as data_file:
                    for line in data_file:
                        # Rebuild a WHERE clause matching every fixed-width field.
                        sql = "SELECT count(*) as total FROM {} WHERE ".format(filename_info[0])
                        index = 0
                        fields = []
                        for column in columns:
                            fields.append(column["name"] + " = '" + str(function_map[column["data_type"]](line[index:(index + column["width"])])) + "'")
                            index += column["width"]
                        sql += " AND ".join(fields)
                        row = db.select(sql)[0]
                        if row["total"] != 1:
                            raise Exception("Line: {} was not inserted".format(line.strip()))
class App:
    """Minimal routed application backed by a Db connection."""

    user = None
    cfg = None
    db = None

    def __init__(self, cfg):
        self.cfg = cfg
        self.db = Db()
        if self.db:
            self.db.connect(cfg.getDbPath())

    def routing(self):
        """Map of URL regexes to handler methods."""
        return {'/echo(/.*)?': self.echo}

    def echo(self, vars, params):
        """Echo the request params (plus the matched path vars) back as JSON."""
        params[u'path_vars'] = vars
        return self.to_json_response(params)

    def to_json_response(self, data):
        """Serialize *data* into an indented-JSON (content-type, body) pair."""
        body = json.dumps(data, indent=2)
        return ('text/json', body)
import sqlite3
from Db import Db

if __name__ == '__main__':
    # Load the social-network sample schema and run two practice queries.
    db = Db('social.db')
    db.execue_script('social.sql')  # NOTE(review): method name misspelled in Db itself — confirm
    print("1 Find the names of all students who are friends with someone named Gabriel.")
    comm1 = "SELECT Highschooler.name FROM Highschooler " \
            "WHERE Highschooler.ID IN " \
            "(SELECT Friend.ID2 FROM Highschooler, Friend " \
            "WHERE Highschooler.ID = Friend.ID1 " \
            "AND Highschooler.name = 'Gabriel')"
    a = db.query(comm1)
    print(a)
    print("2. For every student who likes someone 2 or more "
          "grades younger than themselves, \n"
          "return that student's name and grade, \n"
          "and the name and grade of the student they like.")
    # Correlated scalar subqueries fetch the liking student's name and grade.
    comm1 = "SELECT (SELECT name FROM HighSchooler, Likes "\
            "WHERE HighSchooler.ID = L.ID1), "\
            "(SELECT grade from HighSchooler, Likes "\
            "WHERE HighSchooler.ID = L.ID1), "\
            "name, grade "\
            "FROM HighSchooler H, Likes L "\
            "WHERE H.ID = L.ID2 and H.grade + 2 <= "\
            "(SELECT grade from HighSchooler "\
            "WHERE HighSchooler.ID=L.ID1)"
    a = db.query(comm1)
    print(a)
import sqlite3
from Db import Db

if __name__ == '__main__':
    # Load the sailors/boats sample schema and run practice queries.
    db = Db('test.db')
    db.execue_script('xjadralci.sql')  # NOTE(review): method name misspelled in Db itself — confirm
    # Sailors with even ratings.
    comm = "SELECT ime, rating FROM Jadralec j WHERE rating%2=0"
    k = db.query(comm)
    print(k)
    # Pairs of distinct boats sharing a name.
    comm = "SELECT c1.ime, c1.cid FROM Coln c1, Coln c2 WHERE c1.ime = c2.ime AND c1.cid != c2.cid"
    k = db.query(comm)
    print(k)
    # Same join expressed with explicit JOIN ... USING and with a WHERE clause.
    comm = "SELECT c.ime, c.dolzina, j.starost FROM Coln c " \
           "JOIN Rezervacija r USING(cid) " \
           "JOIN Jadralec j USING(jid) " \
           "WHERE c.dolzina > 35 " \
           "AND j.starost > 35"
    k = db.query(comm)
    print(k)
    comm = "SELECT c.ime, c.dolzina, j.starost FROM Coln c, Rezervacija r, Jadralec j " \
           "WHERE c.cid = r.cid " \
           "AND r.jid = j.jid " \
           "AND c.dolzina > 35 "\
           "AND j.starost > 35"
    k = db.query(comm)
    print(k)
    # NOTE(review): this last statement assigns an incomplete query (no USING
    # clause, never executed) — looks like leftover work in progress.
    comm = "SELECT DISTINCT j.ime, r.dan FROM Jadralec j LEFT JOIN Rezervacija r"
import os
import csv
from Properties import Properties
from Db import Db

# Converters applied to fixed-width fields, keyed by the spec's data type.
function_map = {
    "TEXT": str,
    "INTEGER": int,
    "BOOLEAN": int,
    "DATE": str
}
properties = Properties.getProperties()
db = Db(properties)
spec_files = os.listdir("specs")
data_files = os.listdir("data")

# map files into a dictionary
file_map = {}
for data_file in data_files:
    file_info = data_file.split("_")
    if not file_info[0] in file_map:
        file_map[file_info[0]] = []
    file_map[file_info[0]].append(data_file)

for spec_filename in spec_files:
    with open("specs/" + spec_filename, "rt") as spec_file:
        filename_info = spec_filename.split(".")
        file = csv.reader(spec_file, delimiter=",")
        next(file)  # Skip first row
        # NOTE(review): the per-spec processing appears truncated here —
        # the rest of this loop body is outside the visible chunk.
import sqlite3
from Db import Db

if __name__ == '__main__':
    # Load the employees/classes sample schemas and run practice queries.
    db = Db('books.db')
    db.execue_script('employees.sql')  # NOTE(review): method name misspelled in Db itself — confirm
    db.execue_script('classes.sql')
    print("Print the names and ages of each employee who works in both "
          "Sports and Travel department.")
    # Self-join on Works/Dept to require membership in both departments.
    comm1 = "SELECT Emp.ename, Emp.age FROM Emp, Works w1, Works w2, Dept d1, Dept d2 "\
            "WHERE Emp.eid = w1.eid AND w1.did = d1.did AND d1.dname = 'Travels' AND " \
            "Emp.eid = w2.eid AND w2.did = d2.did AND d2.dname = 'Sports'"
    a = db.query(comm1)
    print(a)
    # Same question expressed with an IN subquery.
    comm1 = "SELECT Emp.ename, Emp.age FROM Emp, Dept, Works "\
            "WHERE Works.eid = Emp.eid "\
            "AND Works.did = Dept.did " \
            "AND Dept.dname = 'Travels' " \
            "AND Emp.eid IN "\
            "(SELECT Emp.eid FROM Emp, Dept, Works "\
            "WHERE Works.eid = Emp.eid "\
            "AND Works.did = Dept.did " \
            "AND Dept.dname = 'Sports')"
    a = db.query(comm1)
    print(a)
    print("For each department with more than 1 full employees "
          "(where the part-time add up to at least "
          "that many fulltime employees),\n"
def save(self):
    """Persist this object's raw form through a fresh Db connection."""
    Db().save_raw(self)
"cols": [ ["test_id", "INTEGER"], ["title", "TEXT"], ["json_id", "INTEGER REFERENCES json (json_id)"] ], "indexes": ["CREATE UNIQUE INDEX test_key ON test(test_id, json_id)"], "schema_changed": 1426195822 } } } if os.path.isfile("%s/benchmark.db" % config.data_dir): os.unlink("%s/benchmark.db" % config.data_dir) with benchmark("Open x 10", 0.13): for i in range(10): db = Db(schema, "%s/benchmark.db" % config.data_dir) db.checkTables() db.close() yield "." db = Db(schema, "%s/benchmark.db" % config.data_dir) db.checkTables() import json with benchmark("Insert x 10 x 1000", 1.0): for u in range(10): # 10 user data = {"test": []} for i in range(1000): # 1000 line of data data["test"].append({"test_id": i, "title": "Testdata for %s message %s" % (u, i)}) json.dump(data, open("%s/test_%s.json" % (config.data_dir, u), "w"))
def __init__(self, interface):
    """Watch *interface* for LLDP events; location starts as this host's name."""
    super(LldpCtl, self).__init__()
    self.location = socket.gethostname().upper()
    self.interface = interface  # interface name used to filter LLDP callbacks
    self.db = Db()
    self.is_alive = True
class SiteStorage:
    """File and database storage for a single site."""

    def __init__(self, site, allow_create=True):
        self.site = site
        self.directory = "data/%s" % self.site.address  # Site data diretory
        self.log = site.log
        self.db = None  # Db class
        self.db_checked = False  # Checked db tables since startup
        self.event_db_busy = None  # Gevent AsyncResult if db is working on rebuild
        self.has_db = self.isFile("dbschema.json")  # The site has schema
        if not os.path.isdir(self.directory):
            if allow_create:
                os.mkdir(self.directory)  # Create directory if not found
            else:
                raise Exception("Directory not exists: %s" % self.directory)

    # Load db from dbschema.json
    def openDb(self, check=True):
        """Open the site's sqlite database; rebuild first when the file is missing/empty."""
        schema = self.loadJson("dbschema.json")
        db_path = self.getPath(schema["db_file"])
        if check:
            if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0:  # Not exits or null
                self.rebuildDb()
        self.db = Db(schema, db_path)
        if check and not self.db_checked:
            changed_tables = self.db.checkTables()
            if changed_tables:
                self.rebuildDb(delete_db=False)  # Todo only update the changed table datas

    def closeDb(self):
        """Close the database if it is open."""
        if self.db:
            self.db.close()

    # Return db class
    def getDb(self):
        """Lazily open and return the Db (only when the site has a schema)."""
        if not self.db and self.has_db:
            self.openDb()
        return self.db

    # Rebuild sql cache
    def rebuildDb(self, delete_db=True):
        """Recreate the database and re-import every mapped json file.

        Sets event_db_busy so concurrent users can wait for the rebuild.
        """
        self.event_db_busy = gevent.event.AsyncResult()
        schema = self.loadJson("dbschema.json")
        db_path = self.getPath(schema["db_file"])
        if os.path.isfile(db_path) and delete_db:
            if self.db:
                self.db.close()  # Close db if open
            self.log.info("Deleting %s" % db_path)
            try:
                os.unlink(db_path)
            except Exception, err:  # Python 2 except syntax — file predates py3
                self.log.error("Delete error: %s" % err)
        self.openDb(check=False)
        self.log.info("Creating tables...")
        self.db.checkTables()
        self.log.info("Importing data...")
        cur = self.db.getCursor()
        cur.execute("BEGIN")
        cur.logging = False  # silence per-statement logging during bulk import
        found = 0
        s = time.time()
        for content_inner_path, content in self.site.content_manager.contents.items():
            content_path = self.getPath(content_inner_path)
            if os.path.isfile(content_path):  # Missing content.json file
                if self.db.loadJson(content_path, cur=cur):
                    found += 1
            else:
                self.log.error("[MISSING] %s" % content_inner_path)
            for file_relative_path in content["files"].keys():
                if not file_relative_path.endswith(".json"):
                    continue  # We only interesed in json files
                file_inner_path = self.site.content_manager.toDir(content_inner_path)+file_relative_path  # Relative to content.json
                file_inner_path = file_inner_path.strip("/")  # Strip leading /
                file_path = self.getPath(file_inner_path)
                if os.path.isfile(file_path):
                    if self.db.loadJson(file_path, cur=cur):
                        found += 1
                else:
                    self.log.error("[MISSING] %s" % file_inner_path)
        cur.execute("END")
        self.log.info("Imported %s data file in %ss" % (found, time.time()-s))
        self.event_db_busy.set(True)  # Event done, notify waiters
        self.event_db_busy = None  # Clear event
from Db import Db
from Excel import Excel

# Quick manual check (Python 2): fetch one firm by its id and print it.
d = Db()
print d.getFirm_by_id('516cf679a09eee0ce4979f10')
def __init__(self):
    """Connect to the local Redis instance and grab the shared Db singleton."""
    self._r = redis.Redis(host='127.0.0.1', port=6379)
    self._db = Db.getinstance()
def rollback(self):
    """Drop the FileStorage schema, undoing migrate()."""
    Db(self.properties).execute("DROP SCHEMA FileStorage;")
def testQueries(self):
    """Exercise Db's dict-parameter query syntax: inserts, `?` filters, named params."""
    db_path = "%s/zeronet.db" % config.data_dir
    schema = {
        "db_name": "TestDb",
        "db_file": "%s/zeronet.db" % config.data_dir,
        "map": {
            "data.json": {
                "to_table": {
                    "test": "test"
                }
            }
        },
        "tables": {
            "test": {
                "cols": [
                    ["test_id", "INTEGER"],
                    ["title", "TEXT"],
                ],
                "indexes": ["CREATE UNIQUE INDEX test_id ON test(test_id)"],
                "schema_changed": 1426195822
            }
        }
    }
    if os.path.isfile(db_path):
        os.unlink(db_path)
    db = Db(schema, db_path)
    db.checkTables()

    # Test insert
    for i in range(100):
        db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test #%s" % i})
    assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 100

    # Test single select
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": 1}).fetchone()["num"] == 1

    # Test multiple select — a list value expands to an IN clause
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3]}).fetchone()["num"] == 3
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3], "title": "Test #2"}).fetchone()["num"] == 1
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE ?", {"test_id": [1,2,3], "title": ["Test #2", "Test #3", "Test #4"]}).fetchone()["num"] == 2

    # Test named parameter escaping
    assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike", {"test_id": 1, "titlelike": "Test%"}).fetchone()["num"] == 1
    db.close()

    # Cleanup
    os.unlink(db_path)
import sqlite3
from Db import Db

if __name__ == '__main__':
    # Load the classes sample schema and answer one question two ways.
    db = Db('classes.db')
    db.execue_script('classes.sql')  # NOTE(review): method name misspelled in Db itself — confirm
    print("Find all students who took a class in California "
          "from an instructor not in the student's major department and "
          "got a score over 80. Return the student name, university, and score.")
    # Implicit-join version.
    comm1 = "SELECT DISTINCT Student.name, Class.univ, Took.score " \
            "FROM Student, Class, Took, Instructor " \
            "WHERE Student.studID = Took.studID " \
            "AND Instructor.instID = Took.instID " \
            "AND Class.classID = Took.classID " \
            "AND Student.major != Instructor.dept " \
            "AND Took.score >= 80 " \
            "AND Class.region = 'CA'"
    a = db.query(comm1)
    print(a)
    # Explicit JOIN ... ON version of the same query.
    comm1 = "SELECT DISTINCT Student.name, Class.univ, Took.score FROM Student "\
            "JOIN Took ON Student.studID = Took.studID "\
            "JOIN Instructor ON Instructor.instID = Took.instID "\
            "JOIN Class ON Class.classID = Took.classID "\
            "WHERE Student.major != Instructor.dept "\
            "AND Took.score >= 80 "\
            "AND Class.region = 'CA'"
    a = db.query(comm1)
    print(a)
import sqlite3
from Db import Db

if __name__ == '__main__':
    # Load the movie-rating sample schema and run practice queries.
    db = Db('rating.db')
    db.execue_script('rating.sql')  # NOTE(review): method name misspelled in Db itself — confirm
    comm1 = "SELECT Movie.title FROM Movie WHERE director = 'Steven Spielberg'"
    a = db.query(comm1)
    print(a)
    print("2. Find all years that have a movie that received a rating of 4 or 5 "
          "and sort them in increasing order.")
    comm1 = "SELECT DISTINCT Movie.year FROM Movie, Rating " \
            "WHERE Movie.mID = Rating.mID " \
            "AND Rating.stars >= 4 " \
            "ORDER BY Movie.year ASC"
    a = db.query(comm1)
    print(a)
    print("3. Find the titles of all movies that have no ratings.")
    # Anti-join via NOT EXISTS.
    comm1 = "SELECT Movie.title FROM Movie " \
            "WHERE NOT EXISTS "\
            "(SELECT * FROM Rating "\
            "WHERE Movie.mID = Rating.mID)"
    a = db.query(comm1)
    print(a)
    print("4. Some reviewers didn't provide a date with their rating. "
          "Find the names of all reviewers who have "
          "ratings with a NULL value for the date.")
def __init__(self, cfg):
    """Store the config, create the Db and connect it to the configured path."""
    self.cfg = cfg
    self.db = Db()
    # Presumably Db() is falsy on failure, making the connect conditional — confirm.
    if self.db:
        self.db.connect(cfg.getDbPath())
import sqlite3
from Db import Db

if __name__ == '__main__':
    # Load the sailors/boats sample schema and run practice queries.
    db = Db('test.db')
    db.execue_script('xjadralci.sql')  # NOTE(review): method name misspelled in Db itself — confirm
    # Sailors with even / odd ratings.
    comm = 'SELECT ime FROM Jadralec WHERE rating % 2 = 0'
    even = db.query(comm)
    print(even)
    comm = 'SELECT ime FROM Jadralec WHERE rating % 2 != 0'
    odd = db.query(comm)
    print(odd)
    # Pairs of distinct boats sharing a name.
    comm = 'SELECT * FROM Coln c1, Coln c2 WHERE (c1.ime = c2.ime) AND (c1.cid != c2.cid)'
    equal_name = db.query(comm)
    print(equal_name)
    comm = 'SELECT * FROM Coln c JOIN Rezervacija r USING(cid) JOIN Jadralec j' \
           ' USING(jid) WHERE c.dolzina>35 and j.starost<35'
    boat_name = db.query(comm)
    print(boat_name)
    # NOTE(review): the implicit concatenation below joins as
    # "...USING(jid)WHERE..." with no space; SQLite still tokenizes
    # ")WHERE" correctly, but a leading space would be clearer.
    comm1 = 'SELECT DISTINCT j.ime, r.dan FROM Jadralec j LEFT JOIN Rezervacija r USING(jid)' \
            'WHERE r.dan NOT NULL'
    comm2 = 'SELECT DISTINCT j.ime, r.dan FROM Jadralec j LEFT JOIN Rezervacija r USING(jid)'
    reservations1 = db.query(comm1)
    reservations2 = db.query(comm2)
    print(reservations1)
    print(reservations2)
def migrate(self):
    """Create the FileStorage schema with utf8 as its default character set."""
    Db(self.properties).execute("CREATE SCHEMA FileStorage DEFAULT CHARACTER SET 'utf8';")
import os
from FileNames import FileNames
from FormatFile import FormatFile
from DataFile import DataFile
from Query import Query
from Db import Db

dataFolder = "data"
specFolder = "specs"
db = Db()
q = Query()
# For each data file: load its matching spec, parse the fixed-width rows
# and insert every row through a generated parameterized query.
for filename in os.listdir(dataFolder):
    name = FileNames(filename)
    spec = FormatFile(specFolder+"/"+name.specFileName)
    data = DataFile(dataFolder+"/"+filename, spec.getFormat())
    for row in data.getData():
        query, params = q.insert_query(name.tableName, spec.getCols(), row)
        print(query, params)
        db.insert(query, params)