def sync_batch():
    processes = {}
    db.get_connection()
    for item in models.VpcSyncTask.objects(state='queued'):
        print('Processing', item.account_number, item.region, item.vpc_id)
        p = Process(target=sync, args=(item, ))
        processes[item.id] = p
        item.state = 'running'
        item.start_date = datetime.datetime.utcnow()
        item.save()
    db.close()

    for p in processes.values():
        p.start()
        print(p.pid)

    # wait for the processes
    db.get_connection()
    while True:
        for key in list(processes):
            process = processes[key]
            if process.is_alive():
                continue
            process.join()
            item = models.VpcSyncTask.objects(id=key).first()
            item.state = 'completed'
            item.end_date = datetime.datetime.utcnow()
            item.save()
            del processes[key]
        if not processes:
            break
        else:
            time.sleep(5)
    db.close()
def run(self):
    self.poller.async_run(self.on_update)
    self.updater.start_polling()
    self.updater.idle()
    self.poller.stop()
    self.persist()
    db.close()
def logonUser(self, username=None, callback=None):
    if username is None:
        msg = "logonUser must be passed username"
        print msg
        return msg
    c = db.userData.columns
    statement = db.select([c.userId], c.userName == username)
    conn = db.connect()
    result = conn.execute(statement)
    row = result.fetchone()
    result.close()
    conn.close()
    if row is None:
        msg = "Did not find username '" + username + "' in database"
        print msg
        userId = -1
    else:
        userId = row[c.userId]
    print "Sending userid:", userId
    if callback is None:
        return json.dumps(userId)
    else:
        return callback + "(" + json.dumps(userId) + ")"
def getWorldData(self, worldId=None, callback=None):
    global worldData
    if worldId is None:
        print "getWorldData must be passed worldId"
        return "getWorldData must be passed worldId"
    elif worldId not in worldData:
        c = db.worldData.columns
        statement = db.select(
            [c.worldId, c.worldSizeX, c.worldSizeY, c.worldWrapX, c.worldWrapY, c.worldSeaLevel],
            c.worldId == worldId,
        )
        conn = db.connect()
        result = conn.execute(statement)
        row = result.fetchone()
        result.close()
        conn.close()
        if row is None:
            print "Did not find worldData record, exiting."
            return "Did not find worldData record, exiting."
        worldData[worldId] = WorldData(row)
    if callback is None:
        return json.dumps(worldData[worldId].json())
    else:
        return callback + "(" + json.dumps(worldData[worldId].json()) + ")"
def getUserData(self, userId=None, callback=None):
    global userData
    if userId is None:
        print "getUserData must be passed userId"
        return "getUserData must be passed userId"
    elif userData is None:
        c = db.userData.columns
        statement = db.select([c.userId, c.userName, c.userX, c.userY, c.userZ], c.userId == userId)
        conn = db.connect()
        result = conn.execute(statement)
        row = result.fetchone()
        result.close()
        conn.close()
        if row is None:
            print "Did not find userData record, exiting."
            return "Did not find userData record, exiting."
        userData = UserData(row)
    print userData.json()
    if callback is None:
        return json.dumps(userData.json())
    else:
        return callback + "(" + json.dumps(userData.json()) + ")"
def main():
    db.connect()
    display_title()
    display_stores()
    while True:
        command = input("Command: ")
        if command == "store":
            display_items_by_store()
        elif command == "type":
            display_items_by_types()
        elif command == "cost":
            display_items_by_cost()
        elif command == "add":
            add_item()
        elif command == "del":
            delete_item()
        elif command == "out":
            cashout()
        elif command == "exit":
            break
        else:
            print("Not a valid command. Please try again.\n")
            display_menu()
    db.close()
    print("Bye!")
def delete():
    db.connect()
    username = request.form['username'].strip()
    db.delete_user(username)
    db.close()
    return render_template('deleteSuccessful.html', username=username)
def checkNewNicks(self):
    """ check any new posts for new nicknames """
    query = ("select address, data from posts where post_id > "
             + str(self.savedAllPostId) + " and data like 'nick:%'")
    rows = putQuery(self, query)
    if rows is False:
        return False
    conn = db.open()
    c = conn.cursor()
    for row in rows['rows']:
        c.execute('select id from nicks where address = ?;', (row[0],))
        nickId = c.fetchone()
        nick = row[1].split(':', 1)
        if nickId is None:
            c.execute('insert into nicks (nick, address) values (?,?);', (nick[1], row[0]))
        else:
            c.execute('update nicks set nick = ? where address = ?;', (nick[1], row[0]))
        # also check follows for potential updates
        c.execute('select nick from follows where address = ?;', (row[0],))
        checkNick = c.fetchone()
        if checkNick is not None:
            if nick[1] != checkNick[0]:
                c.execute('update follows set nick=? where address=?;', (nick[1], row[0]))
    db.close(conn)
    return
def main():
    while True:
        print("Welcome to our parking!")
        action = input("Choose an action: \n"
                       "1. Get a ticket \n"
                       "2. Return a ticket \n"
                       "3. Exit \n")
        if action == "1":
            start_time = datetime.datetime.now()
            print("Your ticket: \n"
                  f"Number: {db.start(start_time)} \n"
                  f"Entry time: {start_time}")
        elif action == "2":
            ticket_id = int(input("Enter the ticket number: \n"))
            if db.check_status(ticket_id) == 1:
                end_time = datetime.datetime.now()
                db.end(ticket_id, end_time)
                total = estimate.sum(start=db.show_start(ticket_id), end=db.show_end(ticket_id))
                db.sum(ticket_id, total)
                print(f"Entry time: {db.show_start(ticket_id)}\n"
                      f"Exit time: {db.show_end(ticket_id)}\n"
                      f"Amount due: {total}")
            elif db.check_status(ticket_id) == 2:
                print(f"This ticket was already used {db.show_all(ticket_id)}!")
        elif action == "3":
            db.close()
            break
        else:
            print("Error!")
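# Hedged sketch (assumption): estimate.sum() used above is not shown in this
# section. A plausible shape bills each started hour at a flat rate, assuming
# show_start()/show_end() return datetime objects. The function name and rate
# below are purely illustrative, not the project's actual logic.
import math

def parking_fee(start, end, rate_per_hour=50):
    # bill every started hour between entry and exit, with a one-hour minimum
    hours = math.ceil((end - start).total_seconds() / 3600) or 1
    return hours * rate_per_hour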
def make_bsddb(dbfile, dump_file):
    import bsddb
    db = bsddb.btopen(dbfile, 'w', cachesize=1024 * 1024 * 1024)
    from infogami.infobase.utils import flatten_dict

    indexable_keys = set([
        "authors.key", "works.key",  # edition
        "authors.author.key", "subjects", "subject_places", "subject_people", "subject_times"  # work
    ])
    for type, key, revision, timestamp, json in read_tsv(dump_file):
        db[key] = json
        d = simplejson.loads(json)
        index = [(k, v) for k, v in flatten_dict(d) if k in indexable_keys]
        for k, v in index:
            k = web.rstrips(k, ".key")
            if k.startswith("subject"):
                v = '/' + v.lower().replace(" ", "_")
            dbkey = web.safestr('by_%s%s' % (k, v))
            if dbkey in db:
                db[dbkey] = db[dbkey] + " " + key
            else:
                db[dbkey] = key
    db.close()
    log("done")
def old_initialize():
    engine, session = db.create("sqlite:///db.sqlite3")
    db.create_tables(engine)
    p_snps = 0
    p_pubs = 0
    snps = get_complete_rsids()
    for s in snps:
        time.sleep(.1)
        pubs = get_pmids(s)
        if len(pubs) > 0:
            p_snps = p_snps + 1
            print("Processed {} snps".format(p_snps))
        for p in pubs:
            if not db.check_snp(session=session, id=s, pub=p):
                db.add_snp(session, s, p)
            if not db.check_publication(session=session, id=p):
                info = get_publication(p)
                db.add_publication(session, id=p, title=info["title"], abstract=info["abstract"])
                p_pubs = p_pubs + 1
                print("Processed {} pubs".format(p_pubs))
    db.close(session)
    return ()
def sync_account_region(account_number, region, vpc_id='', new_connection=True):
    rsp = {}
    if new_connection:
        db.get_connection()
    file_name = '/tmp/account-%s-%s' % (account_number, region)
    if vpc_id:
        file_name = '%s-%s' % (file_name, vpc_id)
    logging.basicConfig(filemode='w', level=logging.INFO, filename=file_name,
                        format="%(asctime)s %(message)s")
    logging.info('Syncing Account %s %s %s', account_number, region, vpc_id)
    rsp['vpcs'] = aws_resources.vpc.sync(account_number, region, vpc_id)
    rsp['route-tables'] = aws_resources.route_table.sync(account_number, region, vpc_id)
    rsp['subnets'] = aws_resources.subnet.sync(account_number, region, vpc_id)
    rsp['security-groups'] = aws_resources.security_group.sync(account_number, region, vpc_id)
    rsp['instances'] = aws_resources.instance.sync(account_number, region, vpc_id)
    rsp['classic-load-balancers'] = aws_resources.classic_lb.sync(account_number, region, vpc_id)
    rsp['elastic-load-balancers'] = aws_resources.elastic_lb.sync(account_number, region, vpc_id)
    rsp['tgw-attachments'] = aws_resources.tgw_attachment.sync(account_number, region, vpc_id)
    logging.info(json.dumps(rsp, indent=4))
    if new_connection:
        db.close()
    return rsp
def main():
    db.connect()
    display_books()
    display_authors()
    display_categories()
    cart_list = []
    while True:
        command = input("Command: ")
        if command == "category":
            display_books_by_category()
        elif command == "author":
            display_books_by_author()
        elif command == "title":
            display_books_by_title()
        elif command == "add":
            add_cart(cart_list)
        elif command == "del":
            delete_item(cart_list)
        elif command == "print":
            get_cart(cart_list)
        elif command == "exit":
            break
        else:
            print("Not a valid command. Please try again.\n")
            display_menu()
    db.close()
    print("Bye!")
def delete(self, kwargs=None):
    """
    Delete method for Producer company table
    :return:
    """
    conn, cur = get_conn_cur()
    producer_delete_template = "DELETE FROM \"ProducerCompany\" WHERE id=%s;"
    id = None
    if kwargs:
        if 'id' not in kwargs.keys():
            if self.fields['id'] is not None:
                id = self.fields['id']
        else:
            id = int(kwargs['id'])
    if id:
        try:
            cur.execute(producer_delete_template, (id, ))
            conn.commit()
        except psycopg2.DatabaseError as e:
            conn.rollback()
            raise e
        finally:
            for name in self.fields.keys():
                self.fields[name] = None
            close(conn, cur)
def sync_all_account_regions():
    # for each account and region do a sync in parallel (within account/region it's sequential)
    print('Starting sync all accounts/regions at %s' % datetime.datetime.now())
    accounts = []
    db.get_connection()
    for account in models.Account.objects:
        for region in account.regions:
            accounts.append([account.account_number, region])
    db.close()

    processes = {}
    for account in accounts:
        # account is [account_number, region]
        pname = '%s-%s' % (account[0], account[1])
        p = Process(target=sync_account_region, args=(account[0], account[1]), name=pname)
        print('Started', pname)
        p.start()
        processes[p.pid] = p
    print('Started', len(processes), 'processes')
    wait_for_processes(processes)

    db.get_connection()
    for account in db.get_items(models.Account, page_size=0, json_output=False):
        account.last_updated = datetime.datetime.utcnow()
        account.save()
    db.close()
    print('Ending sync cycle at %s' % datetime.datetime.now())
    return
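# Hedged sketch (assumption): the wait_for_processes() helper that
# sync_all_account_regions() above calls is not shown in this section. A minimal
# version consistent with the pid -> Process map it builds could look like this.
def wait_for_processes(processes):
    # processes maps pid -> multiprocessing.Process; block until all of them finish
    for pid, proc in processes.items():
        proc.join()
        print('Process', pid, 'finished with exit code', proc.exitcode)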
def update(self, kwargs):
    """
    UPDATE method for ItemComposition table
    :param kwargs:
    :return: values of model fields
    """
    conn, cur = get_conn_cur()
    item_composition_update_template = "UPDATE \"ItemComposition\" SET material=%s, weight=%s WHERE id=%s;"
    try:
        for name, value in kwargs.items():
            if name in self.FIELDS.keys():
                kwargs[name] = self.FIELDS[name](value)
    except ValueError as e:
        raise e
    try:
        cur.execute(item_composition_update_template,
                    (kwargs["material"], kwargs["weight"], kwargs["id"]))
        conn.commit()
    except psycopg2.DatabaseError as e:
        conn.rollback()
        raise e
    finally:
        for name in self.fields.keys():
            self.fields[name] = kwargs[name]
        close(conn, cur)
def update(self, kwargs):
    """
    UPDATE method for JewelryShop table
    :param kwargs:
    :return: values of model fields
    """
    conn, cur = get_conn_cur()
    jewelry_shop_update_template = ("UPDATE \"JewelryShop\" SET address=%s, license_copy=%s, "
                                    "schedule_day=%s, schedule_time=%s WHERE id=%s;")
    try:
        for name, value in kwargs.items():
            if name in self.FIELDS.keys():
                kwargs[name] = self.FIELDS[name](value)
    except ValueError as e:
        raise e
    try:
        cur.execute(jewelry_shop_update_template,
                    (kwargs["address"], kwargs["license_copy"], kwargs["schedule_day"],
                     kwargs["schedule_time"], kwargs["id"]))
        conn.commit()
    except psycopg2.DatabaseError as e:
        conn.rollback()
        raise e
    finally:
        for name in self.fields.keys():
            self.fields[name] = kwargs[name]
        close(conn, cur)
def get(self):
    email = str(request.args.get('email'))
    userid = utils.userId(email)
    conn = db.connect()
    cursor = conn.cursor()
    sql = "SET @DAY := -1;"
    cursor.execute(sql)
    cursor.fetchall()
    sql = "SELECT @DAY := @DAY +1 as DAY, (SELECT COUNT(*) FROM diet WHERE day = @DAY and user_id=%d and date between DATE_ADD(NOW(), INTERVAL -1 WEEK) AND NOW()) as eat FROM diet WHERE @DAY < 3 order by DAY;" % (userid)
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close(conn)
    style = week.personType(userid)
    res = {
        "mon": result[0]['eat'],
        "lun": result[1]['eat'],
        "din": result[2]['eat'],
        "sna": result[3]['eat'],
        "style": style
    }
    print(res)
    return res
def add_wagons(loco_id, wagons):
    for wagon in wagons:
        wagon_type_id = add_wagon.get_wagon_type_id(str(wagon["name"]))
        if wagon_type_id is None:
            print "could not find wagon:", str(wagon["name"])
            continue
        sql = '''
            select id from wagons
            where locomotive_id is null and wagon_type_id = ?
            limit ?
        '''
        db.open()
        cur = db.conn.cursor()
        cur.execute(sql, (wagon_type_id, wagon.get("count", 1)))
        wagon_ids = []
        for res in cur.fetchall():
            wagon_ids.append((loco_id, res[0]))
        print "adding", len(wagon_ids), str(wagon["name"]), "wagons"
        sql = '''
            update wagons set locomotive_id = ?
            where locomotive_id is null and id = ?
        '''
        cur.executemany(sql, wagon_ids)
        db.commit()
        db.close()
def main():
    db.connect()
    display_welcome()
    display_categories()
    while True:
        command = input("Command: ")
        if command == "cat":
            display_movies_by_category()
        elif command == "cat-all":
            display_movies_by_all_category()
        elif command == "year":
            display_movies_by_year()
        elif command == "year-all":
            display_movies_by_all_year()
        elif command == "add":
            add_movie()
        elif command == "del":
            delete_movie()
        elif command == "exit":
            break
        else:
            print("Not a valid command. Please try again.\n")
            display_menu()
    db.close()
    print("Bye!")
def setups():
    """set up classifier if not yet trained"""
    if philosophyfilter.is_ready():
        return
    db.close()
    db.connection(db='test_opp')
    ham = scraper.Doc(url='http://umsu.de/papers/magnetism2.pdf')
    ham.load_from_db()
    ham.content = readfile(os.path.join(testdir, 'attitudes.txt'))
    ham.update_db()
    spam = scraper.Doc(url='http://umsu.de/papers/spam.pdf')
    spam.load_from_db()
    spam.content = """
        Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
        tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim
        veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea
        commodo consequat. Duis aute irure dolor in reprehenderit in voluptate
        velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
        occaecat cupidatat non proident, sunt in culpa qui officia deserunt
        mollit anim id est laborum.
    """
    spam.update_db()
    cur = db.cursor()
    query = "SELECT cat_id FROM cats WHERE label=%s LIMIT 1"
    cur.execute(query, ('philosophy',))
    cat_id = cur.fetchall()[0]
    query = ("INSERT IGNORE INTO docs2cats (doc_id, cat_id, strength, is_training) "
             "VALUES (%s, %s, %s, %s)")
    cur.execute(query, (ham.doc_id, cat_id, 1, 1))
    cur.execute(query, (spam.doc_id, cat_id, 0, 1))
    philosophyfilter.update()
def runV2():
    db = shelve.open("./neu.db")
    print len(db)
    raise SystemExit  # debug early exit left in the source: everything below is unreachable until this line is removed
    # for no in xrange(20130001, 20133333):
    for no in xrange(20135000 - 100, 20136000):
        no = str(no)
        if no in db:
            print no, db[no], "√"
            continue
        try:
            t = LibraryV2(no)
            t.login()
            print t.userno, t.name
            db[t.userno] = t.name
            db.sync()
            time.sleep(0.3)
        except KeyboardInterrupt:
            print "Interrupted!"
            raise SystemExit
        except:
            print no, "......", "×"
    db.close()
def do_import():
    musicbrainzngs.set_useragent("SBUIfy", "0.0.1", "*****@*****.**")
    logging.getLogger('musicbrainzngs').setLevel(logging.WARNING)
    db.init()

    # remove existing data
    print('deleting existing data...')
    # delete_images()  todo: only delete artist & album images; move to image module
    db.execute_script('truncate.sql')
    print('done.', flush=True)

    # import albums, songs, and artists for all labels
    db.execute("SELECT * FROM label")
    for label in db.get_cursor().fetchall():
        print('importing label {}...'.format(label['name']))
        import_label_releases(label['id'], label['mbid'])
    print('done.', flush=True)

    # wrap up
    print('finishing...')
    db.execute_script('set_hibernate_sequence.sql')
    print('done.')
    db.close()
def do(i):
    f = sim[0] + str(i).zfill(3)
    r = db.connect(zodb=D + f)
    g = r[f]
    # print f, len(g.scores), 'generations'
    run = f
    model = 'taga'
    if sim == 'pb':
        network_args = g.new_individual_args
    if sim == 'bpg':
        network_args = g.new_individual_args['network_args']
    q = str(network_args['new_node_args']['quanta']).zfill(2)
    if q == '00':
        q = 'fp'
    top = network_args['topology']
    neurons = str(network_args['num_nodes']).zfill(2)
    timing = network_args['update_style']
    mut = g.mut
    mp = str(g.mutationRate)
    genpop = str(len(g)).zfill(3)
    for x in (run, model, q, top, neurons, timing, mut, mp, genpop):
        # print x
        assert type(x) is str
    s = ' '.join((run, model, q, top, neurons, timing, mut, mp, genpop))
    # print s
    for curg in range(0, len(g.scores)):
        score = g.scores[curg].max
        print s, curg, '%.2f' % score
    db.close()
def main(): db.connect() while True: print("VECTA CORP HELP DESK ADMIN LOG IN") print("-" * 95) username = input("Username: "******"Password: "******"\nYour credentials are invalid. Please try again.\n") display_menu() while True: command = input("Enter command: ") if command == "view": view_employees() elif command == "add": add_employee() elif command == "del": delete_employee() elif command == "exit": break else: print("Not a valid command. Please try again. \n") display_menu() db.close() print("The program has been terminated!")
def main():
    db.connect()
    # while True:
    #     print("VECTA CORP HELP DESK USER LOG IN")
    #     print("-" * 110)
    #     username = input("Username: ")
    #     password = input("Password: ")
    #     ...  # credential check redacted ("******") in the source snippet
    #     print("\nYour credentials are invalid. Please try again.\n")
    display_menu()
    while True:
        command = input("Enter command: ")
        if command == "view":
            view_tickets()
        elif command == "issue":
            view_ticket_issue()
        elif command == "add":
            add_ticket()
        elif command == "update":
            update_ticket()
        elif command == "exit":
            break
        else:
            print("Not a valid command. Please try again.")
            display_menu()
    db.close()
    print("The program has been terminated.")
def process_item(self, item, spider):
    if isinstance(item, GuaziItem):
        try:
            db = car_source_pool.connection()
            cursor = db.cursor()
            sql = """INSERT INTO `CAR` (`source_site`,`city_id`,`price`,`new_price`,`discharge`,`miles`,`gear_box`,`title`,`desc_str`,`emission`,`car_city_zh`,`license_date`,`car_src_url`,`car_src_id`,`tel`,`status`,`transfer_num`,`detected`,`personal`,`visit_time`,`publish_date`,`lock_user_id`,`car_finger_hash`) VALUES ('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')""" % (
                item['sourceSite'], 1, item['price'], item['newPrice'], item['discharge'],
                item['miles'], item['gearBox'], item['title'], item['descStr'], item['emission'],
                item['carCityZh'], item['licenseDate'], item['carSrcUrl'], item['carSrcId'],
                item['tel'], item['status'], item['transferNum'], item['detected'],
                item['personal'], item['visitTime'], item['publishDate'], item['lockUserId'],
                item['carFingerHash'])
            result = cursor.execute(sql)
            car_id = int(cursor.lastrowid)
            sql2 = """INSERT INTO `car_img` (`car_id`,`first_img_str`,`img_str`) values ('%s','%s','%s')""" % (
                car_id, item['firstImgStr'], item['imgStr'])
            result2 = cursor.execute(sql2)
            db.commit()
            db.close()
        except Exception as e:
            log.msg("2", level=log.WARNING)
            print e
        finally:
            pass
    return item
def get(self):
    email = str(request.args.get('email'))
    conn = db.connect()
    cursor = conn.cursor()
    sql = "SELECT * FROM user WHERE email='%s'" % (email)
    cursor.execute(sql)
    result = cursor.fetchall()
    db.close(conn)
    if len(result) > 0:
        if result[0]['sex'] == 'F':
            age = result[0]['age']
            key = result[0]['height']
            mom = result[0]['weight']
            kicho = 655.1 + 9.56 * mom + 1.85 * key - 4.68 * age
            return {"result": "success", "kicho": kicho}
        else:
            age = result[0]['age']
            key = result[0]['height']
            mom = result[0]['weight']
            kicho = 66.41 + 13.75 * mom + 5 * key - 6.76 * age
            return {"result": "success", "kicho": kicho}
    else:
        return {"result": "fail"}
def insert(user):
    conn = connDB()
    curs = conn.cursor()
    sql = 'insert into user(username, password) values (%s, %s)'
    param = (user.username, user.password)
    curs.execute(sql, param)
    close(curs, conn)
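# Hedged sketch (assumption): insert() above depends on connDB() and close()
# helpers that are not shown in this section. One plausible shape, using pymysql
# purely as an illustrative driver choice and hypothetical connection settings:
import pymysql

def connDB():
    # open a connection to a hypothetical MySQL database
    return pymysql.connect(host='localhost', user='root', password='',
                           database='test', charset='utf8mb4')

def close(curs, conn):
    # commit pending writes, then release the cursor and the connection
    conn.commit()
    curs.close()
    conn.close()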
def select_knowledge(code):
    conn = db.conn()
    # check whether the requested record exists
    result = db.select(conn, "select * from t_code where code = ?", (code,))
    if result is None:
        db.close(conn)
        return {"code": "-1", "message": "record does not exist"}
    db.close(conn)
    return {"code": result[0], "name": result[1], "type": result[2]}
def GET(self):
    import db
    dishes = db.query("select rowid,* from dish order by rowid")
    db.close()
    try:
        dishes = [x.as_dic() for x in dishes]
    except Exception:
        pass
    return dishes
def killserver(self):
    """ Cleanup all existing feeds then shutdown the server. """
    while len(self.feeds) > 0:
        self.kill_feed(self.feeds[0])
    self.QoS_server.close()
    db.close(self.conn)
def weekAnalyze(userid):
    conn = db.connect()
    cursor = conn.cursor()
    sql = ("select SUM(f.carbohydrate) as car, SUM(f.protein) as pro, SUM(f.fat) as fat "
           "from diet as d JOIN food as f on d.food_id = f.food_id "
           "where user_id=%d and d.date between DATE_ADD(NOW(), INTERVAL -1 WEEK) AND NOW();" % (userid))
    cursor.execute(sql)
    result = cursor.fetchall()[0]
    db.close(conn)
    return result['car'], result['pro'], result['fat']
def count_of_allnet_car_source():
    db = wcar_pool.connection()
    cursor = db.cursor()
    sql = """SELECT COUNT(*) FROM `car_allnet_source`"""
    cursor.execute(sql)
    results = cursor.fetchall()
    db.close()
    return results[0][0]
def get_area_id_by_name(area_name):
    connection = db.connect(db.config)
    with connection.cursor() as cursor:
        sql = "SELECT id FROM area WHERE name = %s"
        cursor.execute(sql, (area_name, ))
        result = cursor.fetchone()[0]
    db.close(connection)
    return result
async def knowledge_select(knowledge: Knowledge):
    conn = db.conn()
    # check whether the requested record exists
    result = db.select(conn, "select * from t_code where code = ?", (knowledge.code,))
    if result is None:
        db.close(conn)
        return {"code": "-1", "message": "record does not exist"}
    db.close(conn)
    return {"code": "0", "message": "success",
            "data": {"code": result[0], "name": result[1], "type": result[2]}}
def stop(self):
    self.running = False
    if self.mouse_handler:
        self.mouse_handler.stop()
    if self.key_handler:
        self.key_handler.stop()
    self.data_handler.stop()
    self.inqueue.put(None)  # Wake up thread waiting on queue
    db.close()
    sys.exit()
def car_brand_map_insert(car_brand):
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """INSERT INTO `car_brand_map`(`273_brand_id`, `273_brand_name`, `168_brand_id`, `168_brand_name`)
             VALUES (%d, '%s', %d, '%s')""" % (
        car_brand['273_brand_id'], car_brand['273_brand_name'],
        car_brand['168_brand_id'], car_brand['168_brand_name'])
    print sql
    result = cursor.execute(sql)
    db.commit()
    cursor.close()
    db.close()
async def knowledge_delete(knowledge: Knowledge):
    conn = db.conn()
    # check whether the requested record exists
    result = db.select(conn, "select * from t_code where code = ?", (knowledge.code,))
    if result is None:
        db.close(conn)
        return {"code": "-1", "message": "record does not exist"}
    db.execute(conn, "delete from t_code where code = ?", (knowledge.code,))
    db.close(conn)
    return {"code": "0", "message": "success"}
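# Hedged sketch (assumption): the knowledge_select()/knowledge_delete() handlers
# above expect a db module exposing conn()/select()/execute()/close(). A minimal
# sqlite3-backed version consistent with their '?' placeholders could look like
# this; the database file name is hypothetical.
import sqlite3

def conn():
    return sqlite3.connect('knowledge.db')

def select(connection, sql, params=()):
    cur = connection.cursor()
    cur.execute(sql, params)
    return cur.fetchone()

def execute(connection, sql, params=()):
    connection.execute(sql, params)
    connection.commit()

def close(connection):
    connection.close()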
def empty_model_count_of_allnet_car_source():
    db = wcar_pool.connection()
    cursor = db.cursor()
    sql = """SELECT COUNT(1) FROM `car_allnet_source` WHERE 1 AND `series_id` > 0"""
    cursor.execute(sql)
    results = cursor.fetchall()
    db.close()
    return results[0][0]
def car_brand_of_168_by_brand_name(brand_name):
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT distinct(`brand_id`) FROM `car_series_hedge_rate` WHERE 1 AND `brand_name` = '%s' limit 1""" % (brand_name)
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    return results
def car_brand_of_all():
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `id`, `name` FROM `car_brand` WHERE 1 AND `status` = 1"""
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    return results
def update_database(vid_fname, thumb_fname, object_id, object_qos):
    """ Update the client group's database. """
    conn = db.connect()
    vfurl = settings.ARCHIVE_FEED_URLS + vid_fname
    ssfurl = 'NULL'
    if thumb_fname is not None:
        ssfurl = settings.ARCHIVE_FEED_URLS + thumb_fname
    db.addArchiveFootage(conn, vfurl, object_id, object_qos, ssfurl)
    db.close(conn)
def car_series_of_brand_id_of_168(brand_id):
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `168_series_id`, `168_series_name` FROM `car_series_168` WHERE 1 AND `168_brand_id` = %d""" % (brand_id)
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    return results
def update_model_id_of_allnet_car_source(model_id, car_sale_id):
    db = wcar_pool.connection()
    cursor = db.cursor()
    sql = """UPDATE `car_allnet_source` SET `model_id` = %d WHERE `id` = %d""" % (model_id, car_sale_id)
    try:
        cursor.execute(sql)
        # utils.debug("update %d's model_id to %d." % (car_sale_id, model_id))
        db.commit()
    except:
        db.rollback()
    db.close()
def empty_model_of_allnet_car_source(start=0, limit=1000):
    db = wcar_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `id`, `series_id`, `title`, `model` FROM `car_allnet_source`
             WHERE 1 AND `series_id` > 0 LIMIT %d, %d""" % (start, limit)
    cursor.execute(sql)
    results = cursor.fetchall()
    db.close()
    return results
def insert_or_update_car_series_map_of_168(series_id_of_273, series_id_of_168):
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """INSERT INTO `car_series_map` (`273_series_id`, `168_series_id`) VALUES(%d, %d)
             ON DUPLICATE KEY UPDATE `168_series_id` = %d""" % (
        series_id_of_273, series_id_of_168, series_id_of_168)
    result = cursor.execute(sql)
    # utils.debug("update %d's model_id to %d." % (car_sale_id, model_id))
    db.commit()
    cursor.close()
    db.close()
    return result
def getNick(self, address):
    """ get the nickname associated with the specified address """
    conn = db.open()
    c = conn.cursor()
    c.execute('select nick from nicks where address = ?;', (address,))
    nick = c.fetchone()
    db.close(conn)
    if nick is None:
        return address
    else:
        return nick[0]
def getFollows(self):
    """ return the list of follows from the db
        update the list of nicks first """
    if pollAllPosts(self):
        checkNewNicks(self)
    conn = db.open()
    c = conn.cursor()
    c.execute('select nick, address from follows where profile=?;', (str(self.agentAddress),))
    follows = c.fetchall()
    db.close(conn)
    return follows
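# Hedged sketch (assumption): checkNewNicks(), getNick() and getFollows() above
# call db.open() and db.close(conn) on a module that is not shown in this
# section. Given the '?' placeholders they use, a sqlite3-backed pair could look
# like this; the database path is hypothetical, and open() deliberately mirrors
# the name used by the callers even though it shadows the builtin.
import sqlite3

DB_PATH = 'agent.db'

def open():
    # return a fresh connection; callers create their own cursors
    return sqlite3.connect(DB_PATH)

def close(conn):
    # persist any pending inserts/updates before releasing the connection
    conn.commit()
    conn.close()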
def price_of_car_model(model_id):
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `guide_price` FROM `car_model` WHERE 1 AND `id` = %d""" % (model_id)
    cursor.execute(sql)
    results = cursor.fetchall()
    if len(results) == 0:
        db.close()
        return -1
    price = float(re.search(r'[\d\.]+', results[0][0]).group()) * 10000
    db.close()
    return price
def car_series_of_all():
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `id`, `name`, `brand_id`, `import_id`, `168_brand_id`
             FROM `car_series`
             LEFT JOIN `car_brand_map` ON `car_series`.`brand_id` = `car_brand_map`.`273_brand_id`
             WHERE 1 AND `car_series`.`status` = 1"""
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    return results
def all_hedge_rates(series_id):
    # note: the original signature took no arguments, but the query below filters
    # on series_id, so it is taken as a parameter here
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `car_series_map`.`273_series_id`, `1st_year`, `2nd_year`, `3rd_year`, `4th_year`,
                    `5th_year`, `6th_year`, `7th_year`, `8th_year`, `9th_year`, `10th_year`
             FROM `car_series_map`
             LEFT JOIN `car_series_hedge_rate`
                 ON `car_series_hedge_rate`.`car_series_id` = `car_series_map`.`168_series_id`
             WHERE `car_series_map`.`273_series_id` = %d;""" % (series_id)
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    if len(results) == 0:
        return None
    return results
def save(result):
    import db
    count = len(result)
    i = 0.00
    db.open()
    db.begin()
    for r in result:
        print '\b' * 8, '%2.2f%%' % (i * 100 / count),
        r['numbers'] = ''.join(r['numbers'])
        r['numbers'] = r['numbers'][:-2] + '+' + r['numbers'][-2:]
        db.tinsert('ssq', r)
        i += 1
    db.commit()
    print '\b' * 8, '100.00%'
    db.close()
def car_series_deal_data(series_id, deal_province_id=None):
    db = wcar_pool.connection()
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    sql = """SELECT `card_time`, `deal_time`, `kilometer`, `guide_price`, `deal_price`
             FROM `car_deal_price_evaluate`
             LEFT JOIN `car_type`.`car_model` AS `car_model`
                 ON `car_model`.`id` = `car_deal_price_evaluate`.`model_id`
             WHERE 1 AND `car_deal_price_evaluate`.`series_id` = %d""" % (series_id)
    if deal_province_id is not None:
        sql += " AND `deal_province_id` = %d" % (deal_province_id)
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    return list(results)
def price_of_replacement_car_model(series_id, model_id):
    db = car_type_pool.connection()
    cursor = db.cursor()
    sql = """SELECT `b`.`guide_price`, `b`.`model_year`
             FROM `car_model` `a`,
                  (SELECT `id`, `sale_name`, `guide_price`, `model_year` FROM `car_model`
                   WHERE `series_id` = %d) `b`
             WHERE `a`.`series_id` = %d and `a`.`id` = %d and `a`.`sale_name` = `b`.`sale_name`
             ORDER BY `model_year` DESC LIMIT 0, 1;""" % (series_id, series_id, model_id)
    cursor.execute(sql)
    results = cursor.fetchall()
    if len(results) == 0:
        db.close()
        return -1
    price = float(re.search(r'[\d\.]+', results[0][0]).group()) * 10000
    db.close()
    return price
def car_model_of_id(id):
    db = car_type_pool.connection()
    cursor = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    sql = """SELECT `guide_price`,
                    UNIX_TIMESTAMP(STR_TO_DATE(CONCAT(`market_year`, '/', IF(`market_month` > 0, `market_month`, 1), '/1'), '%Y/%m/%d')) AS `market_date`,
                    `max_power`, `max_torque`, `max_speed`, `front_track`, `rear_track`
             FROM `car_model` """
    sql += """WHERE `id` = %d AND `guide_price` > 0 LIMIT 0, 1;""" % (id)
    cursor.execute(sql)
    results = cursor.fetchall()
    cursor.close()
    db.close()
    if len(results) == 0:
        return None, None
    return results[0]
def close(self):
    """If the database keeps a connection, close it."""
    for db in self.dbs:
        result = db.close()
        if result != self.OK:
            return result
    return self.OK