def copy_db_dev_prod(uri_db_dev, uri_db_prod):
    db_dev = get_db(uri_db_dev)
    db_prod = get_db(uri_db_prod)

    # PosterWeb.__table__.drop(db_prod)
    drop_posterweb(uri_db_prod)
    db_prod = get_db(uri_db_prod)

    data_dev = db_dev.query(Poster.id, Poster.closest_posters, Poster.title_display).all()
    data_prod = [PosterWeb(x.id, x.closest_posters, x.title_display) for x in data_dev]

    db_prod.bulk_save_objects(data_prod)
    db_prod.commit()
def main(argv):
    # arguments parsing
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config',
                        help="config file (default: config/development.conf)",
                        default="./config/development.conf")
    args = parser.parse_args()

    config = utils.read_config(args.config)
    years = range(config['scraping']['years_range'][0],
                  config['scraping']['years_range'][1] + 1)
    n_proc = config['scraping']['n_proc']

    # create the folders in which the posters will be downloaded
    for year in years:
        utils.create_folder('{}/{}/posters'.format(PATH_IMGS, year))
        utils.create_folder('{}/{}/thumbnails'.format(PATH_IMGS, year))

    # Download the posters with multiprocessing (much faster than a single process)
    print('Retrieve url of posters')
    with Pool(n_proc) as p:
        yearly_urls = p.map(get_yearly_url_imgs, years)
    yearly_urls = list(itertools.chain.from_iterable(yearly_urls))

    # push to db
    session = db_manager.get_db(config['general']['db_uri'])
    objects = [db_manager.Poster(x) for x in yearly_urls]
    session.bulk_save_objects(objects)
    session.commit()
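# A minimal sketch of the structure that utils.read_config() is assumed to return
# for config/development.conf, based only on the keys read above. The URI, year
# bounds and process count below are hypothetical placeholders, not values taken
# from the project, and the actual on-disk format of the config file is not shown here.
example_config = {
    'general': {'db_uri': 'sqlite:///dev_posters.db'},    # hypothetical dev database URI
    'scraping': {'years_range': [1980, 2017],             # hypothetical inclusive year bounds
                 'n_proc': 4},                             # hypothetical worker count for Pool()
}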
def check_login2(name, password):
    db = get_db()
    # NOTE: interpolating user input into SQL like this is injection-prone;
    # see the parameterized sketch below
    row = db.select("select id from user where name='%s' and password='%s' limit 1" % (name, password))
    if not row or len(row) == 0:
        return False
    user_id = row[0][0]
    access_token = "%d" % user_id
    # redis_store.set("dd.access%s" % access_token, user_id)
    return (user_id, access_token)
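# A minimal sketch of a parameterized variant of check_login2(). It assumes the
# object returned by get_db() also exposes a standard DB-API cursor, as save()
# further below does; the exact API of this db wrapper is not shown here, so the
# function name and cursor usage are illustrative, not the project's own code.
def check_login2_parameterized(name, password):
    db = get_db()
    cursor = db.cursor()
    # let the driver escape the values instead of interpolating them into the SQL string
    cursor.execute(
        "select id from user where name=%s and password=%s limit 1",
        (name, password))
    row = cursor.fetchone()
    if not row:
        return False
    user_id = row[0]
    access_token = "%d" % user_id
    return (user_id, access_token)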
def cache_users_data():
    db = get_db()
    all_users = db.select("select * from user")
    db.close()
    for u in all_users:
        user_cache["dd.user%s.password%s" % (u[1], u[2])] = u[0]
    r = get_redis_store()
    user_cache["dd.user.min_id"] = int(r.hget("dd.user", "min_id"))
    user_cache["dd.user.max_id"] = int(r.hget("dd.user", "max_id"))
def cache_foods_data():
    r = get_redis_store()
    food_cache["dd.food.min_id"] = int(r.hget("dd.food", "min_id"))
    food_cache["dd.food.max_id"] = int(r.hget("dd.food", "max_id"))
    food_cache["dd.food.json"] = r.get("dd.food.json")
    db = get_db()
    all_foods = db.select("select * from food")
    db.close()
    for f in all_foods:
        food_cache["dd.food%d.price" % f[0]] = f[2]
def create_db_prod(uri_db_dev, uri_db_prod, path_img, path_thumb):
    copy_db_dev_prod(uri_db_dev, uri_db_prod)
    db_dev = get_db(uri_db_dev)

    create_folder(path_img)
    create_folder(path_thumb)

    data_posters = db_dev.query(Poster.id, Poster.base64_img, Poster.base64_thumb).all()
    for p in data_posters:
        img = pil_image.open(BytesIO(base64.b64decode(p.base64_img)))
        img.save('{}/{}.jpg'.format(path_img, p.id))
        img_thumb = pil_image.open(BytesIO(base64.b64decode(p.base64_thumb)))
        img_thumb.save('{}/{}.jpg'.format(path_thumb, p.id))
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config',
                        help="config file (default: config/development.conf)",
                        default="./config/development.conf")
    args = parser.parse_args()

    config = utils.read_config(args.config)
    db = db_manager.get_db(config['general']['db_uri'])
    data = db.query(Poster).all()
    _ = get_2d_features(data, db, config)
    _ = get_closest_features(data, db, config)
def main(argv):
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config',
                        help="config file (default: config/development.conf)",
                        default="./config/development.conf")
    args = parser.parse_args()

    config = utils.read_config(args.config)

    # Load VGG16 or ResNet50; you had better have a GPU for this...
    model = load_model(config)

    db = db_manager.get_db(config['general']['db_uri'])
    data_features = get_features(model, db)
    db.commit()
    return data_features
def save(info_raw):
    db = db_manager.get_db()
    cursor = db.cursor()
    # parameterized insert so the raw value is escaped by the driver
    # (the original inserted a hardcoded placeholder title and ignored info_raw)
    cursor.execute("insert into info_raw(title) values(%s)", (info_raw,))
    db.commit()
    db.close()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from db_manager import get_db, get_redis_store
import json

# cache all data in redis
db = get_db()
rows = db.select('select min(id) from food')
min_food_id = rows[0][0]
rows = db.select('select max(id) from food')
max_food_id = rows[0][0]
rows = db.select('select min(id) from user')
min_user_id = rows[0][0]
rows = db.select('select max(id) from user')
max_user_id = rows[0][0]
all_foods = db.select('select * from food', is_dict=True)
all_users = db.select('select * from user')
db.close()

myr = get_redis_store()
myr.flushdb()
myr.hset('dd.food', 'min_id', min_food_id)
myr.hset('dd.food', 'max_id', max_food_id)
myr.hset('dd.user', 'min_id', min_user_id)
myr.hset('dd.user', 'max_id', max_user_id)
myr.set('dd.food.json', json.dumps(all_foods))