def get_or_downlod_districts(region_code):
    """Return all districts for *region_code*, downloading them first if absent.

    :param region_code: region identifier used by the districts table
    :return: whatever ``DB.get_all_districts_by_region`` yields for the region
    """
    db = DB()
    # Populate the table on a cache miss, then read back through the one path
    # below -- removes the duplicated return the original carried in both branches.
    if not db.region_has_districts(region_code):
        district_to_db(region_code)
    return db.get_all_districts_by_region(region_code)
def recipe_ingredient_unit_options(value):
    """Build the unit-dropdown options for the currently selected ingredient."""
    if value is None:
        # Nothing selected yet: keep the dropdown alive with one blank entry.
        return [{'label': '', 'value': ''}]
    unit_names = DB().return_ingredient_units('ingredients', 'name', value)
    return [{'label': unit, 'value': unit} for unit in unit_names]
def main():
    """Start Finder Terminal and create connection to DB"""
    database = DB()
    db_path = os.path.join(os.getcwd(), 'finder.db')
    database.connect(db_path)
    BaseTerminal(commands=DEFAULT_COMMANDS).run()
def init(args):
    """Hash every image under ``args.dir`` and persist the resulting index."""
    vp_tree = VPTree(distance_fn=image.distance_fn)
    hashed_files = []

    def visit(path):
        # Hash a single file and record it in both the tree and the file list.
        init_func(path, vp_tree, hashed_files, args.hash_type, args.hash_size)

    walkdir(args.dir, args.recursive, visit)
    DB(vp_tree, hashed_files, args.hash_type, args.hash_size).encode(args.db)
def delete_directory(directory, commit=True):
    """Deletes an existing directory record from the database

    :param directory: Directory model object to be deleted
    :param commit: Specifies whether to commit to database
    """
    # Resolve the session once so delete and commit run against the same
    # session object even if DB() is not a strict singleton.
    session = DB().session
    session.delete(directory)
    if commit:
        session.commit()
def append_request(self, request_id):
    """Queue the request for another attempt, or mark it not-found after 3 tries.

    :param request_id: primary key of the request record in the DB
    """
    db = DB()
    request = db.get_request_by_id(request_id)
    if request.try_num < 3:
        # Lazy %-args: the string is only built when INFO logging is enabled.
        # (Message text, typo included, kept byte-identical to the original.)
        logging.info('Add request to shedule id: %s', request_id)
        self.queue.append(request)
    else:
        db.set_not_found(request_id)
def test_get_products_by_query(self):
    """ Test DB. Get products by query """
    database = DB(filename=DATASET_TESTS_FILE)
    found = database.get_products_by_query(query='Multi')
    expected = [('785', 50), ('8588', 30), ('12535', 15)]
    self.assertEqual(found, expected, msg=found)
def test_get_products_by_query2(self):
    """ Test DB. Get products by query """
    database = DB(filename=DATASET_TESTS_FILE)
    found = database.get_products_by_query(query='See By')
    self.assertEqual(found, [('63602', 30)], msg=found)
def test_db_connect_fail() -> None:
    """setup_db with a non-URL path must raise the rfc1738 parse error.

    The original raised its own 'should have failed' Exception *inside* the
    try, so that sentinel was caught by the same except and compared against
    the rfc1738 message -- the test failed, but with a misleading assertion.
    try/except/else keeps the two failure modes distinct.
    """
    try:
        DB.get_instance().setup_db('/fake_db.db')
        DB.get_instance()
    except Exception as error_msg:
        assert str(error_msg) == "Could not parse rfc1738 URL from string '/fake_db.db'"
    else:
        raise AssertionError('DB instance creation should have failed')
def test_get_products_by_query_no_match(self):
    """ Test DB. Get products by query, with query without matchs """
    database = DB(filename=DATASET_TESTS_FILE)
    found = database.get_products_by_query(query='aaaaaaa')
    self.assertEqual(found, [], msg=found)
def test_user_get_or_create() -> None:
    """get_or_create must return a stable, non-None id for the same user."""
    DB.get_instance().setup_db('sqlite://')
    discord_user = DiscordUser("John", "Jonny", "1234")
    with DB.get_instance().make_session() as db_session:
        created_id = User.get_or_create(db_session=db_session,
                                        discord_user=discord_user)
        assert created_id is not None
        # A second call with the identical user must resolve to the same row.
        assert created_id == User.get_or_create(db_session=db_session,
                                                discord_user=discord_user)
def create_session(session, commit=True):
    """Creates a new session record in database

    :param session: Session model object to be inserted
    :param commit: Specifies whether to commit to database
    :return: the inserted Session model object
    """
    # Session.add() returns None, so the original
    # ``session_db = DB().session.add(session)`` always returned None.
    # Return the model object itself, and reuse one session for add + commit.
    db_session = DB().session
    db_session.add(session)
    if commit:
        db_session.commit()
    return session
def delete_file(file, commit=True):
    """Deletes an existing file record from the database

    :param file: File model object to be deleted
    :param commit: Specifies whether to commit to database
    """
    # Remove the file's payload first, then its DB row.
    FileDao.remove_data_in_file(file)
    # One session for delete + commit instead of two DB() lookups.
    session = DB().session
    session.delete(file)
    if commit:
        session.commit()
def create_file(file, commit=True):
    """Creates a new file record in the database

    :param file: File model object to be inserted
    :param commit: Specifies whether to commit to database
    :return: the inserted File model object
    """
    # Session.add() returns None, so the original always returned None;
    # return the model object and reuse one session for add + commit.
    db_session = DB().session
    db_session.add(file)
    if commit:
        db_session.commit()
    return file
def create_directory(directory, commit=True):
    """Creates a new directory record in database

    :param directory: Directory model object to be inserted
    :param commit: Specifies whether to commit to database
    :return: the inserted Directory model object
    """
    # Session.add() returns None, so the original always returned None;
    # return the model object and reuse one session for add + commit.
    db_session = DB().session
    db_session.add(directory)
    if commit:
        db_session.commit()
    return directory
def db_to_ingredient_callback(value):
    # Dash callback helper: fetch one ingredient row by name and unpack its
    # columns into a fixed-order tuple that must line up one-to-one with the
    # registered callback's Output list.
    if value is not None:
        db = DB()
        # query_by_one_param returns a list of matching rows; take the first.
        ingredient = db.query_by_one_param('ingredients','name',value)[0]
        name = ingredient['name']
        category = ingredient['category']
        # Per-unit serving sizes.
        serving_gram = ingredient['serving_gram']
        serving_tbsp = ingredient['serving_tbsp']
        serving_oz = ingredient['serving_oz']
        serving_lbs = ingredient['serving_lbs']
        serving_piece = ingredient['serving_piece']
        serving_ml = ingredient['serving_ml']
        serving_cup = ingredient['serving_cup']
        brand = ingredient['preferred_brand']
        # suggested_store is stored as a JSON string; decode to a Python object.
        store = json.loads(ingredient['suggested_store'])
        # Macronutrients.
        calories = ingredient['calories']
        protein = ingredient['protein']
        fat = ingredient['fat']
        carbs = ingredient['carbs']
        fiber = ingredient['fiber']
        sugar = ingredient['sugar']
        # Fat breakdown.
        sat = ingredient['saturated_fat']
        mono = ingredient['monounsaturated_fat']
        poly = ingredient['polyunsaturated_fat']
        omega3 = ingredient['omega_3_fat']
        omega6 = ingredient['omega_6_fat']
        # Vitamins.
        vitamin_a = ingredient['vitamin_a']
        vitamin_c = ingredient['vitamin_c']
        vitamin_d = ingredient['vitamin_d']
        vitamin_e = ingredient['vitamin_e']
        vitamin_k = ingredient['vitamin_k']
        vitamin_b6 = ingredient['vitamin_b6']
        vitamin_b12 = ingredient['vitamin_b12']
        thiamin = ingredient['thiamin']
        riboflavin = ingredient['riboflavin']
        niacin = ingredient['niacin']
        folate = ingredient['folate']
        pantothenic_acid = ingredient['pantothenic_acid']
        # Minerals.
        calcium = ingredient['calcium']
        iron = ingredient['iron']
        magnesium = ingredient['magnesium']
        phosphorus = ingredient['phosphorus']
        potassium = ingredient['potassium']
        zinc = ingredient['zinc']
        # NOTE(review): the return order below deliberately differs from the
        # assignment order above (e.g. vitamin_b6 sits between niacin and
        # folate) -- it must match the callback's Output order; confirm.
        return name,category,serving_gram,serving_tbsp,serving_oz,\
               serving_lbs,serving_piece,serving_ml,serving_cup,\
               brand,store,calories,protein,fat,carbs,fiber,sugar,\
               sat,mono,poly,omega3,omega6,vitamin_a,vitamin_c,\
               vitamin_d,vitamin_e,vitamin_k,thiamin,\
               riboflavin,niacin,vitamin_b6,folate,\
               vitamin_b12,pantothenic_acid,calcium,\
               iron,magnesium,phosphorus,potassium ,zinc
    else:
        # No ingredient selected: blank every output field.
        # NOTE(review): verify the number of empty strings equals the
        # 40 values returned by the branch above -- a mismatch breaks Dash.
        return '','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','','',''
def __init__(self):
    """Prepare the HTTP session, DB handle, crawl URLs and per-type counters."""
    self.session = requests.session()
    self.session.headers.update(headers)
    self.db = DB()
    self.crawl_timestamp = 0
    # Crawl entry points: main pneumonia page plus the rumor JSON feed.
    self.url = "https://3g.dxy.cn/newh5/view/pneumonia"
    self.rumor_url = "https://file1.dxycdn.com/2020/0127/797/3393185293879908067-115.json"
    # Counters of records scraped so far, one per category.
    self.overall_count = self.province_count = self.area_count = 0
    self.news_count = self.rumor_count = 0
def get_root_directory():
    """Returns root directory, creates one if it does not exist"""
    root_query = DB().session.query(Directory).filter_by(is_root=True)
    root = root_query.first()
    if root is None:
        # No root yet: lazily create it on first access.
        root = DirectoryDao._create_root_directory()
    return root
def regions_to_db():
    """Fetch the region list from the egrp365 API and insert every region row."""
    payload = utils.create_form_data({'method': 'getRegionsList'})
    res = requests.post('https://extra.egrp365.ru/api/extra/index.php',
                        data=payload,
                        headers=utils.HEADERS)
    try:
        json_data = json.loads(res.text)
        if json_data['success']:
            db = DB()
            for item in json_data['data']:
                db.insert(Region(value=item['value'], name=item['name']),
                          'region_code')
    except Exception as e:
        # Log the raw response body so a malformed payload can be diagnosed.
        logging.error('Region loading error. Response text: ' + res.text)
        raise e
def add_new_message(self, discord_user: DiscordUser, timestamp: int,
                    message_channel_id: int, message_word_count: int,
                    message_char_count: int) -> None:
    """
    Inserts message into the db, as well as user if it doesn't exist.
    @param discord_user: The message's user.
    @param timestamp: The message's timestamp in unix-timestamp.
    @param message_channel_id: The message's channel id.
    @param message_word_count: The message's word count.
    @param message_char_count: The message's character count.
    """
    with DB.get_instance().make_session() as db_session:
        # Resolve (or lazily create) the author row before inserting the message.
        author_id = User.get_or_create(db_session=db_session,
                                       discord_user=discord_user,
                                       cache=self.user_cache)
        Message.add_message(db_session=db_session,
                            user_id=author_id,
                            channel_id=message_channel_id,
                            timestamp=timestamp,
                            word_count=message_word_count,
                            char_count=message_char_count,
                            cache=self.message_cache)
def initialize(self):
    """Set CORS/JSON headers, run JWT authorization if present, and open the DB."""
    self.data = {}
    # Static response headers, applied in one pass.
    for header_name, header_value in (
            ('Content-Type', 'application/json; charset=utf-8'),
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Methods', 'OPTIONS,GET,POST,PUT,PATCH,DELETE'),
            ('Access-Control-Allow-Headers',
             'Origin, X-Requested-With, Content-Type, Accept, Authorization')):
        self.set_header(header_name, header_value)
    self.Authorization = self.request.headers.get('Authorization')
    self.JwtAuth = JwtAuth()
    self.User = {}
    self.isAuth = False
    if bool(self.Authorization):
        # A token was supplied: let JwtAuth decide whether it is valid.
        self.JwtAuth.autorize(self.Authorization)
        self.isAuth = self.JwtAuth.Status
        self.User = self.JwtAuth.User
    self.Header = self.request.headers._dict
    self.Header['RemoteIp'] = self.request.remote_ip
    self.DB = DB(self.User)
    if bool(self.request.body):
        self.data = json.loads(self.request.body.decode('utf-8'))
def __init__(self):
    """Wire up the VK client, the DB, every message controller and the essay worker."""
    self.vk = Vk(Config.token)
    self.db = DB(run_seeders=True)
    controller_essay = ControllerEssay()
    # Controllers are consulted in this order; LowPriority stays last.
    controllers = [
        ControllerBaseRules(),
        ControllerStatistics(),
        ControllerActionWithUser(),
        ControllerJokes(),
        ControllerSettings(),
        ControllerCallAdmin(),
        ControllerRandomPost(),
        controller_essay,
        ControllerLowPriority(),
    ]
    self.handlers = [h for controller in controllers for h in controller.handlers]
    self.special_handlers = [h for h in self.handlers if h.get('special')]
    # The essay controller also runs a background worker process.
    essay_worker = Process(target=controller_essay.proceed_essays, args=(self.vk, ))
    essay_worker.start()
def do_scan(self, target, options="-n -sS --host-timeout 600 --open"):
    """Run an nmap scan against *target*, then store a result hash in Mongo.

    :param target: host/network expression handed to nmap
    :param options: raw nmap CLI option string
    """
    db = DB()
    nmproc = NmapProcess(target, options)
    nmproc.run_background()
    # Poll until the background scan finishes.
    while nmproc.is_running():
        print("Nmap Scan running: ETC: {0} DONE: {1}%".format(
            nmproc.etc, nmproc.progress))
        time.sleep(2)
    print("rc: {0} output: {1}".format(nmproc.rc, nmproc.summary))
    # Initialise up-front so the status update below cannot hit a NameError
    # if hashing fails (the original left the name unbound in that case).
    result_hash = None
    try:
        # Derive a stable identifier for this scan from the target string.
        # NOTE(review): hashlib.update needs bytes on Python 3 -- confirm
        # whether target arrives as str or bytes in this code path.
        md5 = hashlib.md5()
        md5.update(target)
        result_hash = md5.hexdigest()  # renamed: 'hash' shadowed the builtin
    except NmapParserException as e:
        print("Exception raised while parsing scan: {0}".format(e.msg))
    # Scan finished: record the outcome.
    print({"status": 1, "result": result_hash})
    db.Nmap.update_one({"target": target}, {"$set": {
        "status": 1,
        "result": result_hash
    }})
def main(argv):
    """Configure logging, seed any empty tables, then run the search pipeline."""
    logging.basicConfig(
        format=
        u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
        level=logging.DEBUG)
    db = DB()
    # Seed reference tables on first run only.
    if db.table_is_empty('request'):
        logging.info('Table request is empty. Filling...')
        xslx_to_db()
    if db.table_is_empty('region_code'):
        logging.info('Table region_code is empty. Filling...')
        regions_to_db()
    search_all()
    # Keep processing newly inserted requests until the listener stops.
    listener = Insertlistner(search_by_id)
    listener.start()
    listener.join()
def get_stats_grouped_by_time(self, channel_id: int, filter_ts=0) -> Dict[str, StatItem]:
    """
    Groups stats by weekly intervals.
    @param channel_id: The Discord channel id number.
    @param filter_ts: Timestamp to use for filtering.
    @return: Dictionary of {str: StatItem}
    """
    # One StatItem per user name, created lazily on first access.
    out_dict: Dict[str, StatItem] = defaultdict(lambda: StatItem())
    sec_in_week = 60 * 60 * 24 * 7
    with DB.get_instance().make_session() as db_session:
        # Per (user, week-bucket): sum of message character counts,
        # restricted to one channel and to timestamps after filter_ts.
        query = (
            db_session.query(
                User.name,
                # hack that compensates for SQLite not
                # having FLOOR function
                func.round((Message.timestamp / sec_in_week) - 0.5).label("day_time"),
                func.sum(Message.char_count)).join(
                    Message, Message.user_id == User.id).filter(
                        Message.channel_id == channel_id).filter(
                            Message.timestamp > filter_ts).group_by(
                                User.name, "day_time").order_by(asc("day_time")))
        # Each row is (name, week_bucket, char_sum); despite the plural name,
        # `timestamps` is the single week-bucket value for that row.
        for name, timestamps, character_count in query:
            # Crude bot filter: skip any user whose name contains "bot".
            if "bot" in name:
                continue
            # NOTE(review): bucket is in weeks; multiplying by 7 presumably
            # converts it to a day index for plotting -- confirm with callers.
            out_dict[name].timestamps.append(np.multiply(timestamps, 7))
            out_dict[name].values.append(character_count)
    return out_dict
def get_files_from_current_directory(current_directory):
    """Return every File record whose parent is *current_directory*.

    :param current_directory: Directory model object specifying current directory
    """
    file_query = DB().session.query(File)
    return file_query.filter_by(directory_id=current_directory.id).all()
def get_directories_from_current_directory(current_directory):
    """Return every Directory record nested directly under *current_directory*.

    :param current_directory: Directory model object specifying current directory
    """
    dir_query = DB().session.query(Directory)
    return dir_query.filter_by(directory_id=current_directory.id).all()
def rebuild(args):
    """Re-hash every image recorded in the existing DB and write a fresh index."""
    old_db = decode(args.db)
    # Explicit CLI values win; zero/empty falls back to what the DB was built with.
    hash_size = args.hash_size if args.hash_size != 0 else old_db.hash_size
    hash_type = args.hash_type if len(args.hash_type) != 0 else old_db.hash_type
    images = [image.Image(path, hash_type, hash_size)
              for path in old_db.image_hashes]
    tree = VPTree(distance_fn=image.distance_fn)
    tree.add_list(images)
    # The file list holds the same Image objects that were inserted in the tree.
    DB(tree, list(images), hash_type, hash_size).encode(args.db)
def add_ingredient_to_recipe(n_clicks, rows, name, quantity, unit):
    """Append one ingredient row, with macros scaled to *quantity*, to *rows*.

    :param n_clicks: Dash button click count (None before the first click)
    :param rows: current table rows; mutated in place and returned
    :param name: ingredient name to look up
    :param quantity: amount entered by the user
    :param unit: unit string; matched against the ingredient's column names
    :return: the (possibly extended) rows list
    """
    # Test for None FIRST: the original `n_clicks > 0 and n_clicks is not None`
    # evaluates `None > 0` and raises TypeError on Python 3.
    if n_clicks is not None and n_clicks > 0:
        db = DB()
        ingredient = db.query_by_one_param('ingredients', 'name', name)[0]
        # Last column whose name contains *unit* wins (same as the original loop).
        db_quantity = None
        for column in ingredient.keys():
            if column.find(unit) != -1:
                db_quantity = float(ingredient[column])
        # NOTE(review): assumes some column name always contains *unit*;
        # otherwise the original died with an unbound local. Fail loudly here.
        if db_quantity is None:
            raise ValueError('no serving column matches unit: {}'.format(unit))
        # Hoist the shared scale factor instead of recomputing it per field.
        ratio = float(quantity) / db_quantity
        rows.append({
            'ingredient': name,
            'quantity': quantity,
            'unit': unit,
            'calories': round(ratio * float(ingredient['calories']), 2),
            'protein': round(ratio * float(ingredient['protein']), 2),
            'fat': round(ratio * float(ingredient['fat']), 2),
            'carbs': round(ratio * float(ingredient['carbs']), 2),
        })
    return rows
def search(request):
    """Match the request's free-text region to a known region code and search it."""
    db = DB()
    db.increase_try_num(request.id)
    target = request.region
    # Closest region by Levenshtein distance over all known region names.
    best = min(
        ((lv.distance(target, region.name), region.value, region.name)
         for region in db.get_all_regions()),
        key=lambda entry: entry[0])
    logging.info('Found match for "{}" is "{}" with dist = {}'.format(
        target, best[2], best[0]))
    region_code = best[1]
    # ' г' marks a city-level request ("г." = city in Russian addresses).
    if ' г' in target:
        search_in_city(request, region_code)
    else:
        search_in_region(request, region_code)
def main():
    """Download the next episode of every anime currently marked as watching."""
    # Firebase domain and credential setup
    fb.setDomain()
    fb.setCredential()
    database = DB()
    for anime in database.watching():
        if not anime['watching']:
            # Entry exists but is paused: nothing to download.
            continue
        print(f'Downloading episode {anime["episode"]} of {anime["name"]}')
        scraper = Scraper(anime['url'])
        # Retry until the scraper gets past the request blocker.
        while True:
            try:
                videos = scraper.get(anime['episode'])
                break
            except RequestBlocked:
                time.sleep(TIMEOUT)
        if not videos:
            print(f'Cannot find download link for episode {anime["episode"]} of {anime["name"]}')
            continue
        filename = f'{anime["name"]} Episode-{anime["episode"]}{FILE_FORMAT}'
        # Only the first mirror is used.
        Downloader(DOWNLOAD_PATH).download(filename, videos[0])
        print(f'Downloaded episode {anime["episode"]} of {anime["name"]}')
        # Point firebase at the next episode for the following run.
        database.update(url=anime['url'], episode=anime['episode'] + 1)
def scan(self, request, context):
    """gRPC entry point: record every target as pending, then scan each in turn."""
    db = DB()
    targets = request.ips
    # Pass 1: register all targets with status 0 before any scan starts.
    for ip in targets:
        db.Nmap.insert({"target": ip, "status": 0})
        print(ip)
    # Pass 2: run the scans sequentially.
    for ip in targets:
        self.do_scan(str(ip))
    return nmap_pb2.ScanResponse(result="success")
class TestDB(unittest.TestCase): def setUp(self): self.actual_output = [] self.db = DB() self.longMessage = True self.test_data_dir = '{}/data/'.format(os.path.dirname(__file__)) def read_lines(self, filename): with open(filename, 'r') as f: result = [line.strip() for line in f if line.strip() != None] return result def test_basic_commands_1(self): self.worker('basic-1') def test_basic_commands_2(self): self.worker('basic-2') def test_transactional_commands_1(self): self.worker('transactional-1') def test_transactional_commands_2(self): self.worker('transactional-2') def test_transactional_commands_3(self): self.worker('transactional-3') def test_transactional_commands_4(self): self.worker('transactional-4') def worker(self, title): out_file = '{}{}-out.txt'.format(self.test_data_dir, title) expected_output = self.read_lines(out_file) queries = self.read_lines('{}{}-in.txt'.format(self.test_data_dir, title)) self.run_queries(queries) self.assertEqual( len(self.actual_output), len(expected_output), 'Query result differ in length from {}'.format(out_file)) for i, actual in enumerate(self.actual_output): self.assertEqual( str(actual), expected_output[i], 'query result line {} does not match {}'.format( i + 1, out_file)) def run_queries(self, queries): for query in queries: try: result = self.db.run(query) if result != None: self.actual_output.append(result) except: break
import os

from db.db import DB
from settings import DATASET_FILE, QUERIES_FILE, DATASET_TESTS_FILE
from utils.read_files import read_queries_file

__author__ = 'josebermudez'

here = os.path.dirname(os.path.abspath(__file__))

if __name__ == '__main__':
    # Charge data base
    db = DB(filename=os.path.join(here, DATASET_FILE))
    # Get queries
    queries = read_queries_file(filename=os.path.join(here, QUERIES_FILE))
    # enumerate(..., start=1) replaces the manual num+1; the single-argument
    # print(...) form is valid on both Python 2 and Python 3, unlike the
    # original print statements.
    for num, query in enumerate(queries, start=1):
        print(num)
        result = db.get_products_by_query(query)
        # Top 10 matches: "<score>,<id>,<name>,<brand>" per line.
        for key, value in result[:10]:
            print("%i,%i,%s,%s" % (int(value), int(key),
                                   db.get_name_by_id(product_id=key),
                                   db.get_brand_by_id(product_id=key)))
def test_db_get_brand_by_id(self):
    """ Test DB. Get brand by id """
    database = DB(filename=DATASET_TESTS_FILE)
    brand = database.get_brand_by_id(product_id=785)
    self.assertEqual('Miso', brand)
def test_db_get_name_by_id(self):
    """ Test DB. Get name by id """
    database = DB(filename=DATASET_TESTS_FILE)
    product_name = database.get_name_by_id(product_id=785)
    self.assertEqual('Miso Beaded Bracelets Pack Multi Multi', product_name)