def remove_manager(self, db_session: Session, manager_id):
    """Delete the manager with the given id and return the deleted row.

    Raises:
        NoResultFound: if no manager with ``manager_id`` exists.
    """
    target = db_session.query(Manager).get(manager_id)
    if target is None:
        raise NoResultFound("Manager not found")
    db_session.delete(target)
    db_session.commit()
    return target
def remove_time_table(self, db_session: Session, time_table_id: int):
    """Delete the time-table row with the given id and return the deleted row.

    Raises:
        NoResultFound: if no time table with ``time_table_id`` exists.
    """
    time_table = db_session.query(TimeTable).get(time_table_id)
    # FIX: the original checked ``time_table_id is None`` (the argument,
    # which is never None here) instead of the query result, so a missing
    # row fell through to ``delete(None)`` instead of raising.
    if time_table is None:
        raise NoResultFound('Time Table not found')
    db_session.delete(time_table)
    db_session.commit()
    return time_table
def is_current_user_admin(self, db_session: Session, user_id: int,
                          community_id: int):
    """Return True when the user administers the community or is a global admin.

    Raises:
        NoResultFound: if the community or the user does not exist.
    """
    community = (db_session.query(Community)
                 .options(lazyload("admins"))
                 .get(community_id))
    if community is None:
        raise NoResultFound("Community not found")
    user = db_session.query(User).get(user_id)
    if user is None:
        raise NoResultFound("User not found")
    # Either community-level admin membership or the global admin flag qualifies.
    return bool(user in community.admins or user.is_admin)
def test_manage_model():
    """
    Test for create a new table at memory database
    :return:
    """
    engine = Engine.create(ini=ini)
    # Persist a single Domain record in one session...
    with Session(engine=engine) as session:
        session.add(Domain('test', 'https://formed_url.onion'))
        session.commit()
    # ...then verify visibility from a fresh session.
    with Session(engine=engine) as session:
        found = session.query(Domain).filter(Domain.uuid == 'test').count()
        missing = session.query(Domain).filter(
            Domain.uuid == 'is_not_exist').count()
        assert found == 1
        assert missing == 0
def get_by_vk_id(self, db_session: Session, community_vk_id: int):
    """Fetch a community by its VK id, with managers/admins lazily loaded.

    Raises:
        NoResultFound: if no community has this VK id.
    """
    query = db_session.query(Community).options(
        lazyload("managers"), lazyload("admins"))
    found = query.filter_by(community_vk_id=community_vk_id).first()
    if found is None:
        raise NoResultFound("Community not found")
    return found
def get_time_table(self, db_session: Session, manager_id: int,
                   day: str) -> list:
    """Return all time-table rows of a manager for one day of the week.

    Rows are ordered by their ``start_work`` time.  May be empty.

    FIX: the return annotation claimed a single ``TimeTable`` although
    ``.all()`` always yields a list of ``TimeTable`` rows.
    """
    time_table = (db_session.query(TimeTable)
                  .filter_by(manager_id=manager_id)
                  .filter_by(day_of_the_week=day)
                  .order_by(TimeTable.start_work)
                  .all())
    return time_table
def login(email, password, **kwargs):
    '''
    Authenticate user credentials
    ---
    tags:
      - user
    responses:
      200:
        description: OK
        content:
          application/json:
            schema:
              type: object
              properties:
                success:
                  type: boolean
                message:
                  type: string
                session:
                  type: object
                  properties:
                    token:
                      type: string
                    expires:
                      type: string
    '''
    # Normalize the address; a validation failure is deliberately reported
    # as a generic login failure so the endpoint does not reveal whether
    # the email or the password was wrong.
    try:
        email_results = validate_email(email)
        email = '{0}@{1}'.format(email_results.local_part.lower(),
                                 email_results.domain)
    except EmailNotValidError:
        # Treat verification failure as normal login failure
        return jsonify({'success': False, 'message': 'Invalid login details'})
    user = db.session.query(User).filter(User.email == email).limit(1).first()
    # FIX: compare against None with ``is`` (identity), not ``==``;
    # also dropped the unused ``as ex`` binding above.
    if user is None or not user.verify_password(password):
        return jsonify({'success': False, 'message': 'Invalid login details'})
    # Issue a 24-hour session token for the authenticated user.
    session = Session(user_id=user.user_id,
                      expires=datetime.now() + timedelta(days=1))
    db.session.add(session)
    db.session.commit()
    session_data = session.dump()
    return jsonify({'success': True, 'message': '', 'session': session_data})
def test_database_session():
    """
    Test for connect database session
    :return:
    """
    engine = Engine.create(ini=ini)
    # Opening a session should yield a truthy session object.
    with Session(engine=engine) as opened:
        assert opened
def connect(self):
    """Open a MySQL connection using self.config and wrap it in a Session."""
    connection = connector.connect(
        host=self.config.host,
        user=self.config.username,
        database=self.config.database,
        password=self.config.password,
        collation='utf8_unicode_ci',
        charset='utf8',
    )
    return Session(connection)
def test_session(self):
    """Round-trip a Session row and verify explicit and implicit fields."""
    # generate object and save to db
    moment = datetime.utcnow()
    stored = Session(0, moment)
    db.session.add(stored)
    db.session.commit()
    # attempt to fetch
    fetched = db.session.query(Session).limit(1).first()
    # explicit data survives the round trip
    self.assertEqual(fetched.user_id, 0)
    self.assertEqual(fetched.expires, moment)
    # implicit data: created_at was set before "now", and a session whose
    # expiry equals its creation moment is already expired
    self.assertLess(fetched.created_at, datetime.utcnow())
    self.assertTrue(fetched.is_expired())
    # cleanup
    db.session.delete(fetched)
    db.session.commit()
def save(self, id, obj):
    """Save crawled data into database and Elasticsearch.

    Loads the Domain row for ``id``, runs every active pipeline over the
    crawled ``obj`` (best-effort: a pipeline failure is logged, not
    propagated), then stores the webpage document, its S3 screenshot, and
    the port-scan results via Elastic.
    """
    Log.i("Saving crawled data")
    meta = {
        'id': id,
    }
    engine = Engine.create(ini=self.ini)
    with Session(engine=engine) as session:
        domain = session.query(Domain).filter_by(uuid=id).first()
        engine.dispose()
    # pass the pipeline before saving data (for preprocessing)
    for pipeline in pipelines.__all__:
        _class = pipeline(domain, data=obj, ini=self.ini)
        if _class.active:
            Log.d(f"handling the {_class.name} pipeline")
            try:
                _class.handle()
            # FIX: the original bare ``except:`` also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception while
            # keeping the deliberate best-effort behaviour.
            except Exception:
                Log.e(f"Error while handling {_class.name} pipeline")
        else:
            Log.d(f"{_class.name} pipeline isn't active")
        del _class
    with Elastic(ini=self.ini):
        # upload screenshot at Amazon S3
        screenshot = self.upload_screenshot(obj.webpage.screenshot, id)
        Webpage(
            meta=meta,
            url=obj.webpage.url,
            domain=obj.webpage.domain,
            title=obj.webpage.title,
            time=datetime.now(),
            source=obj.webpage.source,
            screenshot=screenshot,
            language=obj.webpage.language,
            headers=obj.webpage.headers,
            tree=obj.webpage.tree,
        ).save()
        Port(meta=meta,
             services=[
                 Service(number=port['number'], status=port['status'])
                 for port in obj.port
             ]).save()
def set_settings(self, db_session: Session, setting_to_set: SettingSet):
    """Create and persist a Setting row from the given payload; return it."""
    created = Setting(
        welcome_speech=setting_to_set.welcome_speech,
        color_button=setting_to_set.color_button,
        community_id=setting_to_set.community_id,
    )
    db_session.add(created)
    db_session.commit()
    # Re-read server-generated values (e.g. the primary key) into the instance.
    db_session.refresh(created)
    return created
def add_time_table(self, db_session: Session, time_to_set: TimeTableSet):
    """Create and persist a TimeTable row from the given payload; return it."""
    created = TimeTable(
        manager_id=time_to_set.manager_id,
        day_of_the_week=time_to_set.day_of_the_week,
        start_work=time_to_set.start_work,
        end_work=time_to_set.end_work,
    )
    db_session.add(created)
    db_session.commit()
    # Re-read server-generated values (e.g. the primary key) into the instance.
    db_session.refresh(created)
    return created
def update_settings(self, db_session: Session, community_id: int,
                    setting_to_update: SettingUpdate):
    """Overwrite the community's settings with the given payload.

    Raises:
        NoResultFound: if the community has no settings row.
    """
    existing = db_session.query(Setting).filter_by(
        community_id=community_id).first()
    if existing is None:
        raise NoResultFound('Setting not found')
    existing.welcome_speech = setting_to_update.welcome_speech
    existing.color_button = setting_to_update.color_button
    db_session.add(existing)
    db_session.commit()
    db_session.refresh(existing)
    return existing
def update_time_table(self, db_session: Session, time_table_id: int,
                      time_to_update: TimeTableUpdate):
    """Overwrite a time-table row with the given payload.

    Raises:
        NoResultFound: if no time table with ``time_table_id`` exists.
    """
    existing = db_session.query(TimeTable).filter_by(
        id=time_table_id).first()
    if existing is None:
        raise NoResultFound('Time Table not found')
    existing.day_of_the_week = time_to_update.day_of_the_week
    existing.start_work = time_to_update.start_work
    existing.end_work = time_to_update.end_work
    db_session.add(existing)
    db_session.commit()
    db_session.refresh(existing)
    return existing
def handle(self):
    """Extract bitcoin-style addresses from the page source and link each
    valid one to the current domain in the database."""
    super(BitcoinPipeline, self).handle()
    # Base58 addresses starting with 1 or 3, 27-34 chars total.
    candidates = re.findall(r'([13][a-km-zA-HJ-NP-Z0-9]{26,33})',
                            self.data.webpage.source)
    engine = Engine.create(ini=self.ini)
    with Session(engine=engine) as session:
        for candidate in candidates:
            if not self.validate_address(candidate):
                continue
            Log.d("{} address is valid address".format(candidate))
            row = get_or_create(session, Address, address=candidate)
            row.domains.append(self.domain)
            session.add(row)
            session.commit()
    engine.dispose()
def add_manager(self, db_session: Session, community_id: int,
                manager_to_add: ManagerCreate):
    """Create a Manager and attach it to the community; return the community.

    Raises:
        NoResultFound: if the community does not exist.
    """
    community = (db_session.query(Community).options(
        lazyload("managers")).get(community_id))
    # FIX: a missing community previously crashed with AttributeError on
    # ``community.managers``; raise NoResultFound like the sibling methods.
    if community is None:
        raise NoResultFound("Community not found")
    manager = Manager(
        phone=manager_to_add.phone,
        name=manager_to_add.name,
        is_blocked=manager_to_add.is_blocked,
    )
    community.managers.append(manager)
    db_session.add(community)
    db_session.commit()
    db_session.refresh(community)
    return community
def _end_guessing_game(self, user):
    """Stop the running guessing game, archive the session, and upload a CSV report.

    Clears all guess queues and game state, appends the current session to the
    streamer's history, resets every participant's session points, writes the
    session's guesses to a local CSV under ./reports, and uploads that file to
    the S3 bucket named by the S3_BUCKET environment variable.

    :param user: mapping with a 'username' key; only used in the log message.
    :return: status message string, or None when no game was running.
    """
    if not self.state['running']:
        self.logger.info('Guessing game not running')
        return None
    # Drop any outstanding guesses of every category.
    self.guesses['item'] = deque()
    self.guesses['medal'] = deque()
    self.guesses['song'] = deque()
    # Reset the game state machine.
    self.state['running'] = False
    self.state['freebie'] = None
    self.state['mode'].clear()
    self.state['songs'].clear()
    self.state['medals'].clear()
    # Archive the finished session and start a fresh one.
    self.database['streamer'].sessions.append(
        self.database['current-session'])
    self.database['latest-session'] = self.database['current-session']
    self.database['current-session'] = Session()
    # Session points are per-game; zero them for everyone.
    for participant in self.database['streamer'].participants:
        participant.session_points = 0
    self.database['streamer'].save()
    self.database['streamer'].reload()
    # Colons are invalid in some filesystems' names, so replace them.
    filename = str(datetime.now()).replace(':', '_')
    file = os.path.join(os.path.curdir, 'reports', filename + '.csv')
    amazon_s3 = boto3.resource('s3')
    if not os.path.exists(file):
        try:
            os.makedirs(os.path.dirname(file))
        except OSError as exc:
            # Only tolerate "directory already exists"; re-raise anything else.
            if exc.errno != errno.EEXIST:
                raise
    # NOTE(review): the file handle from open() is never closed explicitly —
    # consider a ``with`` block here.
    report_writer = csv.writer(open(file, 'w'))
    # One CSV row per guess of the just-ended session.
    for guess in self.database['latest-session'].guesses:
        report_writer.writerow([
            guess.timestamp, guess.participant, guess.participant_name,
            guess.guess_type, guess.guess, guess.session_points,
            guess.total_points
        ])
    bucket = amazon_s3.Bucket(os.environ['S3_BUCKET'])
    # The uploaded key re-reads the clock, so it differs slightly from
    # the local filename.
    bucket.upload_file(file,
                       str(datetime.now()) + '.csv',
                       ExtraArgs={'ACL': 'public-read'})
    message = 'Guessing game ended by %s' % user['username']
    self.logger.info(message)
    return message
def create(self, db_session: Session, community_to_create: CommunityCreate,
           user_id: int):
    """Create a community from VK info; if the user exists, add them as admin."""
    info = services.vk_service.get_community_info(
        community_to_create.api_key, community_to_create.community_vk_id)
    new_community = Community(
        community_vk_id=community_to_create.community_vk_id,
        avatar_url=info["photo_200"],
        name=info["name"],
    )
    creator = db_session.query(User).get(user_id)
    # A missing user is tolerated: the community is created without admins.
    if creator is not None:
        new_community.admins.append(creator)
    db_session.add(new_community)
    db_session.commit()
    db_session.refresh(new_community)
    return new_community
def remove(self, db_session: Session, community_id: int, user_id: int):
    """Delete a community, permitted only to one of its admins.

    Raises:
        NoResultFound: if the community or the user does not exist.
        Exception: if the user is not an admin of the community.
    """
    community = (db_session.query(Community)
                 .options(lazyload("admins"))
                 .get(community_id))
    if community is None:
        raise NoResultFound("Community not found")
    requester = db_session.query(User).get(user_id)
    if requester is None:
        raise NoResultFound("User not found")
    if requester not in community.admins:
        raise Exception("User is not admin of this community")
    db_session.delete(community)
    db_session.commit()
    return community
def download_recipe_meta_data(cls):
    """Download and persist recipe metadata (name, slug, url, thumbnail).

    Walks the paginated recipe archive, upserting a Recipe row (with a
    thumbnail Asset) per entry, and returns once RECIPE_BATCH_COUNT
    recipes have been processed or the pages run out.
    """
    session = Session()
    # loading bar based on recipe count
    recipe_count = 1
    recipe_batch_count = cls.get_recipe_batch_count()
    cls.logger.info('Parsing and saving recipe meta data')
    with tqdm(total=recipe_batch_count, unit=' recipes') as pbar:
        for page in range(1, cls.max_api_page):
            req = requests.get(RECIPE_API, params={'page': page})
            soup = BeautifulSoup(req.text, 'html.parser')
            section = soup.find('section', {'id': 'archive-recipes'})
            recipes = section.findAll('li')
            for recipe_html in recipes:
                # Stop once the configured batch size is reached.
                if recipe_count > RECIPE_BATCH_COUNT:
                    return
                slug = recipe_html.a['href'].replace(
                    '/plant-based-recipes/', '')
                recipe = get_or_create(session, Recipe, slug=slug)
                recipe.name = recipe_html.img['title']
                cls.logger.info('Parsing recipe: ' + recipe.name)
                recipe.slug = slug
                recipe.url = 'https://' + cls.hostname + \
                    str(recipe_html.a['href'])
                recipe.origin = SERVICE_CODE
                recipe.country = 'US'
                assets = []
                thumbnail_url = recipe_html.img['src']
                thumbnail = get_or_create(
                    session, Asset, url=thumbnail_url)
                thumbnail.type = 'thumbnail'
                thumbnail.url = thumbnail_url
                assets.append(thumbnail)
                recipe.assets = assets
                session.add(recipe)
                session.commit()
                pbar.update(1)
                recipe_count += 1
            # FIX: removed the dead ``page += 1`` that was here —
            # ``for page in range(...)`` rebinds ``page`` on every
            # iteration, so the increment had no effect.
def save(self):
    """ Save domain on database and request crawling.
    :return: None
    """
    engine = Engine.create(self.ini)
    with Session(engine=engine) as session:
        # FIX: iterate over a snapshot — the original removed items from
        # self.urls while iterating self.urls itself, which silently
        # skipped every other url.
        for url in list(self.urls):
            task_id = uuid4().hex
            try:
                # add url into database
                session.add(Domain(uuid=task_id, url=url))
                session.commit()
                task = run_crawler.apply_async(args=(url, ),
                                               task_id=task_id)
                Log.i("Crawler issued a new task id {} at {}".format(
                    task.task_id, url))
            # FIX: narrowed the bare ``except:`` so KeyboardInterrupt and
            # SystemExit still propagate; duplicates are still skipped.
            except Exception:
                Log.d(
                    "This {} url already saved into database.".format(url))
            finally:
                self.urls.remove(url)
def create_user(self, db_session: Session, user_to_create: UserCreate):
    """Register a new user; raise when the username is already taken."""
    if self.get_by_username(db_session, user_to_create.username) is not None:
        raise Exception('User already exists')
    created = User(
        username=user_to_create.username,
        first_name=user_to_create.first_name,
        last_name=user_to_create.last_name,
        is_admin=user_to_create.is_admin,
        vk_id=user_to_create.vk_id,
        avatar_url=user_to_create.avatar_url,
        email=user_to_create.email,
        phone=user_to_create.phone,
        password=user_to_create.password,
    )
    # Apply the model's password handling on top of the raw constructor value.
    created.set_password(user_to_create.password)
    db_session.add(created)
    db_session.commit()
    db_session.refresh(created)
    return created
def get_settings(self, db_session: Session, community_id: int) -> Setting:
    """Return the community's Setting row, or None when it has none."""
    return (db_session.query(Setting)
            .filter_by(community_id=community_id)
            .first())
async def db_session_middleware(request: Request, call_next):
    """Attach a fresh DB session to the request state; always close it after.

    FIX: the session is now closed in a ``finally`` block, so it is released
    even when a downstream handler raises instead of leaking the connection.
    """
    request.state.db = Session()
    try:
        response = await call_next(request)
    finally:
        request.state.db.close()
    return response
def download_recipe_data(cls):
    """Download each known recipe's page and persist its full details.

    For every Recipe row originating from SERVICE_CODE, fetches the recipe
    page and fills in the main image asset, description, prep time, servings,
    nutrition facts, ingredients, and step-by-step instructions, committing
    once per recipe.
    """
    session = Session()
    recipes_dto = session.query(Recipe).filter(
        Recipe.origin == SERVICE_CODE).all()
    cls.logger.info('Parsing and saving recipe data')
    for recipe_dto in tqdm(recipes_dto, unit=' recipes'):
        cls.logger.info('Downloading recipe: ' + recipe_dto.name)
        recipe_html = requests.get(recipe_dto.url)
        soup = BeautifulSoup(recipe_html.text, 'html.parser')
        # Main Image
        tag = soup.find('source', {'media': '(max-width: 1199px)'})
        image_url = tag['srcset']
        image = get_or_create(session, Asset, url=image_url)
        image.type = 'image'
        image.url = image_url
        recipe_dto.assets.append(image)
        # Description
        description = soup.find(
            'section', {'class': 'recipe-description'}).find('p')
        recipe_dto.description = description.text
        # Summary: first <ul> holds prep time / servings, second nutrition.
        uls = soup.find('div', {'class': 'recipe-side-note'}).findAll('ul')
        li = uls[0].findAll('li')
        prep = li[0].text.split(':')
        # e.g. ' 20 minutes' -> '20M'; keep everything up to the 'M'.
        time = prep[1].replace('minutes', 'M').replace(' ', '')
        recipe_dto.time = time[:time.find('M')+1]
        servings = li[1].text.split(':')
        recipe_dto.servings = servings[1]
        # Nutrition: each <li> is 'Name: amount'.
        for li in uls[1].findAll('li'):
            nutrition = li.text.split(':')
            nutrition_name = nutrition[0].strip()
            nutrition_code = nutrition_name.lower().replace(' ', '-')
            nutrition_dto = get_or_create(
                session, Nutrition, code=nutrition_code)
            nutrition_dto.name = nutrition_name
            nutrition_amount = nutrition[1].strip()
            nutrition_unit = None
            # Calories are measured in cal; everything else in grams.
            if nutrition_code == 'calories':
                nutrition_unit = 'cal'
            else:
                nutrition_unit = 'g'
            recipe_nutrition_dto = get_or_create(
                session, RecipeNutrition, recipe=recipe_dto,
                nutrition=nutrition_dto)
            recipe_nutrition_dto.amount = nutrition_amount
            recipe_nutrition_dto.unit = nutrition_unit
        # Ingredients
        main_recipe = soup.find('section', {'class': 'main-recipe'})
        ingredients = main_recipe.find('ol').findAll('li')
        ingredient_parser = IngredientParser()
        for ingredient in ingredients:
            # The parser may yield several structured ingredients per line,
            # or a falsy value when the line cannot be parsed.
            recipe_ingredient_dtos = ingredient_parser.clense_ingredients(
                ingredient.string)
            if recipe_ingredient_dtos:
                for recipe_ingredient_dto in recipe_ingredient_dtos:
                    ingredient_dto = get_or_create(
                        session, Ingredient,
                        code=recipe_ingredient_dto.ingredient.code)
                    ingredient_dto.name = recipe_ingredient_dto.ingredient.name
                    recipe_ingredient = get_or_create(
                        session, RecipeIngredient, recipe=recipe_dto,
                        ingredient=ingredient_dto)
                    recipe_ingredient.ingredient = ingredient_dto
                    # Amount/unit are optional on the parsed ingredient.
                    if recipe_ingredient_dto.amount is not None:
                        recipe_ingredient.amount = recipe_ingredient_dto.amount
                    if recipe_ingredient_dto.unit is not None:
                        recipe_ingredient.unit = recipe_ingredient_dto.unit
        # Instructions — skip the first 'row' div (presumably a header);
        # numbered steps start at 1.
        steps = soup.find(
            'section',
            {'class': 'recipe-instruct'}).findAll('div', {'class': 'row'})
        stepNbr = 1
        for step in steps[1:]:
            instruction_dto = get_or_create(
                session, Instruction, recipe=recipe_dto, step=stepNbr)
            instruction_dto.description = step.find(
                'p', {'class': 'instruction-description'}).text
            instruction_image_dto = get_or_create(
                session, Asset, instruction=instruction_dto, type='image')
            instruction_image_dto.url = step.find('img')['src']
            stepNbr += 1
        session.commit()
def get(self, db_session: Session, community_id: int):
    """Look up a community by primary key with relations lazily loaded.

    Raises:
        NoResultFound: if no community with ``community_id`` exists.
    """
    found = (db_session.query(Community)
             .options(lazyload("managers"), lazyload("admins"))
             .get(community_id))
    if found is None:
        raise NoResultFound("Community not found")
    return found
def signup(name, email, password, **kwargs): ''' Creates a new user --- tags: - user responses: 200: description: OK content: application/json: schema: type: object properties: success: type: boolean message: type: string session: type: object properties: token: type: string expires: type: string ''' # Validate name min_length = 2 max_length = User.__table__.c['name'].type.length if (len(name) < min_length): return jsonify({ 'success': False, 'message': 'Name should be at least {0} characters long'.format(min_length) }) if (len(name) > max_length): return jsonify({ 'success': False, 'message': 'Name should be at most {0} characters long'.format(max_length) }) # Validate email try: email_results = validate_email(email) #email = email_results.email email = '{0}@{1}'.format(email_results.local_part.lower(), email_results.domain) if email_results.domain != 'cpp.edu': return jsonify({ 'success': False, 'message': 'A \'@cpp.edu\' email address is required' }) except EmailNotValidError as ex: return jsonify({'success': False, 'message': str(ex)}) # Ensure strong password password_results = zxcvbn(password, user_inputs=[name, email]) if (password_results['score'] < 2): suggestions = password_results['feedback']['suggestions'] response = {'success': False, 'message': 'Your password is too weak'} if (len(suggestions) > 0): response['message'] += ' - {0}'.format(suggestions[0]) return jsonify(response) # Finally create user and session try: user = User(email=email, name=name, password=password) db.session.add(user) db.session.commit() except IntegrityError as ex: return jsonify({ 'success': False, 'message': 'Email already registered' }) session = Session(user_id=user.user_id, expires=datetime.now() + timedelta(days=1)) db.session.add(session) db.session.commit() session_data = session.dump() return jsonify({'success': True, 'message': '', 'session': session_data})
def init_parts(self):
    """Instantiate the user, session, and content components on one db handle."""
    database = self.db
    self.user = User(database)
    self.session = Session(database)
    self.content = Content(database)
def _get_sessions(self):
    """Start a fresh current session and return the most recent archived one.

    Returns None when the streamer has no archived sessions.
    """
    self.database['current-session'] = Session()
    archived = self.database['streamer'].sessions
    if archived:
        # Negative indexing replaces the explicit len(...) - 1 arithmetic.
        return archived[-1]
    return None