def test_like_case(self):
    expr = Like("name", "value")
    statement = compile(expr)
    self.assertEquals(statement, "? LIKE ?")
    expr = Like("name", "value", case_sensitive=True)
    statement = compile(expr)
    self.assertEquals(statement, "? LIKE ?")
    expr = Like("name", "value", case_sensitive=False)
    statement = compile(expr)
    self.assertEquals(statement, "? ILIKE ?")
def test_like_case(self):
    expr = Like("name", "value")
    statement = compile(expr)
    assert statement == "? LIKE ?"
    expr = Like("name", "value", case_sensitive=True)
    statement = compile(expr)
    assert statement == "? LIKE ?"
    expr = Like("name", "value", case_sensitive=False)
    statement = compile(expr)
    assert statement == "? ILIKE ?"
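Both tests above exercise the case_sensitive flag, which compiles to ILIKE on the PostgreSQL backend. The sketch below is not part of the original tests; it additionally shows Like's optional escape argument, assuming storm.expr's compile and the Like(expr1, expr2, escape, case_sensitive) signature, with the expected statement inferred from the "? LIKE ?" output above.

# Minimal sketch, assuming Like accepts an escape expression that is compiled
# as "... ESCAPE <escape>"; the expected string is an assumption, not taken
# from the original tests.
from storm.expr import Like, SQLRaw, compile

def test_like_escape_sketch():
    expr = Like("name", "50!%", SQLRaw("'!'"))
    statement = compile(expr)
    assert statement == "? LIKE ? ESCAPE '!'"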
def test_case_insensitive_like(self):
    like = Like(SQLRaw("description"), u"%hullah%", case_sensitive=False)
    expr = Select(SQLRaw("id"), like, tables=["like_case_insensitive_test"])
    result = self.connection.execute(expr)
    self.assertEquals(result.get_all(), [(1,), (2,)])
    like = Like(SQLRaw("description"), u"%HULLAH%", case_sensitive=False)
    expr = Select(SQLRaw("id"), like, tables=["like_case_insensitive_test"])
    result = self.connection.execute(expr)
    self.assertEquals(result.get_all(), [(1,), (2,)])
def test_case_default_like(self):
    like = Like(SQLRaw("description"), u"%hullah%")
    expr = Select(SQLRaw("id"), like, tables=["like_case_insensitive_test"])
    result = self.connection.execute(expr)
    assert result.get_all() == [(1,)]
    like = Like(SQLRaw("description"), u"%HULLAH%")
    expr = Select(SQLRaw("id"), like, tables=["like_case_insensitive_test"])
    result = self.connection.execute(expr)
    assert result.get_all() == [(2,)]
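The two tests above rely on a fixture table that is not shown in these snippets. A PostgreSQL-style setup along the following lines (an assumption, not the original fixture) would produce the asserted ids: id 1 for the lower-case row, id 2 for the upper-case one, so ILIKE matches both while the default case-sensitive LIKE matches only one.

def setUp(self):
    # Assumed fixture, not taken from the original test module.
    self.connection.execute(
        "CREATE TABLE like_case_insensitive_test "
        "(id SERIAL PRIMARY KEY, description TEXT)")
    self.connection.execute(
        "INSERT INTO like_case_insensitive_test (description) "
        "VALUES ('hullah')")
    self.connection.execute(
        "INSERT INTO like_case_insensitive_test (description) "
        "VALUES ('HULLAH')")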
def __find_folder(self, path):
    path = os.path.dirname(path)
    folders = self.__store.find(Folder, Folder.path == path)
    count = folders.count()
    if count > 1:
        raise Exception("Found multiple folders for '{}'.".format(path))
    elif count == 1:
        return folders.one()

    db = self.__store.get_database().__module__[len('storm.databases.'):]
    folder = self.__store.find(
        Folder,
        Like(path, Concat(Folder.path, os.sep + u'%', db))).order_by(
            Folder.path).last()
    full_path = folder.path
    path = path[len(folder.path) + 1:]
    for name in path.split(os.sep):
        full_path = os.path.join(full_path, name)
        fold = Folder()
        fold.root = False
        fold.name = name
        fold.path = full_path
        fold.parent = folder
        self.__store.add(fold)
        folder = fold
    return folder
def check_unique_tuple_exists(self, values, case_sensitive=True):
    """Return ``True`` if the given attributes and values already exist
    in the database, ignoring this object itself.

    :param values: dictionary of attributes:values that we will check if
        exists in the database.
    :param case_sensitive: If the checking should be case sensitive or
        not.
    """
    if all([value in ['', None] for value in values.values()]):
        return False

    clauses = []
    for attr, value in values.items():
        if not isinstance(value, unicode) or case_sensitive:
            clauses.append(attr == value)
        else:
            clauses.append(Like(attr, value, case_sensitive=False))

    cls = type(self)
    # Remove myself from the results.
    if hasattr(self, 'id'):
        clauses.append(cls.id != self.id)
    query = And(*clauses)

    return not self.store.find(cls, query).is_empty()
def _parse_string_state(self, state, table_field):
    if not state.text:
        return

    text = u'%%%s%%' % state.text.lower()
    retval = Like(table_field, text, case_sensitive=False)
    if state.mode == StringQueryState.NOT_CONTAINS:
        retval = Not(retval)

    return retval
def query(self, column, value):
    """Returns the query for each column type"""
    cls = column.cls
    for column_cls in cls.__mro__:
        column_type = type(column_cls.__dict__.get(column.name))
        if column_type is Unicode:
            return Like(column, '%%%s%%' % value, case_sensitive=False)
        if column_type is Int:
            return column == int(value)
    # No supported column type was found; raise a descriptive error rather
    # than a bare ``raise``, which has no active exception to re-raise here.
    raise TypeError("Unsupported column type for %r" % (column.name,))
def find(self, **options):
    """
    Searches product histories.

    :param options:
    :return:
    """
    from_edit_date = options.get('from_edit_date')
    to_edit_date = options.get('to_edit_date')
    from_price = options.get('from_price')
    to_price = options.get('to_price')
    name = options.get('name')
    categories = options.get('categories')

    include_out_of_stock = options.get('include_out_of_stock')
    if include_out_of_stock is None:
        include_out_of_stock = False

    expressions = []
    if not include_out_of_stock:
        expressions.append(
            ProductsHistoryEntity.product_history_status ==
            ProductsHistoryEntity.ProductHistoryStatusEnum.IN_STOCK)

    if from_edit_date is not None:
        expressions.append(
            ProductsHistoryEntity.product_history_edit_date >= from_edit_date)

    if to_edit_date is not None:
        expressions.append(
            ProductsHistoryEntity.product_history_edit_date <= to_edit_date)

    if from_price is not None:
        expressions.append(
            ProductsHistoryEntity.product_history_price >= from_price)

    if to_price is not None:
        # Upper bound on the price.
        expressions.append(
            ProductsHistoryEntity.product_history_price <= to_price)

    if name is not None and name.strip() != "":
        expressions.append(
            Like(ProductsHistoryEntity.product_history_name,
                 "%{0}%".format(name)))

    if categories is not None and len(categories) > 0:
        expressions.append(
            In(ProductsHistoryEntity.product_history_category, categories))

    store = get_current_transaction_store()
    entities = store.find(
        ProductsHistoryEntity, And(*expressions)).order_by(
            ProductsHistoryEntity.product_history_edit_date)

    results = []
    for entity in entities:
        results.append(DynamicObject(entity_to_dic(entity)))

    return results
def extractTrendingHashtags(store, limit=10, duration=None):
    """Extract information about trending hashtags and store it in FluidDB.

    @param store: The storm store to query and to save our result to.
    @param limit: Optionally, the number of objects to retrieve.
    @param duration: Optionally, the recent time period to look at when
        determining which hashtags are trending.  Default is 28 days.

    The storm query below results in SQL like:

        SELECT COUNT(DISTINCT comments.object_id) AS count,
               about_tag_values.value,
               array_agg(ROW(comments.username, comments.creation_time))
        FROM about_tag_values, comment_object_link, comments
        WHERE about_tag_values.value LIKE '#%'
          AND about_tag_values.object_id = comment_object_link.object_id
          AND comments.object_id = comment_object_link.comment_id
          AND comments.creation_time >= '2012-11-09 07:42:40'::TIMESTAMP
          AND CHAR_LENGTH(about_tag_values.value) >= 2
        GROUP BY about_tag_values.value
        ORDER BY count DESC
        LIMIT 10
    """
    duration = timedelta(days=28) if duration is None else duration
    startTime = datetime.utcnow() - duration
    count = Alias(Count(Comment.objectID, distinct=True))
    result = store.find(
        (count, AboutTagValue.value,
         Func('array_agg', Row(Comment.username, Comment.creationTime))),
        Like(AboutTagValue.value, u'#%'),
        AboutTagValue.objectID == CommentObjectLink.objectID,
        Comment.objectID == CommentObjectLink.commentID,
        Comment.creationTime >= startTime,
        Func('CHAR_LENGTH', AboutTagValue.value) >= 2)
    result.group_by(AboutTagValue.value)
    result.order_by(Desc(count))
    result.config(limit=limit)

    data = [{'count': count,
             'usernames': _sortUsernames(usernames),
             'value': hashtag}
            for count, hashtag, usernames in result]

    user = getUser(u'fluidinfo.com')
    tagValues = TagValueAPI(user)
    objectID = ObjectAPI(user).create(u'fluidinfo.com')
    tagValues.set(
        {objectID: {u'fluidinfo.com/trending-hashtags': json.dumps(data)}})
    store.commit()
def __find_root_folder(self, path):
    path = os.path.dirname(path)
    db = self.__store.get_database().__module__[len('storm.databases.'):]
    folders = self.__store.find(Folder,
                                Like(path, Concat(Folder.path, u'%', db)),
                                Folder.root == True)
    count = folders.count()
    if count > 1:
        raise Exception(
            "Found multiple root folders for '{}'.".format(path))
    elif count == 0:
        raise Exception(
            "Couldn't find the root folder for '{}'.\n"
            "Don't scan files that aren't located in a defined music folder"
            .format(path))
    return folders.one()
def check_unique_tuple_exists(self, values, case_sensitive=True):
    """Check database for values presence

    Check if we already have the given attributes and values in the
    database, ignoring this object's own row.

    :param values: dictionary of attributes:values that we will check if
        exists in the database.
    :param case_sensitive: If the checking should be case sensitive or
        not.
    :returns: the existing object or ``None``
    """
    if all([value in ['', None] for value in values.values()]):
        return None

    clauses = []
    for attr, value in values.items():
        self.__class__.validate_attr(attr)

        if not isinstance(value, str) or case_sensitive:
            clauses.append(attr == value)
        else:
            clauses.append(Like(attr, value, case_sensitive=False))

    cls = type(self)
    # Remove myself from the results.
    if hasattr(self, 'id'):
        clauses.append(cls.id != self.id)
    query = And(*clauses)

    try:
        return self.store.find(cls, query).one()
    except NotOneError:
        # FIXME: Instead of breaking stoq if more than one tuple exists,
        # simply return the first object, but log a warning about the
        # database issue. We should have UNIQUE constraints in more places
        # to be sure that this would never happen
        values_str = ["%s => %s" % (k.name, v) for k, v in values.items()]
        log.warning(
            "more than one result found when trying to "
            "check_unique_tuple_exists on table '%s' for values: %r" % (
                self.__class__.__name__, ', '.join(sorted(values_str))))
        return self.store.find(cls, query).any()
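A brief usage sketch of the helper above; Person, Person.name and the surrounding setup are illustrative assumptions, not names taken from the original code.

# Illustrative only: Person and Person.name are assumed names.
def has_duplicate_name(person):
    # Case-insensitive check: string values become
    # Like(attr, value, case_sensitive=False) clauses (ILIKE on PostgreSQL),
    # and the object's own row is excluded via cls.id != self.id.
    existing = person.check_unique_tuple_exists(
        {Person.name: person.name}, case_sensitive=False)
    return existing is not None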
def find(self, **options):
    """
    Searches products.

    :param options:
    :return:
    """
    current_user = get_current_user()

    from_creation_date = options.get('from_creation_date')
    to_creation_date = options.get('to_creation_date')
    from_price = options.get('from_price')
    to_price = options.get('to_price')
    name = options.get('name')
    size = options.get('size')
    brand = options.get('brand')

    categories = options.get('categories')
    if not isinstance(categories, (list, tuple)):
        categories = [categories]

    age_categories = options.get('age_categories')
    if not isinstance(age_categories, (list, tuple)):
        age_categories = [age_categories]

    gender = options.get('gender')
    if not isinstance(gender, (list, tuple)):
        gender = [gender]

    include_out_of_stock = options.get('include_out_of_stock')
    if include_out_of_stock is None:
        include_out_of_stock = False

    wholesale_type = options.get('wholesale_type')
    if wholesale_type in (None, -1):
        wholesale_type = ProductsEntity.ProductWholesaleTypeEnum.RETAIL

    if wholesale_type == ProductsEntity.ProductWholesaleTypeEnum.WHOLESALE:
        if (current_user.user_production_type !=
                UserEntity.UserProductionTypeEnum.PRODUCER):
            raise ProductsException(
                "Consumer user can not search wholesale products.")

    just_current_user = options.get('just_current_user')
    if just_current_user is None:
        just_current_user = False

    if (just_current_user and current_user.user_production_type !=
            UserEntity.UserProductionTypeEnum.PRODUCER):
        raise ProductsException(
            "Consumer user can not search its own products.")

    expressions = [
        ProductsEntity.product_whole_sale_type == wholesale_type
    ]

    if not include_out_of_stock:
        expressions.append(ProductsEntity.product_status ==
                           ProductsEntity.ProductStatusEnum.IN_STOCK)

    if from_creation_date is not None:
        if not isinstance(from_creation_date, datetime.datetime):
            from_creation_date = parser.parse(from_creation_date)
        expressions.append(
            ProductsEntity.product_creation_date >= from_creation_date)

    if to_creation_date is not None:
        if not isinstance(to_creation_date, datetime.datetime):
            to_creation_date = parser.parse(to_creation_date)
        expressions.append(
            ProductsEntity.product_creation_date <= to_creation_date)

    if from_price not in (None, 0, "", "0"):
        expressions.append(
            ProductsEntity.product_price >= Decimal(from_price))

    if to_price not in (None, 0, "", "0"):
        expressions.append(
            ProductsEntity.product_price <= Decimal(to_price))

    if name is not None and name.strip() != "":
        name = unicode(name)
        expressions.append(
            Like(ProductsEntity.product_name, "%{0}%".format(name.strip())))

    if size is not None and size.strip() != "":
        size = unicode(size)
        expressions.append(
            Exists(
                Select(columns=[1],
                       where=And(
                           ProductsSizesEntity.product_id ==
                           ProductsEntity.product_id,
                           Like(ProductsSizesEntity.product_size,
                                "%{0}%".format(size.strip()))),
                       tables=[ProductsSizesEntity])))

    if brand is not None and brand.strip() != "":
        brand = unicode(brand)
        expressions.append(
            Exists(
                Select(columns=[1],
                       where=And(
                           ProductsBrandsEntity.product_id ==
                           ProductsEntity.product_id,
                           Like(ProductsBrandsEntity.product_brand,
                                "%{0}%".format(brand.strip()))),
                       tables=[ProductsBrandsEntity])))

    if (categories is not None and len(categories) > 0 and
            -1 not in categories):
        expressions.append(In(ProductsEntity.product_category, categories))

    if (age_categories is not None and len(age_categories) > 0 and
            -1 not in age_categories):
        expressions.append(
            In(ProductsEntity.product_age_category, age_categories))

    if gender is not None and len(gender) > 0 and -1 not in gender:
        expressions.append(In(ProductsEntity.product_gender, gender))

    if just_current_user:
        expressions.append(
            ProductsEntity.product_producer_user_id == current_user.id)

    offset = options.get("__offset__")
    if offset is None:
        offset = 0
    else:
        offset = int(offset)

    limit = options.get("__limit__")
    if limit in (None, 0):
        limit = Undef
    else:
        limit = int(limit)

    statement = \
        Select(columns=[ProductsEntity.product_id,
                        ProductsEntity.product_name,
                        ProductsEntity.product_category,
                        ProductsEntity.product_image,
                        ProductsEntity.product_age_category,
                        ProductsEntity.product_comment,
                        ProductsEntity.product_creation_date,
                        ProductsEntity.product_price,
                        ProductsEntity.product_gender,
                        ProductsEntity.product_whole_sale_type],
               where=And(*expressions),
               tables=[ProductsEntity],
               order_by=[Desc(ProductsEntity.product_creation_date)],
               offset=offset,
               limit=limit)

    store = get_current_transaction_store()

    results = []
    for (product_id, product_name, product_category, product_image,
         product_age_category, product_comment, product_creation_date,
         product_price, product_gender,
         product_whole_sale_type) in store.execute(statement):
        results.append(
            DynamicObject(product_id=product_id,
                          product_name=product_name,
                          product_category=product_category,
                          product_image=product_image,
                          product_age_category=product_age_category,
                          product_comment=product_comment,
                          product_creation_date=product_creation_date,
                          product_price=product_price,
                          product_gender=product_gender,
                          product_whole_sale_type=product_whole_sale_type,
                          product_colors=self.get_product_colors(
                              product_id, concat_results=True),
                          product_sizes=self.get_product_sizes(
                              product_id, concat_results=True),
                          product_brands=self.get_product_brands(
                              product_id, concat_results=True)))

    return results
def __init__(self, expr, string):
    string = string.replace("!", "!!") \
                   .replace("_", "!_") \
                   .replace("%", "!%")
    Like.__init__(self, expr, "%" + string + "%", SQLRaw("'!'"))
def _like(value):
    return Like(StoqNormalizeString(table_field),
                StoqNormalizeString(u'%%%s%%' % value.lower()),
                case_sensitive=False)
def __init__(self, expr, string):
    string = string.replace("!", "!!") \
                   .replace("_", "!_") \
                   .replace("%", "!%")
    Like.__init__(self, expr, "%" + string + "%", SQLRaw("'!'"))
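The two __init__ snippets above wrap Like with '!' as the escape character so that the search string is matched literally as a substring. The standalone helper below restates that escaping for illustration; the function name and sample input are assumptions, not part of the original code.

def _escape_like_pattern(string):
    # Same escaping as the __init__ above: escape '!' first, then the LIKE
    # metacharacters '_' and '%', and wrap the result in wildcards.
    escaped = string.replace("!", "!!").replace("_", "!_").replace("%", "!%")
    return "%" + escaped + "%"

# '%' and '_' in the user input no longer act as wildcards:
assert _escape_like_pattern("50%_off") == "%50!%!_off%"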