def __init__(self) -> None:
    """
    curl -X DELETE http://localhost:9200/personalization_orders
    curl -X PUT http://localhost:9200/personalization_orders -H "Content-Type: application/json" -d'{
        "mappings": {
            "personalization_orders": {
                "properties": {
                    "order_number": {"type": "keyword"},
                    "email": {"type": "keyword"},
                    "ordered_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"},
                    "rs_sku": {"type": "keyword"},
                    "rs_simple_sku": {"type": "keyword"},
                    "product_name": {"type": "keyword"},
                    "manufacturer": {"type": "keyword"},
                    "gender": {"type": "keyword"},
                    "product_size_attribute": {"type": "keyword"},
                    "rs_colour": {"type": "keyword"},
                    "size": {"type": "keyword"}
                }
            }
        }
    }'
    """
    self.__elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PERSONALIZATION_ORDERS,
        settings.AWS_ELASTICSEARCH_PERSONALIZATION_ORDERS)
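# Illustrative only: a minimal sketch of indexing one document that matches the mapping
# documented in the docstring above. The Elastic wrapper's create(document_id, document_data)
# signature is assumed from its usage elsewhere in this module; the document id scheme and
# all field values below are hypothetical sample data.
def _example_store_personalization_order(elastic: Elastic) -> None:
    elastic.create('ORD-0000001__SKU-123-M', {
        'order_number': 'ORD-0000001',
        'email': 'customer@example.com',
        'ordered_at': '2020-01-15 10:30:00',   # must match the "yyyy-MM-dd HH:mm:ss" date format
        'rs_sku': 'SKU-123',
        'rs_simple_sku': 'SKU-123-M',
        'product_name': 'Example Tee',
        'manufacturer': 'Example Brand',
        'gender': 'MENS',
        'product_size_attribute': 'T-Shirts',
        'rs_colour': 'Blue',
        'size': 'M',
    })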
def __init__(self):
    self.__requests_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS,
        settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS)
    self.__customer_requests_map_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS_CUSTOMER_MAP,
        settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS_CUSTOMER_MAP)
    self.__reflector = Reflector()
def __init__(self):
    self.__requests_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS,
        settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS)
    self.__order_requests_map_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS_ORDERS_MAP,
        settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS_ORDERS_MAP)
    self.__reflector = Reflector()
def __init__(self):
    self.__orders_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS,
        settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS)
    self.__customer_orders_map_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS_CUSTOMER_ORDERS_MAP,
        settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS_CUSTOMER_ORDERS_MAP)
    self.__reflector = Reflector()
    self.__current_vat_value = PurchaseSettings().vat
def __init__(self):
    self.__tiers_storage = CustomerTierStorageImplementation()
    self.__messages = MessageStorageImplementation()
    self.__logger = Logger()

    """"""
    # @todo : refactoring
    from chalicelib.libs.purchase.core import CustomerInterface
    from chalicelib.libs.purchase.customer.storage import CustomerStorageImplementation
    see = CustomerInterface.tier
    see = CustomerStorageImplementation.save
    """"""

    self.__elastic = Elastic(
        settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_CUSTOMER_TIERS,
        settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_CUSTOMER_TIERS)
def __init__(self, customer: CustomerInterface,
             checkout_items: Tuple['Checkout.Item'], delivery_cost: Cost,
             vat_percent: Percentage):
    if not isinstance(customer, CustomerInterface):
        raise ArgumentTypeException(self.__init__, 'customer', customer)

    if sum([
        not isinstance(checkout_item, Checkout.Item)
        for checkout_item in checkout_items
    ]) > 0:
        raise TypeError(
            '{0} expects array of {1} in {2}, but {3} is given!'.format(
                self.__init__.__qualname__, Checkout.Item.__qualname__,
                'checkout_items', str(checkout_items)))

    if not isinstance(delivery_cost, Cost):
        raise ArgumentTypeException(self.__init__, 'delivery_cost', delivery_cost)

    if not isinstance(vat_percent, Percentage):
        raise ArgumentTypeException(self.__init__, 'vat_percent', vat_percent)

    self.__customer = customer
    self.__checkout_items = [checkout_item for checkout_item in checkout_items]
    self.__delivery_address = None
    self.__delivery_cost = delivery_cost
    self.__vat_percent = vat_percent

    # The available credits amount must be captured on checkout initialization, because
    # the amount can be changed by another process during the current checkout
    # (another browser, cash-out, etc.). Currently this cannot really happen, because we
    # always have only one checkout process per user (keyed by user.id) and we don't use
    # other credits except f-bucks, which can only be increased by another process - but
    # we can't rely on that, because this layer should not know about such details.
    # We should reserve credit amounts at the start of the checkout process.
    # This protects us from multiple usages of the same credit amounts.
    # Another way is to load a fresh credits amount every time, but then the user could
    # see different amounts before and after the payment operation, and other strange
    # things could happen. So the best way is reservation.
    # @todo : implement reservation of credit amounts ???
    # @todo : raw data usage. should be used customer.credits or so ???
    from chalicelib.settings import settings
    from chalicelib.libs.core.elastic import Elastic
    available_credits_amount = Cost((Elastic(
        settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT,
        settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT
    ).get_data(customer.customer_id.value) or {'amount': 0}).get('amount') or 0)
    self.__available_credits_amount = available_credits_amount
    self.__is_credits_in_use = False
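# Hypothetical sketch only: one way the "reservation of credit amounts" @todo above could
# look. Nothing below exists in the codebase - the helper name and document shape are
# invented for illustration; it only reuses the Elastic wrapper's create() call as it is
# used elsewhere in this module.
import uuid
import datetime

def _reserve_credits_sketch(elastic: 'Elastic', customer_id: str, amount: float) -> str:
    """Record a reservation so a second checkout cannot spend the same credits."""
    reservation_id = str(uuid.uuid4())
    elastic.create(reservation_id, {
        'customer_id': customer_id,
        'amount': amount,
        'reserved_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        'released': False,
    })
    return reservation_id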
def __init__(self): """ curl -X DELETE localhost:9200/purchase_customer_credit_cards curl -X PUT localhost:9200/purchase_customer_credit_cards -H "Content-Type: application/json" -d'{ "mappings": { "purchase_customer_credit_cards": { "properties": { "token": {"type": "keyword"}, "customer_id": {"type": "keyword"}, "brand": {"type": "keyword"}, "number_hidden": {"type": "keyword"}, "expires": {"type": "keyword"}, //2005 -> 2020/05 "holder_name": {"type": "keyword"}, "is_verified": {"type": "boolean"}, "created_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"} } } } }' curl -X DELETE localhost:9200/purchase_customer_credit_cards_customer_map curl -X PUT localhost:9200/purchase_customer_credit_cards_customer_map -H "Content-Type: application/json" -d'{ "mappings": { "purchase_customer_credit_cards_customer_map": { "properties": { "tokens_json": {"type": "keyword"} } } } }' """ self.__elastic_cards = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS, settings.AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS) self.__elastic_customer_cards_map = Elastic( settings. AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS_CUSTOMER_MAP, settings. AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS_CUSTOMER_MAP) self.__reflector = Reflector()
def __init__(self): """ curl -X DELETE localhost:9200/customer_tiers_tiers curl -X PUT localhost:9200/customer_tiers_tiers -H "Content-Type: application/json" -d'{ "mappings": { "customer_tiers_tiers": { "properties": { "id": {"type": "integer"}, "name": {"type": "keyword"}, "credit_back_percent": {"type": "integer"}, "spent_amount_min": {"type": "integer"}, "spent_amount_max": {"type": "integer"}, "is_deleted": {"type": "boolean"} } } } }' """ self.__elastic = Elastic( settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_TIERS, settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_TIERS ) self.__reflector = Reflector()
def customer_credit_info():
    try:
        user = __get_user()

        """"""
        # @TODO : REFACTORING !!!
        from chalicelib.libs.purchase.customer.sqs import FbucksChargeSqsHandler
        see = FbucksChargeSqsHandler
        """"""

        # fbucks amount
        __fbucks_customer_amount_elastic = Elastic(
            settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT,
            settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT,
        )
        fbucks_amount_row = __fbucks_customer_amount_elastic.get_data(user.id)
        fbucks_amount = (fbucks_amount_row['amount'] or 0) if fbucks_amount_row else 0

        # fbucks history
        __fbucks_customer_amount_changes_elastic = Elastic(
            settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES,
            settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES,
        )
        fbucks_changes = __fbucks_customer_amount_changes_elastic.post_search({
            "query": {
                "term": {
                    "customer_id": user.id
                }
            }
        }).get('hits', {}).get('hits', []) or []
        fbucks_changes = [fbucks_change['_source'] for fbucks_change in fbucks_changes]
        fbucks_changes = [{
            'amount': fbucks_change['amount'],
            'changed_at': fbucks_change['changed_at'],
            'order_number': fbucks_change['order_number'],
        } for fbucks_change in fbucks_changes]

        # cash out balance
        credit = CreditStorageImplementation().load(user.email)
        cash_out_balance = credit.paid if credit else 0

        return {
            'fbucks_amount': fbucks_amount,
            'fbucks_changes': fbucks_changes,
            'cache_out_balance': cash_out_balance,
        }
    except BaseException as e:
        return http_response_exception_or_throw(e)
def handle(self, sqs_message: SqsMessage) -> None:
    """
    crutch customer_info_request_answer
    [
        'customer_email' => $customer->getEmail(),
        'name' => [
            'first' => $customer->getFirstName() ?: null,
            'last' => $customer->getLastName() ?: null,
        ],
        'gender' => 'F' / 'M',
        'addresses' => [
            [
                'nickname' => $address->getAddressNickname() ?: null,
                'phone' => $address->getTelephone() ?: $address->getContactNumber() ?: null,
                'street' => $address->getStreet() ?: null,
                'suburb' => $address->getSuburb() ?: null,
                'post_code' => $address->getPostCode() ?: null,
                'city' => $address->getCity() ?: null,
                'province' => $address->getProvince() ?: null,
                'country_code' => $address->getCountryCode() ?: null,
                'is_default_billing' => $address->getId() == $customer->getDefaultBillingAddressId(),
                'is_default_shipping' => $address->getId() == $customer->getDefaultShippingAddressId(),
            ],
            ...
        ],
        'tier' => [
            'id' => $customerTier->getId(),
            'name' => $customerTier->getName(),
            'credit_back_percent' => $customerTier->getCreditBackPercent(),
            'spent_amount_min' => $customerTier->getSpendAmountMin(),
            'spent_amount_max' => $customerTier->getSpendAmountMax(),
        ]
    ]
    """
    # @todo : perhaps, update other data - we need to be able to get a customer by email

    # 'tier' here is the same tier as in the 'customer_tiers_set' sqs-message.
    # Theoretically this message can be handled earlier than the 'customer_tiers_set'
    # message, so we need to be sure that all new tiers exist.
    tier_data = sqs_message.message_data['tier']
    tier = self.__tiers_storage.get_by_id(Id(str(tier_data['id'])))
    if not tier:
        tier = CustomerTier(
            Id(str(tier_data['id'])), Name(tier_data['name']),
            Percentage(int(tier_data['credit_back_percent'])),
            int(tier_data['spent_amount_min']),
            int(tier_data['spent_amount_max']))
        self.__tiers_storage.save(tier)

    # assign user to tier
    """"""
    # @todo : refactoring
    from chalicelib.libs.purchase.core import CustomerInterface
    from chalicelib.libs.purchase.customer.storage import CustomerStorageImplementation
    see = CustomerInterface.tier
    see = CustomerStorageImplementation.save
    """"""
    customer_email = sqs_message.message_data['customer_email']
    elastic = Elastic(
        settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_CUSTOMER_TIERS,
        settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_CUSTOMER_TIERS)
    if elastic.get_data(customer_email):
        elastic.update_data(customer_email, {'doc': {'tier_id': tier.id.value}})
    else:
        elastic.create(customer_email, {'tier_id': tier.id.value})
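# Illustrative only: the equivalent sqs_message.message_data this handler consumes,
# translated from the PHP-style payload documented in the docstring above. All values
# are hypothetical sample data; only the keys actually read by handle()
# ('customer_email' and 'tier') are guaranteed by the handler itself.
_example_message_data = {
    'customer_email': 'customer@example.com',
    'name': {'first': 'Jane', 'last': 'Doe'},
    'gender': 'F',
    'addresses': [{
        'nickname': 'Home',
        'phone': '+27000000000',
        'street': '1 Example Street',
        'suburb': 'Example Suburb',
        'post_code': '0000',
        'city': 'Cape Town',
        'province': 'Western Cape',
        'country_code': 'ZA',
        'is_default_billing': True,
        'is_default_shipping': True,
    }],
    'tier': {
        'id': 1,
        'name': 'Bronze',
        'credit_back_percent': 1,
        'spent_amount_min': 0,
        'spent_amount_max': 5000,
    },
}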
class _CancelRequestStorageElastic(CancelRequestStorageInterface): """ curl -X DELETE localhost:9200/purchase_cancel_requests curl -X PUT localhost:9200/purchase_cancel_requests -H "Content-Type: application/json" -d'{ "mappings": { "purchase_cancel_requests": { "properties": { "request_number": {"type": "keyword"}, "order_number": {"type": "keyword"}, "request_items": { "properties": { "simple_sku": {"type": "keyword"}, "qty": {"type": "integer"}, "status": {"type": "keyword"}, "processed_at": {"type": "date", "format": "date_hour_minute_second_millis"} } }, "refund_method": {"type": "keyword"}, "refund_method_extra_data_json": {"type": "keyword"}, "additional_comment": {"type": "keyword"}, "requested_at": {"type": "date", "format": "date_hour_minute_second_millis"} } } } }' curl -X DELETE localhost:9200/purchase_cancel_requests_orders_map curl -X PUT localhost:9200/purchase_cancel_requests_orders_map -H "Content-Type: application/json" -d'{ "mappings": { "purchase_cancel_requests_orders_map": { "properties": { "request_numbers_json": {"type": "keyword"} } } } }' """ __ENTITY_PROPERTY_REQUEST_NUMBER = '__number' __ENTITY_PROPERTY_ORDER_NUMBER = '__order_number' __ENTITY_PROPERTY_ITEMS = '__items' __ENTITY_PROPERTY_ITEMS_SIMPLE_SKU = '__simple_sku' __ENTITY_PROPERTY_ITEMS_QTY = '__qty' __ENTITY_PROPERTY_ITEMS_STATUS = '__status' __ENTITY_PROPERTY_ITEMS_PROCESSED_AT = '__processed_at' __ENTITY_PROPERTY_REFUND_METHOD = '__refund_method' __ENTITY_PROPERTY_ADDITIONAL_COMMENT = '__additional_comment' __ENTITY_PROPERTY_REQUESTED_AT = '__requested_at' def __init__(self): self.__requests_elastic = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS, settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS) self.__order_requests_map_elastic = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS_ORDERS_MAP, settings.AWS_ELASTICSEARCH_PURCHASE_CANCEL_REQUESTS_ORDERS_MAP) self.__reflector = Reflector() def save(self, cancel_request: CancelRequest) -> None: if not isinstance(cancel_request, CancelRequest): raise ArgumentTypeException(self.save, 'cancel_request', cancel_request) items_data = [] for item in cancel_request.items: items_data.append({ "simple_sku": item.simple_sku.value, "qty": item.qty.value, "status": item.status.value, # elastic supports only 3 digits for milliseconds "processed_at": item.processed_at.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3] if item.processed_at else None }) document_id = cancel_request.number.value document_data = { "request_number": cancel_request.number.value, "order_number": cancel_request.order_number.value, "request_items": items_data, 'refund_method': cancel_request.refund_method.descriptor, 'refund_method_extra_data_json': json.dumps(cancel_request.refund_method.extra_data), # elastic supports only 3 digits for milliseconds "requested_at": cancel_request.requested_at.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3], "additional_comment": cancel_request.additional_comment.value if cancel_request.additional_comment else None } existed_request = self.get_by_number(cancel_request.number) if existed_request: self.__requests_elastic.update_data(document_id, {'doc': document_data}) else: self.__requests_elastic.create(document_id, document_data) # Elastic can search by attributes only after 1 second from last update. # We need all data, when we are searching by order_number, # so in this case we will lost fresh data, if search directly after creation of a new cancel_request. # In this case we need to use another index and get data by elastic doc_id. 
order_requests_map = self.__order_requests_map_elastic.get_data( cancel_request.order_number.value) if order_requests_map: request_numbers = list( json.loads( order_requests_map.get('request_numbers_json', '[]')) or []) request_numbers.append(cancel_request.number.value) request_numbers = list(set(request_numbers)) self.__order_requests_map_elastic.update_data( cancel_request.order_number.value, { 'doc': { 'request_numbers_json': json.dumps(request_numbers) } }) else: self.__order_requests_map_elastic.create( cancel_request.order_number.value, { 'request_numbers_json': json.dumps([cancel_request.number.value]) }) def get_by_number( self, request_number: CancelRequest.Number) -> Optional[CancelRequest]: if not isinstance(request_number, CancelRequest.Number): raise ArgumentTypeException(self.get_by_number, 'request_number', request_number) data = self.__requests_elastic.get_data(request_number.value) result = self.__restore(data) if data else None return result def __restore(self, data: dict) -> CancelRequest: cancel_request = self.__reflector.construct( CancelRequest, { self.__class__.__ENTITY_PROPERTY_REQUEST_NUMBER: CancelRequest.Number(data['request_number']), self.__class__.__ENTITY_PROPERTY_ORDER_NUMBER: OrderNumber(data['order_number']), self.__class__.__ENTITY_PROPERTY_ITEMS: tuple([ self.__reflector.construct( CancelRequest.Item, { self.__class__.__ENTITY_PROPERTY_ITEMS_SIMPLE_SKU: SimpleSku(item_data['simple_sku']), self.__class__.__ENTITY_PROPERTY_ITEMS_QTY: Qty(item_data['qty']), self.__class__.__ENTITY_PROPERTY_ITEMS_STATUS: CancelRequest.Item.Status(item_data['status']), self.__class__.__ENTITY_PROPERTY_ITEMS_PROCESSED_AT: (datetime.datetime.strptime( item_data['processed_at'] + '000', '%Y-%m-%dT%H:%M:%S.%f') if item_data['processed_at'] else None), }) for item_data in data['request_items'] ]), self.__class__.__ENTITY_PROPERTY_REFUND_METHOD: _restore_refund_method( data['refund_method'], json.loads(data['refund_method_extra_data_json'])), self.__class__.__ENTITY_PROPERTY_ADDITIONAL_COMMENT: (CancelRequest.AdditionalComment(data['additional_comment']) if data.get('additional_comment') or None else None), self.__class__.__ENTITY_PROPERTY_REQUESTED_AT: datetime.datetime.strptime(data['requested_at'] + '000', '%Y-%m-%dT%H:%M:%S.%f'), }) return cancel_request def get_all_by_order_number( self, order_number: OrderNumber) -> Tuple[CancelRequest]: if not isinstance(order_number, OrderNumber): raise ArgumentTypeException(self.get_all_by_order_number, 'order_number', order_number) data = self.__order_requests_map_elastic.get_data(order_number.value) request_numbers = json.loads(( data.get('request_numbers_json') or '[]') if data else '[]') or [] if not request_numbers: return tuple() rows = self.__requests_elastic.post_search({ "query": { "ids": { "values": request_numbers } }, "size": 10000 }).get('hits', {}).get('hits', []) or [] result = [self.__restore(row['_source']) for row in rows] if len(result) != len(request_numbers): message = '{} can\'t find all CancelRequests for Order #{}! Not existed CancelRequests in map: {}' raise ValueError( message.format( self.get_all_by_order_number, order_number.value, [ request_number for request_number in request_numbers if request_number not in [request.number.value for request in result] ])) return tuple(result)
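# Note on the order -> cancel-request map index used above: the map document stores a
# JSON-encoded list of request numbers under a single "request_numbers_json" keyword and
# is fetched by doc id (the order number), so freshly saved requests are visible
# immediately despite the ~1 second Elasticsearch refresh delay. A minimal sketch of the
# read side, with hypothetical request numbers:
import json

_map_doc = {'request_numbers_json': json.dumps(['CR-0001', 'CR-0002'])}  # sample only
_request_numbers = json.loads(_map_doc.get('request_numbers_json') or '[]')
assert _request_numbers == ['CR-0001', 'CR-0002']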
class CustomerTiersCustomersSqsHandler(SqsHandlerInterface):
    def __init__(self):
        self.__tiers_storage = CustomerTierStorageImplementation()
        self.__messages = MessageStorageImplementation()
        self.__logger = Logger()

        """"""
        # @todo : refactoring
        from chalicelib.libs.purchase.core import CustomerInterface
        from chalicelib.libs.purchase.customer.storage import CustomerStorageImplementation
        see = CustomerInterface.tier
        see = CustomerStorageImplementation.save
        """"""

        self.__elastic = Elastic(
            settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_CUSTOMER_TIERS,
            settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_CUSTOMER_TIERS)

    def handle(self, sqs_message: SqsMessage) -> None:
        # 'tiers' here are the same tiers set as in the 'customer_tiers_set' sqs-message.
        # Theoretically this message can be handled earlier than the 'customer_tiers_set'
        # message, so we need to be sure that all new tiers exist.
        incoming_tiers_ids = [
            row['id'] for row in sqs_message.message_data['tiers']
        ]
        stored_tiers_ids = [
            tier.id.value for tier in self.__tiers_storage.get_all()
        ]
        # create any incoming tiers we have not stored yet
        if len([
            tier_id for tier_id in incoming_tiers_ids
            if tier_id not in stored_tiers_ids
        ]) > 0:
            # @todo : this is a crutch
            CustomerTiersTiersSqsHandler().handle(
                SqsMessage(sqs_message.id, 'customer_tiers_set',
                           {'tiers': sqs_message.message_data['tiers']}))

        # assign customers to tiers
        tiers = self.__tiers_storage.get_all()
        tiers_map = {}
        for tier in tiers:
            tiers_map[tier.id.value] = tier

        for customer_tier_data in sqs_message.message_data.get('customers'):
            customer_email = str(customer_tier_data['email'])
            tier_id = int(customer_tier_data['tier_id'])

            if self.__elastic.get_data(customer_email):
                self.__elastic.update_data(customer_email,
                                           {'doc': {'tier_id': tier_id}})
            else:
                self.__elastic.create(customer_email, {'tier_id': tier_id})

            # notify user (silently)
            try:
                tier = tiers_map[str(tier_id)]
                self.__messages.save(
                    Message(
                        str(uuid.uuid4()), customer_email,
                        'Your Customer Tier has been changed!',
                        'Now you are in the "{}" Customer Tier!'.format(
                            tier.name.value)))
            except BaseException as e:
                self.__logger.log_exception(e)
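# Illustrative only: a hypothetical message_data for the customers/tiers sync handled
# above. The 'tiers' rows follow the tier shape documented elsewhere in this module
# (id, name, credit_back_percent, spent_amount_min, spent_amount_max); the 'customers'
# rows carry the keys read in handle(). Every value is sample data, not a real payload.
_example_customers_message_data = {
    'tiers': [{
        'id': 1,
        'name': 'Bronze',
        'credit_back_percent': 1,
        'spent_amount_min': 0,
        'spent_amount_max': 5000,
    }],
    'customers': [
        {'email': 'customer@example.com', 'tier_id': 1},
    ],
}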
class _ReturnRequestStorageElastic(ReturnRequestStorageInterface): """ curl -X DELETE localhost:9200/purchase_return_requests curl -X PUT localhost:9200/purchase_return_requests -H "Content-Type: application/json" -d'{ "mappings": { "purchase_return_requests": { "properties": { "request_number": {"type": "keyword"}, "customer_id": {"type": "keyword"}, "request_items": { "properties": { "order_number": {"type": "keyword"}, "simple_sku": {"type": "keyword"}, "qty": {"type": "integer"}, "cost": {"type": "float"}, "reason": {"type": "keyword"}, "additional_comment": {"type": "keyword"}, "attached_files_urls_json": {"type": "keyword"}, "status_history": { "properties": { "status": {"type": "keyword"}, "datetime": {"type": "date", "format": "date_hour_minute_second_millis"} } } } }, "delivery_method": {"type": "keyword"}, "refund_method": {"type": "keyword"}, "refund_method_extra_data_json": {"type": "keyword"} } } } }' curl -X DELETE localhost:9200/purchase_return_requests_customer_map curl -X PUT localhost:9200/purchase_return_requests_customer_map -H "Content-Type: application/json" -d'{ "mappings": { "purchase_return_requests_customer_map": { "properties": { "request_numbers_json": {"type": "keyword"} } } } }' """ __ENTITY_PROPERTY_REQUEST_NUMBER = '__number' __ENTITY_PROPERTY_CUSTOMER_ID = '__customer_id' __ENTITY_PROPERTY_REFUND_METHOD = '__refund_method' __ENTITY_PROPERTY_DELIVERY_METHOD = '__delivery_method' __ENTITY_PROPERTY_ITEMS = '__items' __ENTITY_PROPERTY_ITEMS_ORDER_NUMBER = '__order_number' __ENTITY_PROPERTY_ITEMS_SIMPLE_SKU = '__simple_sku' __ENTITY_PROPERTY_ITEMS_QTY = '__qty' __ENTITY_PROPERTY_ITEMS_COST = '__cost' __ENTITY_PROPERTY_ITEMS_REASON = '__reason' __ENTITY_PROPERTY_ITEMS_ATTACHED_FILES = '__attached_files' __ENTITY_PROPERTY_ITEMS_ADDITIONAL_COMMENT = '__additional_comment' __ENTITY_PROPERTY_ITEMS_STATUS_HISTORY = '__status_history' def __init__(self): self.__requests_elastic = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS, settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS) self.__customer_requests_map_elastic = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS_CUSTOMER_MAP, settings.AWS_ELASTICSEARCH_PURCHASE_RETURN_REQUESTS_CUSTOMER_MAP) self.__reflector = Reflector() def save(self, return_request: ReturnRequest) -> None: if not isinstance(return_request, ReturnRequest): raise ArgumentTypeException(self.save, 'return_request', return_request) items_data = [] for item in return_request.items: status_history: ReturnRequest.Item.StatusChangesHistory = self.__reflector.extract( item, (self.__class__.__ENTITY_PROPERTY_ITEMS_STATUS_HISTORY, ))[self.__class__.__ENTITY_PROPERTY_ITEMS_STATUS_HISTORY] items_data.append({ "order_number": item.order_number.value, "simple_sku": item.simple_sku.value, "qty": item.qty.value, "cost": item.cost.value, "reason": item.reason.descriptor, "additional_comment": item.additional_comment.value if item.additional_comment else None, "attached_files_urls_json": json.dumps([file.url for file in item.attached_files]), "status_history": [ { 'status': status_change.status.value, # elastic supports only 3 digits for milliseconds 'datetime': status_change.datetime.strftime('%Y-%m-%dT%H:%M:%S.%f') [:-3], } for status_change in status_history.get_all() ] }) document_id = return_request.number.value document_data = { "request_number": return_request.number.value, "customer_id": return_request.customer_id.value, "request_items": items_data, "delivery_method": return_request.delivery_method.descriptor, "refund_method": 
return_request.refund_method.descriptor, "refund_method_extra_data_json": json.dumps(return_request.refund_method.extra_data), } existed_request = self.load(return_request.number) if existed_request: # just a double check of number uniqueness if existed_request.customer_id != return_request.customer_id: raise RuntimeError( 'Return Request "{}" already exists and belongs to another Customer!' .format(return_request.number)) self.__requests_elastic.update_data(document_id, {'doc': document_data}) else: self.__requests_elastic.create(document_id, document_data) # Elastic can search by attributes only after 1 second from last update. # We need all data, when we are searching by customer_id, # so in this case we will lost fresh data, if search directly after creation of a new return request. # In this case we need to use another index and get data by elastic doc_id. customer_requests_map = self.__customer_requests_map_elastic.get_data( return_request.customer_id.value) if customer_requests_map: request_numbers = list( json.loads( customer_requests_map.get('request_numbers_json', '[]')) or []) request_numbers.append(return_request.number.value) request_numbers = list(set(request_numbers)) self.__customer_requests_map_elastic.update_data( return_request.customer_id.value, { 'doc': { 'request_numbers_json': json.dumps(request_numbers) } }) else: self.__customer_requests_map_elastic.create( return_request.customer_id.value, { 'request_numbers_json': json.dumps([return_request.number.value]) }) def load(self, request_number: ReturnRequest.Number) -> Optional[ReturnRequest]: if not isinstance(request_number, ReturnRequest.Number): raise ArgumentTypeException(self.load, 'request_number', request_number) data = self.__requests_elastic.get_data(request_number.value) result = self.__restore(data) if data else None return result def __restore(self, data: dict) -> ReturnRequest: request_items = [] for item_data in data['request_items']: attached_files = json.loads(item_data['attached_files_urls_json']) attached_files = tuple([ ReturnRequest.Item.AttachedFile(url) for url in attached_files ]) additional_comment = ReturnRequest.Item.AdditionalComment( item_data['additional_comment']) status_history = ReturnRequest.Item.StatusChangesHistory( tuple([ self.__reflector.construct( ReturnRequest.Item.StatusChangesHistory.Change, { '__status': ReturnRequest.Item.Status(change['status']), # elastic supports only 3 digits for milliseconds '__datetime': datetime.datetime.strptime( change['datetime'] + '000', '%Y-%m-%dT%H:%M:%S.%f'), }) for change in item_data['status_history'] ])) request_items.append( self.__reflector.construct( ReturnRequest.Item, { self.__class__.__ENTITY_PROPERTY_ITEMS_ORDER_NUMBER: OrderNumber(item_data['order_number']), self.__class__.__ENTITY_PROPERTY_ITEMS_SIMPLE_SKU: SimpleSku(item_data['simple_sku']), self.__class__.__ENTITY_PROPERTY_ITEMS_QTY: Qty(item_data['qty']), self.__class__.__ENTITY_PROPERTY_ITEMS_COST: Cost(item_data['cost']), self.__class__.__ENTITY_PROPERTY_ITEMS_REASON: ReturnRequest.Item.Reason(item_data['reason']), self.__class__.__ENTITY_PROPERTY_ITEMS_ATTACHED_FILES: attached_files, self.__class__.__ENTITY_PROPERTY_ITEMS_ADDITIONAL_COMMENT: additional_comment, self.__class__.__ENTITY_PROPERTY_ITEMS_STATUS_HISTORY: status_history, })) return_request = self.__reflector.construct( ReturnRequest, { self.__class__.__ENTITY_PROPERTY_REQUEST_NUMBER: ReturnRequest.Number(data['request_number']), self.__class__.__ENTITY_PROPERTY_CUSTOMER_ID: Id(data['customer_id']), 
self.__class__.__ENTITY_PROPERTY_ITEMS: tuple(request_items), self.__class__.__ENTITY_PROPERTY_DELIVERY_METHOD: _restore_delivery_method(data['delivery_method']), self.__class__.__ENTITY_PROPERTY_REFUND_METHOD: _restore_refund_method( data['refund_method'], json.loads(data['refund_method_extra_data_json'])), }) return return_request def get_all_for_customer(self, customer_id: Id) -> Tuple[ReturnRequest]: if not isinstance(customer_id, Id): raise ArgumentTypeException(self.get_all_for_customer, 'customer_id', customer_id) data = self.__customer_requests_map_elastic.get_data(customer_id.value) request_numbers = json.loads(( data.get('request_numbers_json') or '[]') if data else '[]') or [] if not request_numbers: return tuple() rows = self.__requests_elastic.post_search({ "query": { "ids": { "values": request_numbers } }, "size": 10000 }).get('hits', {}).get('hits', []) or [] result = [self.__restore(row['_source']) for row in rows] if len(result) != len(request_numbers): message = '{} can\'t find all Return-Requests for Customer #{}! Not existed Return-Requests in map: {}' raise ValueError( message.format(self.get_all_for_customer, customer_id.value, [ request_number for request_number in request_numbers if request_number not in [request.number.value for request in result] ])) return tuple(result)
class _OrderElasticStorage(OrderStorageInterface): """ curl -X DELETE localhost:9200/purchase_orders curl -X PUT localhost:9200/purchase_orders -H "Content-Type: application/json" -d'{ "mappings": { "purchase_orders": { "properties": { "order_number": {"type": "keyword"}, "customer_id": {"type": "keyword"}, "order_items": { "properties": { "event_code": {"type": "keyword"}, "simple_sku": {"type": "keyword"}, "product_original_price": {"type": "float"}, "product_current_price": {"type": "float"}, "dtd_occasion_name": {"type": "keyword"}, "dtd_occasion_description": {"type": "keyword"}, "dtd_date_from": {"type": "date", "format": "date"}, "dtd_date_to": {"type": "date", "format": "date"}, "dtd_min": {"type": "integer"}, "dtd_max": {"type": "integer"}, "qty_ordered": {"type": "integer"}, "qty_return_requested": {"type": "integer"}, "qty_return_returned": {"type": "integer"}, "qty_cancelled_before_payment": {"type": "integer"}, "qty_cancelled_after_payment_requested": {"type": "integer"}, "qty_cancelled_after_payment_cancelled": {"type": "integer"}, "qty_refunded": {"type": "integer"}, "qty_modified_at": {"type": "date", "format": "date_hour_minute_second_millis"}, "fbucks_amount": {"type": "float"} } }, "delivery_address_recipient_name": {"type": "keyword"}, "delivery_address_phone_number": {"type": "keyword"}, "delivery_address_street_address": {"type": "keyword"}, "delivery_address_suburb": {"type": "keyword"}, "delivery_address_city": {"type": "keyword"}, "delivery_address_province": {"type": "keyword"}, "delivery_address_complex_building": {"type": "keyword"}, "delivery_address_postal_code": {"type": "keyword"}, "delivery_address_business_name": {"type": "keyword"}, "delivery_address_special_instructions": {"type": "keyword"}, "delivery_cost": {"type": "float"}, "vat_percent": {"type": "float"}, "credits_spent": {"type": "float"}, "payment_method": {"type": "keyword"}, "payment_method_extra_data_json": {"type": "keyword"}, "status_history": { "properties": { "status": {"type": "keyword"}, "datetime": {"type": "date", "format": "date_hour_minute_second_millis"} } } } } } }' curl -X DELETE localhost:9200/purchase_orders_customer_orders_map curl -X PUT localhost:9200/purchase_orders_customer_orders_map -H "Content-Type: application/json" -d'{ "mappings": { "purchase_orders_customer_orders_map": { "properties": { "order_numbers_json": {"type": "keyword"} } } } }' """ def __init__(self): self.__orders_elastic = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS, settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS) self.__customer_orders_map_elastic = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS_CUSTOMER_ORDERS_MAP, settings.AWS_ELASTICSEARCH_PURCHASE_ORDERS_CUSTOMER_ORDERS_MAP) self.__reflector = Reflector() self.__current_vat_value = PurchaseSettings().vat def save(self, order: Order) -> None: if not isinstance(order, Order): raise ArgumentTypeException(self.save, 'order', order) order_number = order.number delivery_address = order.delivery_address status_changes = order.status_history document_id = order_number.value document_data = { 'order_number': order_number.value, 'customer_id': order.customer_id.value, 'order_items': [ { 'event_code': item.event_code.value, 'simple_sku': item.simple_sku.value, 'product_original_price': item.product_original_price.value, 'product_current_price': item.product_current_price.value, 'dtd_occasion_name': item.dtd.occasion.name.value if item.dtd.occasion else None, 'dtd_occasion_description': item.dtd.occasion.description.value if item.dtd.occasion else None, 
'dtd_date_from': item.dtd.date_from.strftime('%Y-%m-%d'), 'dtd_date_to': item.dtd.date_to.strftime('%Y-%m-%d'), 'dtd_working_days_from': item.dtd.working_days_from, 'dtd_working_days_to': item.dtd.working_days_to, 'qty_ordered': item.qty_ordered.value, 'qty_return_requested': item.qty_return_requested.value, 'qty_return_returned': item.qty_return_returned.value, 'qty_cancelled_before_payment': item.qty_cancelled_before_payment.value, 'qty_cancelled_after_payment_requested': item.qty_cancelled_after_payment_requested.value, 'qty_cancelled_after_payment_cancelled': item.qty_cancelled_after_payment_cancelled.value, 'qty_refunded': item.qty_refunded.value, # elastic supports only 3 digits for milliseconds 'qty_modified_at': item.qty_modified_at.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3], 'fbucks_amount': item.fbucks_earnings.value, } for item in order.items ], 'delivery_address_recipient_name': delivery_address.recipient_name, 'delivery_address_phone_number': delivery_address.phone_number, 'delivery_address_street_address': delivery_address.street_address, 'delivery_address_suburb': delivery_address.suburb, 'delivery_address_city': delivery_address.city, 'delivery_address_province': delivery_address.province, 'delivery_address_complex_building': delivery_address.complex_building, 'delivery_address_postal_code': delivery_address.postal_code, 'delivery_address_business_name': delivery_address.business_name, 'delivery_address_special_instructions': delivery_address.special_instructions, 'delivery_cost': order.delivery_cost.value, 'vat_percent': order.vat_percent.value, 'credits_spent': order.credit_spent_amount.value, 'payment_method': order.payment_method.descriptor if order.payment_method else None, 'payment_method_extra_data_json': json.dumps(order.payment_method.extra_data if order. payment_method else {}), 'status_history': [ { 'status': status_change.status.value, # elastic supports only 3 digits for milliseconds 'datetime': status_change.datetime.strftime('%Y-%m-%dT%H:%M:%S.%f') [:-3], } for status_change in status_changes ], } existed_order = self.load(order_number) if existed_order: # just a double check of order number uniqueness if existed_order.customer_id != order.customer_id: raise RuntimeError( 'Order "{}" already exists and belongs to another Customer!' .format(order_number)) self.__orders_elastic.update_data(document_id, {'doc': document_data}) else: self.__orders_elastic.create(document_id, document_data) # Elastic can search by attributes only after 1 second from last update. # We need all data, when we are searching by customer_id, # so in this case we will lost fresh data, if search directly after creation of new order. # In this case we need to use another index and get data by elastic doc_id. 
customer_orders_map = self.__customer_orders_map_elastic.get_data( order.customer_id.value) if customer_orders_map: order_numbers = list( json.loads( customer_orders_map.get('order_numbers_json', '[]')) or []) order_numbers.append(order.number.value) order_numbers = list(set(order_numbers)) self.__customer_orders_map_elastic.update_data( order.customer_id.value, {'doc': { 'order_numbers_json': json.dumps(order_numbers) }}) else: self.__customer_orders_map_elastic.create( order.customer_id.value, {'order_numbers_json': json.dumps([order.number.value])}) def load(self, order_number: Order.Number) -> Optional[Order]: if not isinstance(order_number, Order.Number): raise ArgumentTypeException(self.load, 'order_number', order_number) data = self.__orders_elastic.get_data(order_number.value) result = self.__restore(data) if data else None return result def __restore(self, data: dict) -> Order: order_number = Order.Number(data.get('order_number')) customer_id = Id(data.get('customer_id')) delivery_cost = Cost(float(data.get('delivery_cost'))) vat_percent = Percentage( float( # I added "vat_percent" after first orders were stored, # but it's hard to make changes in elastic, so... # @todo : create migration tool. data.get('vat_percent') or self.__current_vat_value)) credits_spent = Cost(float(data.get('credits_spent') or '0')) # can be not existed in old data payment_method = self.__restore_payment_method( data.get('payment_method'), json.loads(data.get('payment_method_extra_data_json') or '{}') if data.get('payment_method') else None) delivery_address = DeliveryAddress( data.get('delivery_address_recipient_name'), data.get('delivery_address_phone_number'), data.get('delivery_address_street_address'), data.get('delivery_address_suburb'), data.get('delivery_address_city'), data.get('delivery_address_province'), data.get('delivery_address_complex_building'), data.get('delivery_address_postal_code'), data.get('delivery_address_business_name'), data.get('delivery_address_special_instructions')) status_changes = [] for status_change_data in data.get('status_history'): status = Order.Status(status_change_data.get('status')) # elastic supports only 3 digits for milliseconds changed_at = datetime.datetime.strptime( status_change_data.get('datetime') + '000', '%Y-%m-%dT%H:%M:%S.%f') status_change = self.__reflector.construct( Order.StatusChangesHistory.Change, { '__status': status, '__datetime': changed_at }) status_changes.append(status_change) status_change_history = Order.StatusChangesHistory( tuple(status_changes)) order_items = [] for item_data in data.get('order_items'): event_code = EventCode(item_data.get('event_code')) simple_sku = SimpleSku(item_data.get('simple_sku')) product_original_price = Cost( item_data.get('product_original_price')) product_current_price = Cost( item_data.get('product_current_price')) fbucks_earnings = Cost(item_data.get('fbucks_amount') or 0) # old orders don't have this field dtd = Dtd( Dtd.Occasion( Name(item_data.get('dtd_occasion_name')), Description(item_data.get('dtd_occasion_description'))) if item_data.get('dtd_occasion_name') else None, datetime.date( int(item_data.get('dtd_date_from').split('-')[0]), int(item_data.get('dtd_date_from').split('-')[1]), int(item_data.get('dtd_date_from').split('-')[2])), datetime.date(int(item_data.get('dtd_date_to').split('-')[0]), int(item_data.get('dtd_date_to').split('-')[1]), int(item_data.get('dtd_date_to').split('-')[2])), int(item_data.get('dtd_working_days_from')), int(item_data.get('dtd_working_days_to'))) qty_ordered = 
Qty(int(item_data.get('qty_ordered'))) qty_return_requested = Qty( int(item_data.get('qty_return_requested') or 0)) qty_return_returned = Qty( int(item_data.get('qty_return_returned') or 0)) qty_cancelled_before_payment = Qty( int(item_data.get('qty_cancelled_before_payment') or 0)) qty_cancelled_after_payment_requested = Qty( int( item_data.get('qty_cancelled_after_payment_requested') or 0)) qty_cancelled_after_payment_cancelled = Qty( int( item_data.get('qty_cancelled_after_payment_cancelled') or 0)) qty_refunded = Qty(int(item_data.get('qty_refunded') or 0)) # elastic supports only 3 digits for milliseconds qty_modified_at = datetime.datetime.strptime( ( # "qty_modified_at" may not exist for old data (dev, test), # but it's hard to make changes in elastic, so... # @todo : create migration tool. item_data.get('qty_modified_at') or status_change_history.get_last().datetime.strftime( '%Y-%m-%dT%H:%M:%S.%f')[:-3]) + '000', '%Y-%m-%dT%H:%M:%S.%f') order_item = self.__reflector.construct( Order.Item, { '__event_code': event_code, '__simple_sku': simple_sku, '__product_original_price': product_original_price, '__product_current_price': product_current_price, '__dtd': dtd, '__qty_ordered': qty_ordered, '__qty_return_requested': qty_return_requested, '__qty_return_returned': qty_return_returned, '__qty_cancelled_before_payment': qty_cancelled_before_payment, '__qty_cancelled_after_payment_requested': qty_cancelled_after_payment_requested, '__qty_cancelled_after_payment_cancelled': qty_cancelled_after_payment_cancelled, '__qty_refunded': qty_refunded, '__qty_modified_at': qty_modified_at, '__fbucks_earnings': fbucks_earnings }) order_items.append(order_item) order = self.__reflector.construct( Order, { '__order_number': order_number, '__customer_id': customer_id, '__items': order_items, '__delivery_address': delivery_address, '__delivery_cost': delivery_cost, '__vat_percent': vat_percent, '__payment_method': payment_method, '__status_history': status_change_history, '__credits_spent': credits_spent, }) return order def __restore_payment_method( self, descriptor: Optional[str], extra_data: Optional[dict], ) -> Optional[Order.PaymentMethodAbstract]: if not descriptor: return None # @todo : refactoring !!! if descriptor == 'regular_eft': return RegularEftOrderPaymentMethod() elif descriptor == 'mobicred': return MobicredPaymentMethod(extra_data['payment_id']) elif descriptor == 'credit_card': return CreditCardOrderPaymentMethod(extra_data['payment_id']) elif descriptor == 'customer_credit': return CustomerCreditsOrderPaymentMethod() raise Exception( '{} does not know, how to restore {} payment method with data {}!'. 
format(self.__restore_payment_method, descriptor, extra_data)) def get_all_by_numbers(self, order_numbers: Tuple[Order.Number]) -> Tuple[Order]: if sum([ not isinstance(order_number, Order.Number) for order_number in order_numbers ]) > 0: raise ArgumentTypeException(self.get_all_by_numbers, 'order_numbers', order_numbers) rows = self.__orders_elastic.post_search({ "query": { "ids": { "values": [order_number.value for order_number in order_numbers] } }, "size": 10000 }).get('hits', {}).get('hits', []) or [] result = [self.__restore(row['_source']) for row in rows] return tuple(result) def get_all_for_customer(self, customer_id: Id) -> Tuple[Order]: if not isinstance(customer_id, Id): raise ArgumentTypeException(self.get_all_for_customer, 'customer_id', customer_id) data = self.__customer_orders_map_elastic.get_data(customer_id.value) order_numbers = json.loads(( data.get('order_numbers_json') or '[]') if data else '[]') or [] if not order_numbers: return tuple() rows = self.__orders_elastic.post_search({ "query": { "ids": { "values": order_numbers } }, "size": 10000 }).get('hits', {}).get('hits', []) or [] result = [self.__restore(row['_source']) for row in rows] if len(result) != len(order_numbers): message = '{} can\'t find all Orders for Customer #{}! Not existed order in map: {}' raise ValueError( message.format(self.get_all_for_customer, customer_id.value, [ order_number for order_number in order_numbers if order_number not in [order.number.value for order in result] ])) return tuple(result)
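# Quick illustration of the "elastic supports only 3 digits for milliseconds" convention
# used throughout these storage classes: datetimes are written with strftime('%f')[:-3]
# (microseconds truncated to milliseconds) and read back by appending '000' before
# strptime. Standalone sketch, not part of the storage code:
import datetime

_dt = datetime.datetime(2020, 1, 2, 3, 4, 5, 123456)
_stored = _dt.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]            # '2020-01-02T03:04:05.123'
_restored = datetime.datetime.strptime(_stored + '000', '%Y-%m-%dT%H:%M:%S.%f')
assert _restored == _dt.replace(microsecond=123000)            # precision beyond ms is lost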
def customer_credits_checkout():
    checkout_storage = CheckoutStorageImplementation()
    order_storage = OrderStorageImplementation()
    order_app_service = OrderAppService()
    cart_service = CartAppService()
    checkout_service = CheckoutAppService()
    sqs_sender = SqsSenderImplementation()
    logger = Logger()

    try:
        user = __get_user()

        # @todo : refactoring
        checkout = checkout_storage.load(Id(user.id))
        if not checkout:
            raise ApplicationLogicException('Checkout does not exist!')
        elif checkout.total_due.value != 0:
            raise ApplicationLogicException(
                'Unable to checkout not 0 amount with Customer Credits!')

        order = order_app_service.get_waiting_for_payment_by_checkout_or_checkout_new(user.id)

        def __log_flow(text: str) -> None:
            logger.log_simple('Customer Credits Payment Log for Order #{} : {}'.format(
                order.number.value, text))

        __log_flow('Start')

        try:
            __log_flow('Credits Spending...')

            # Attention!
            # Currently we use f-bucks only! Other credits are not available for now!
            # @todo : other credit types
            # @todo : copy-paste code
            # @todo : when reservation of credits amount will be done, perhaps, use sqs to spend credits
            """"""
            from chalicelib.libs.purchase.core import Checkout
            see = Checkout.__init__
            """"""
            # @TODO : refactoring : raw data usage
            import uuid
            import datetime
            from chalicelib.settings import settings
            from chalicelib.libs.core.elastic import Elastic
            fbucks_customer_amount_elastic = Elastic(
                settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT,
                settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT,
            )
            fbucks_customer_amount_changes_elastic = Elastic(
                settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES,
                settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES,
            )
            fbucks_customer_amount_elastic.update_data(order.customer_id.value, {
                'script': 'ctx._source.amount -= ' + str(order.credit_spent_amount.value)
            })
            fbucks_customer_amount_changes_elastic.create(
                str(uuid.uuid4()) + str(order.customer_id.value), {
                    "customer_id": order.customer_id.value,
                    "amount": -order.credit_spent_amount.value,
                    "changed_at": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    "order_number": order.number.value,
                })
            __log_flow('Credits Spent!')

            __log_flow('Order Updating...')
            order.payment_method = CustomerCreditsOrderPaymentMethod()
            order.status = Order.Status(Order.Status.PAYMENT_SENT)
            order.status = Order.Status(Order.Status.PAYMENT_RECEIVED)
            order_storage.save(order)
            __log_flow('Order Updated!')
        except BaseException as e:
            __log_flow('Not done because of Error : {}'.format(str(e)))
            raise e

        # send order update to sqs
        try:
            __log_flow('Order Change SQS Sending...')
            sqs_sender.send(OrderChangeSqsSenderEvent(order))
            __log_flow('Order Change SQS Sent!')
        except BaseException as e:
            __log_flow('Order Change SQS NOT Sent because of Error: {}!'.format(str(e)))
            logger.log_exception(e)

        # flush cart
        try:
            __log_flow('Cart Flushing...')
            cart_service.clear_cart(user.session_id)
            __log_flow('Cart Flushed!')
        except BaseException as e:
            __log_flow('Cart NOT Flushed because of Error: {}'.format(str(e)))
            logger.log_exception(e)

        # flush checkout
        try:
            __log_flow('Checkout Flushing...')
            checkout_service.remove(user.id)
            __log_flow('Checkout Flushed!')
        except BaseException as e:
            __log_flow('Checkout NOT Flushed because of Error: {}'.format(str(e)))
            logger.log_exception(e)

        result = {'order_number': order.number.value}

        __log_flow('End')

        return result
    except BaseException as e:
        logger.log_exception(e)
        return http_response_exception_or_throw(e)
def __init__(self):
    self.__elastic = Elastic(settings.AWS_ELASTICSEARCH_SCORED_PRODUCTS,
                             settings.AWS_ELASTICSEARCH_SCORED_PRODUCTS)
class ScoredProduct(object): INDEX_NAME = settings.AWS_ELASTICSEARCH_SCORED_PRODUCTS __TRACK_IN_DATALAKE__: bool = True __weight__: ScoringWeight = None def __init__(self): self.__elastic = Elastic(settings.AWS_ELASTICSEARCH_SCORED_PRODUCTS, settings.AWS_ELASTICSEARCH_SCORED_PRODUCTS) @property def now(self) -> datetime: return get_mpc_datetime_now() @staticmethod def __convert_filter(filter_name): switcher = { 'id': 'portal_config_id', 'sku': 'rs_sku', 'title': 'product_name', 'subtitle': 'product_description', 'price': 'rs_selling_price', 'product_type': 'product_size_attribute', 'product_sub_type': 'rs_product_sub_type', 'gender': 'gender', 'brand': 'manufacturer', 'size': 'sizes.size', 'color': 'rs_colour', 'newin': 'created_at', # '_score': '_score', ProductSearchCriteria.SORT_COLUMN_PERCENTAGE_SCORE: 'percentage_score', 'search_query': 'search_query', } return switcher.get(filter_name, "invalid_name") @staticmethod def __convert_sort_filter(column_name, direction): sort_map = { 'id': { 'portal_config_id': { 'order': direction } }, 'sku': { 'rs_sku': { 'order': direction } }, 'title': { 'product_name': { 'order': direction } }, 'subtitle': { 'product_description': { 'order': direction } }, 'product_type': { 'product_size_attribute': { 'order': direction } }, 'product_sub_type': { 'rs_product_sub_type': { 'order': direction } }, 'gender': { 'gender': { 'order': direction } }, 'brand': { 'manufacturer': { 'order': direction } }, 'size': { 'sizes.size': { 'order': direction } }, 'color': { 'rs_colour': { 'order': direction } }, 'newin': { 'created_at': { 'order': direction } }, # '_score': '_score', ProductSearchCriteria.SORT_COLUMN_PERCENTAGE_SCORE: { 'percentage_score': { 'order': direction } }, 'search_query': { 'search_query': { 'order': direction } }, 'price': { '_script': { "type": "number", "script": { "lang": "painless", # see __convert_item_calculate_prices() "source": "{price} - {price} * {discount} / 100".format( **{ 'price': "doc['rs_selling_price'].value", 'discount': "doc['discount'].value" }), }, "order": direction } }, } if column_name not in sort_map.keys(): raise ValueError( 'Oh, no! 
{} does not know, how to {} with {} column!'.format( ScoredProduct.__qualname__, '__convert_sort_filter', column_name)) return sort_map[column_name] @property def elastic(self) -> Elastic: return self.__elastic @property def weight(self) -> ScoringWeight: if not self.__weight__: weight_model = WeightModel() self.__weight__ = weight_model.scoring_weight return self.__weight__ def __update_by_query(self, query: dict): return self.elastic.update_by_query(query) def __convert_products(self, data, tier: dict = None, is_anyonimous: bool = False): ret = { "total": data["total"], "products": [ self.__convert_item(item["_source"], tier=tier, is_anyonimous=is_anyonimous) for item in data["hits"] ] } return ret @staticmethod def __convert_item_calculate_prices(item) -> tuple: original_price = float(item['rs_selling_price'] or 0) discount = float(item['discount'] or 0) current_price = original_price - original_price * discount / 100 return original_price, current_price def __convert_item(self, item, tier: dict = None, is_anyonimous: bool = False): original_price, current_price = self.__class__.__convert_item_calculate_prices( item) fbucks = None if isinstance( tier, dict) and not tier.get('is_neutral') and not is_anyonimous: fbucks = math.ceil(item['current_price'] * tier['discount_rate'] / 100) result = { 'id': item['portal_config_id'], 'sku': item['rs_sku'], 'event_code': item['event_code'], 'title': item['product_name'], 'subtitle': item['product_description'], 'price': item['rs_selling_price'], 'discount': item['discount'], 'original_price': original_price, 'current_price': current_price, 'fbucks': fbucks, # 'badge': 'NEW IN' if datetime.strptime(item['created_at'], "%Y-%m-%d %H:%M:%S") > from_date else None, 'product_type': item['product_size_attribute'], 'product_sub_type': item['rs_product_sub_type'], 'gender': item['gender'], 'brand': item['manufacturer'], 'color': item['rs_colour'], 'sizes': [{ 'size': size['size'], 'qty': size['qty'], 'simple_sku': size['rs_simple_sku'], 'simple_id': size['portal_simple_id'], } for size in item.get('sizes', [])], 'image': { 'src': item['images'][0]['s3_filepath'] if len(item['images']) > 0 else 'https://www.supplyforce.com/ASSETS/WEB_THEMES//ECOMMERCE_STD_TEMPLATE_V2/images/NoImage.png', 'title': item['product_size_attribute'], }, 'scores': { 'version': self.weight.version, 'qs': item.get('question_score', 0), 'qw': self.weight.question, 'rs': item.get('order_score', 0), 'rw': self.weight.order, 'ts': item.get('tracking_score', 0), 'tw': self.weight.track, 'total': sum([ float(item.get('question_score', 0) or 0) * self.weight.question, float(item.get('order_score', 0) or 0) * self.weight.order, float(item.get('tracking_score', 0) or 0) * self.weight.track, ]), ProductSearchCriteria.SORT_COLUMN_PERCENTAGE_SCORE: item.get(ProductSearchCriteria.SORT_COLUMN_PERCENTAGE_SCORE, -1.00), } } if not is_anyonimous: result.update({ 'tracking_info': item.get('tracking_info', { 'views': 0, 'clicks': 0, 'visits': 0 }), 'is_seen': item.get('is_seen', False) }) return result def __bulk(self, actions: List[dict]) -> bool: try: count, _ = helpers.bulk(self.elastic.client, actions) return count > 0 except Exception as e: warn(str(e)) return False def __bulk_update(self, customer_id: str, products: List[ProductEntry]): if not customer_id: customer_id = 'BLANK' # TODO: Index name should not contain the char - "#". Should be updated here. 
actions = [{ '_index': self.INDEX_NAME, '_type': self.INDEX_NAME, '_id': "%s__%s" % (customer_id, product.rs_sku), '_source': { 'customer_id': customer_id, **product.to_dict(mode='scored') } } for product in products] return self.__bulk(actions) def __get_tracking_aggregation(self, customer_id: str, size: int = 500) -> Tuple[dict, dict]: query = { "aggs": { "product_types": { "terms": { "field": "product_size_attribute", "size": 1000 } }, "product_sub_types": { "terms": { "field": "rs_product_sub_type", "size": 1000 } }, "genders": { "terms": { "field": "gender", "size": 10 } }, "brands": { "terms": { "field": "manufacturer", "size": 1000 } }, "sizes": { "terms": { "field": "sizes.size", "size": 1000 } } }, "query": { "bool": { "must": [{ "match": { "customer_id": customer_id } }, { "bool": { "should": [{ "range": { "tracking_info.clicks": { "gt": 0 } } }, { "range": { "tracking_info.visits": { "gt": 0 } } }] } }] } }, "size": size } response = self.elastic.post_search(query) KEYS = [ 'brands', 'sizes', 'product_types', 'genders', 'product_sub_types' ] data = dict() for key, agg_data in response['aggregations'].items(): if key not in KEYS: continue data[key] = [bucket['key'] for bucket in agg_data['buckets']] products = dict() for hit in response['hits']['hits']: item = hit['_source'] products[item['rs_sku']] = { 'views': item.get('tracking_info', {}).get('views', 0), 'clicks': item.get('tracking_info', {}).get('clicks', 0), 'visits': item.get('tracking_info', {}).get('visits', 0), 'viewed_at': item.get('viewed_at'), } return data, products def __build_track_query(self, action_or_list: List[_BaseAction]) -> List[dict]: action_maps = { ViewAction: 'tracking_info.views', ClickAction: 'tracking_info.clicks', VisitAction: 'tracking_info.visits' } buffer = dict() if isinstance(action_or_list, _BaseAction): action_or_list = [action_or_list] # Grouping by action_type and customer_id for action in action_or_list: if not action_maps.get(action.__class__): warn("Unknown instance found - %s" % action.__class__) continue if buffer.get(action_maps[action.__class__]) is None: buffer[action_maps[action.__class__]] = {action.user_id: []} if buffer[action_maps[action.__class__]].get( action.user_id) is None: buffer[action_maps[action.__class__]][action.user_id] = [] buffer[action_maps[action.__class__]][action.user_id].append( action.config_sku) queries = list() date_str = self.now.strftime("%Y-%m-%d %H:%M:%S") for action_type, user_data in buffer.items(): for customer_id, config_skus in user_data.items(): if not customer_id: continue query = { "script": { "inline": "ctx._source.%s += params.step;"\ "ctx._source.viewed_at = params.viewed_at" % action_type, "lang": "painless", "params": { "step": 1, "viewed_at": date_str, }, "upsert": { action_type : 1, "viewed_at": date_str, } }, "query": { "bool": { "must": [ { "term": { "customer_id": customer_id } }, { "terms": { "rs_sku": config_skus } } ] } } } queries.append(query) results = list() for query in queries: results.append(self.__update_by_query(query)) return results def __makeESFilterFromCustomFilter(self, custom_filters: Optional[dict] = None, customer_id: str = None): if not customer_id: customer_id = 'BLANK' ret = {} ret['bool'] = {} ret['bool']['must'] = [{"match": {"customer_id": customer_id}}] for key, value in custom_filters.items() if custom_filters else []: key = self.__class__.__convert_filter(key) if (key == "invalid_name"): continue must_item = {} if key == 'rs_selling_price': must_item['range'] = {} must_item['range'][key] = {} 
must_item['range'][key]['gte'] = value[0] must_item['range'][key]['lte'] = value[1] elif key == 'search_query': value = str(value or '').strip() if value: must_item['bool'] = { "should": [ { "match_phrase_prefix": { "product_name": value } }, # {"match_phrase_prefix": {"product_description": value}} When search_query is 'Dress', this returns some socks and shoes { "match_phrase_prefix": { "product_size_attribute": value } } ] } elif key == 'created_at': if value == 'true': from_date = (self.now - timedelta(days=settings.NEW_PRODUCT_THRESHOLD) ).strftime(DATETIME_FORMAT) must_item['range'] = {} must_item['range'][key] = {} must_item['range'][key]['gte'] = from_date else: continue else: if isinstance(value, list): must_item['bool'] = {} must_item['bool']['should'] = [] for _value in value: match = {} match['match'] = {} match['match'][key] = _value must_item['bool']['should'].append(match) else: must_item['match'] = {} must_item['match'][key] = value if must_item: ret['bool']['must'].append(must_item) return ret def __get_sort_option_by_score(self) -> dict: return { "_script": { "type": "number", "script": { "lang": "painless", "source": "doc['question_score'].value * params.qw +"\ "doc['order_score'].value * params.rw +"\ "doc['tracking_score'].value * params.tw", "params": { "qw": self.weight.question, "rw": self.weight.order, "tw": self.weight.track, } }, "order": "desc" } } def __get_sort_option_by_viewed_at(self) -> dict: return {"viewed_at": {"order": "asc"}} def __get_inline_script(self, attr: str, value: Union[str, int, float, dict], params_name: str = 'params', prefix: str = None, context_prefix: str = "ctx._source"): results = list() if prefix: attr_name = "%s.%s" % (prefix, attr) else: attr_name = attr if isinstance(value, dict): for key, data in value.items(): results += self.__get_inline_script(key, data, prefix=attr_name) else: return [ "%s.%s = params.%s" % (context_prefix, attr_name, attr_name) ] return results def __get_query_params(self, attr: str, value: Union[str, int, float, dict], prefix: str = None): results = list() if prefix: attr_name = "%s.%s" % (prefix, attr) else: attr_name = attr if isinstance(value, dict): for key, data in value.items(): results += self.__get_query_params(key, data, prefix=attr_name) else: return [(attr_name, value)] return results def __get_from_index(self, page: int = 1, size: int = 20) -> int: fromindex = (int(page) - 1) * int(size) if fromindex < 0: fromindex = 0 return fromindex def calculate_scores(self, email: str = None, size: int = 500): username: str = None if email: username: str = get_username_from_email(email) if username: # Track personalize progress customer_state = CustomerStateModel(username, email) customer_state.personalize_in_progress = True username, products = get_bucket_data(email, username=username, size=size) if username: trackings, tracking_dictionary = self.__get_tracking_aggregation( username, size=size) tracking_data = UserTrackEntry(len(products), **trackings) for product in products: if tracking_dictionary.get(product.rs_sku): product.views = tracking_dictionary[ product.rs_sku]['views'] product.clicks = tracking_dictionary[ product.rs_sku]['clicks'] product.visits = tracking_dictionary[ product.rs_sku]['visits'] product.viewed_at = tracking_dictionary[ product.rs_sku]['viewed_at'] product.apply_trackings(tracking_data) # NOTE: Calculating percentage score score_range = PercentageScoreRange() for product in products: product.score_range = score_range if product.total_score > score_range.max_score: 
score_range.max_score = product.total_score if product.total_score < score_range.min_score: score_range.min_score = product.total_score response = self.__bulk_update(username, products) if username: customer_state.personalize_in_progress = False return response def track(self, action_or_list: Union[_BaseAction, List[_BaseAction]]): self.__update_by_query(self.__build_track_query(action_or_list)) if isinstance(action_or_list, _BaseAction): action_or_list = [action_or_list] customer_ids = list( set([ item.user_id for item in action_or_list if isinstance(item, (ClickAction, VisitAction)) ])) for customer_id in customer_ids: CustomerStateModel(customer_id).clicked_now() # Keep the original tracking module for now. if self.__TRACK_IN_DATALAKE__: ProductsTrackingModel.track(action_or_list) def listByCustomFilter(self, customer_id: str = None, email: str = None, custom_filters: Optional[dict] = None, sorts: dict = {}, sort_by_score: bool = True, tier: dict = None, page=1, size=18): if not customer_id and isinstance(email, str): customer_id = get_username_from_email(email) filters = self.__makeESFilterFromCustomFilter(custom_filters, customer_id=customer_id) # NOTE: Always score by percentage score percentage_score_column = ProductSearchCriteria.SORT_COLUMN_PERCENTAGE_SCORE sorts[percentage_score_column] = sorts.get( percentage_score_column) or "desc" query = { "query": filters, "size": size, "from": self.__get_from_index(page=page, size=size), "sort": [ self.__class__.__convert_sort_filter(column, direction) for column, direction in sorts.items() ], } response = self.__elastic.post_search(query)['hits'] return self.__convert_products(response, tier=tier, is_anyonimous=(not customer_id)) def update(self, config_sku: str, data: dict): json_data = {"doc": data} inline_scripts = list() params_list = list() for key, value in data.items(): inline_scripts.append("ctx._source.%s = params.%s" % (key, key)) query = { "script": { "inline": ";".join(inline_scripts), "lang": "painless", "params": data, "upsert": data }, "query": { "bool": { "must": [{ "term": { "rs_sku": config_sku } }] } } } response = self.__update_by_query(query) return response def get_new_products(self, customer_id: str = None, gender: str = None, tier: dict = None, page: int = 1, size: int = 20, **kwargs): filters = { 'gender': [gender] if gender and gender.strip().lower() != 'unisex' else [], 'newin': 'true' } filters = self.__makeESFilterFromCustomFilter(filters, customer_id=customer_id) sort_options = [self.__get_sort_option_by_score()] query = { "query": filters, "size": size, "from": self.__get_from_index(page=page, size=size), "sort": sort_options, } response = self.__elastic.post_search(query)['hits'] return self.__convert_products(response, tier=tier, is_anyonimous=(not customer_id)) def get_last_chance(self, customer_id: str = None, gender: str = None, tier: dict = None, page=1, size=20, **kwargs): end_date = (self.now - timedelta(days=settings.LAST_CHANCE_END_DATE_THRESHOLD) ).strftime(DATETIME_FORMAT) if not customer_id: customer_id = 'BLANK' offset = self.__get_from_index(page=page, size=size) query = { "query": { "bool": { "must": [{ "term": { "customer_id": customer_id } }, { "range": { "created_at": { "lt": end_date } } }, { "range": { "sizes.qty": { "lte": settings.LAST_CHANCE_STOCK_THRESHOLD, "gt": 0 } } }] } }, "aggs": { "sum_of_order_score": { "sum": { "field": "order_score" } }, "sum_of_question_score": { "sum": { "field": "question_score" } }, "sum_of_tracking_score": { "sum": { "field": "tracking_score" } }, 
"sum_sort": { "bucket_sort": { "sort": [{ "sum_of_question_score": { "order": "desc" } }, { "sum_of_order_score": { "order": "desc" } }, { "sum_of_tracking_score": { "order": "desc" } }] } } }, "size": 0 } if gender and gender.strip().lower() != 'unisex': query['query']['bool']['must'].append( {"terms": { "gender": [gender] }}, ) response = self.__elastic.post_search(query) bucket = response['aggregations']['product_type_terms']['buckets'][ offset:offset + size] product_type_model = ProductType() product_types = product_type_model.filter_by_product_type_name( [item['key'] for item in bucket]) dictionary = dict([(item['key'], item['doc_count']) for item in bucket]) return [{ 'id': int(item['product_type_id']), 'name': item['product_type_name'], 'count': dictionary.get(item['product_type_name']), 'image': { 'src': item['image'], 'title': item['product_type_name'] } } for item in product_types] def get( self, id, # config_sku customer_id: str = None, tier: dict = None, log: bool = False, session_id: str = None): if not customer_id: customer_id = 'BLANK' item = self.elastic.get_data(f"{customer_id}__{id}") if not item: response = self.elastic.post_search({ "query": { "bool": { "must": [{ "term": { "customer_id": customer_id } }, { "term": { "rs_sku": id } }] } }, "size": 1 })['hits'] if response['total'] > 0: item = response['hits'][0]['_source'] if log and isinstance(item, dict): # TODO: refactoring - move out from model's method log_model = ProductVisitLog(session_id, customer_id=customer_id) log_model.insert(self.__convert_item(item, tier=tier)) # Fix "An AttributeValue may not contain an empty string" error item['size_chart'] = item.get('size_chart') or None item['img'] = { 'media_gallery': item.get('img', {}).get('media_gallery', []), 'images': { 'lifestyle': item.get('img', {}).get('images', {}).get('lifestyle') or None, 'small': item.get('img', {}).get('images', {}).get('small') or None, 'back': item.get('img', {}).get('images', {}).get('back') or None, } } return item def get_categories_by_gender(self, gender: str, customer_id: str = None, user_defined_product_types: list = [], **kwargs): if not customer_id: customer_id = 'BLANK' if not gender or gender.lower() == 'unisex': gender = 'ladies' categories = Category().get_by_gender(gender) product_types = [item['product_type_name'] for item in categories] # NOTE: Filter use defined products by stored categories user_defined_product_types = [ item for item in user_defined_product_types if item in product_types ] query = { "query": { "bool": { "must": [{ "term": { "customer_id": customer_id } }, { "term": { "gender": gender.upper() } }, { "terms": { "product_size_attribute": product_types } }, { "range": { "sizes.qty": { "gt": 0 } } }] } }, "aggs": { "product_type_terms": { "terms": { "field": "product_size_attribute" }, "aggs": { "sum_of_order_score": { "sum": { "field": "order_score" } }, "sum_of_question_score": { "sum": { "field": "question_score" } }, "sum_of_tracking_score": { "sum": { "field": "tracking_score" } }, "sum_sort": { "bucket_sort": { "sort": [{ "sum_of_question_score": { "order": "desc" } }, { "sum_of_order_score": { "order": "desc" } }, { "sum_of_tracking_score": { "order": "desc" } }] } } } } }, "size": 0 } response = self.__elastic.post_search(query) buckets = response['aggregations']['product_type_terms']['buckets'] sorted_product_types = [item['key'] for item in buckets] # NOTE: Re-sort whether it liked by customer or not sorted_product_types = user_defined_product_types +\ [item for item in sorted_product_types if 
item not in user_defined_product_types] return sorted( categories, key=lambda x: sorted_product_types.index(x['product_type_name']) if x['product_type_name'] in sorted_product_types else len( categories)) def get_complete_looks(self, id, customer_id='BLANK', tier: dict = None, page=1, size=20, **kwargs): offset = (page - 1) * size item = self.get(id, customer_id=customer_id, tier=tier) if item is None: return [] product_type = item.get('product_size_attribute') sub_type = item.get('rs_product_sub_type') gender = item.get('gender') # product_type_model = ProductType() # item = product_type_model.get_root_node(product_type_name=product_type) product_types = [ item['product_type_name'] for item in self.get_categories_by_gender( gender, customer_id=customer_id, size=5) ] query = { "query": { "bool": { "must": [ { "term": { "gender": gender } }, { "range": { "sizes.qty": { "gt": 0 } } }, # { # "terms": {"product_size_attribute": product_types} # }, { "term": { "product_size_attribute": product_type } }, ], "must_not": [{ "term": { "rs_product_sub_type": sub_type } }], } }, "from": offset, "size": size } response = self.elastic.post_search(query) return self.__convert_products(response['hits'], tier=tier)['products'] def get_sizes_by_product_type(self, product_type: str, gender: str, customer_id: str = 'BLANK', **kwargs): if not customer_id: customer_id = 'BLANK' query = { "bool": { "must": [{ "term": { "product_size_attribute": product_type } }, { 'term': { 'customer_id': customer_id } }, { "range": { "sizes.qty": { "gt": 0 } } }] } } if gender.lower() != 'unisex': query['bool']['must'].append({"term": {"gender": gender}}) aggs = { "product_size_terms": { "terms": { "field": "sizes.size" }, "aggs": { "sum_of_order_score": { "sum": { "field": "order_score" } }, "sum_of_question_score": { "sum": { "field": "question_score" } }, "sum_of_tracking_score": { "sum": { "field": "tracking_score" } }, "sum_sort": { "bucket_sort": { "sort": [{ "sum_of_question_score": { "order": "desc" } }, { "sum_of_order_score": { "order": "desc" } }, { "sum_of_tracking_score": { "order": "desc" } }] } } } } } response = self.elastic.post_search({ 'query': query, 'aggs': aggs, 'size': 0 }) return [ item['key'] for item in response['aggregations'] ['product_size_terms']['buckets'] ] def get_by_size(self, product_size: str, customer_id: str = None, product_type: str = None, gender: str = None, tier: dict = None, page: int = 1, size: int = 20, **kwargs): offset = (page - 1) * size if not customer_id: customer_id = 'BLANK' query = { "query": { "bool": { "must": [{ "term": { "customer_id": customer_id } }, { "term": { "sizes.size": product_size } }, { "range": { "sizes.qty": { "gt": 0 } } }] } }, "from": offset, "size": size } if product_type: query['query']['bool']['must'].append( {"term": { "product_size_attribute": product_type }}) if gender and gender.lower() != 'unisex': query['query']['bool']['must'].append({'term': {'gender': gender}}) response = self.elastic.post_search(query) return self.__convert_products(response['hits'], tier=tier)['products'] def get_top_brands(self, customer_id: str = 'BLANK', user_defined: List[str] = [], exclude: List[str] = [], page: int = 1, size: int = 20, **kwargs) -> List[dict]: offset = (page - 1) * size exclude = [item.strip().lower() for item in exclude] from_date = (self.now - timedelta(days=settings.NEW_PRODUCT_THRESHOLD) ).strftime(DATETIME_FORMAT) if not customer_id: customer_id = 'BLANK' query = { "query": { "bool": { "must": [ { "term": { "customer_id": customer_id } }, { 
"range": { "sizes.qty": { "gt": 0 } } }, ], "must_not": [{ "terms": { "brand_code": exclude } }] } }, "aggs": { "available_brands": { "terms": { "field": "manufacturer", "size": 1000 }, "aggs": { "new_items": { "range": { "field": "created_at", "ranges": [{ "from": from_date }] } }, "sum_of_order_score": { "sum": { "field": "order_score" } }, "sum_of_question_score": { "sum": { "field": "question_score" } }, "sum_of_tracking_score": { "sum": { "field": "tracking_score" } }, "sum_sort": { "bucket_sort": { "sort": [{ "sum_of_question_score": { "order": "desc" } }, { "sum_of_order_score": { "order": "desc" } }, { "sum_of_tracking_score": { "order": "desc" } }] } } } } }, "size": 0 } response = self.elastic.post_search(query) brand_model = Brand() buckets = response['aggregations']['available_brands']['buckets'] brand_names = [item['key'] for item in buckets] brand_names = sorted( brand_names, key=lambda x: user_defined.index(x) if x in user_defined else len(brand_names))[offset:offset + size] buckets = dict([(item['key'].lower(), { 'new': item['new_items']['buckets'][0]['doc_count'] > 0, 'available_items': item['doc_count'], 'new_items': item['new_items']['buckets'][0]['doc_count'] }) for item in buckets if item['key'] in brand_names]) brands = brand_model.filter_by_brand_names(brand_names)["Items"] for brand in brands: if buckets.get(brand['brand_name'].lower()): brand.update(buckets.get(brand['brand_name'].lower())) else: del brand return brands
class Product(object): # @todo : refactoring ~ get_all(criteria, limit, offset) def __init__(self): self.__elastic = Elastic(settings.AWS_ELASTICSEARCH_PRODUCTS, settings.AWS_ELASTICSEARCH_PRODUCTS) def get_all(self, convert: bool = False): # TODO: Should be refactored later. offset, CHUNK_SIZE = 0, 1000 products = [] query = { "size": CHUNK_SIZE, "from": offset, } response = self.__elastic.post_search(query)['hits'] total = response['total'] products += response['hits'] while len(products) < total: offset += CHUNK_SIZE query = { "size": CHUNK_SIZE, "from": offset, } response = self.__elastic.post_search(query)['hits'] products += response['hits'] if convert: return self.__convert_products({'total': total, 'hits': products}) else: return [ProductEntry(**item['_source']) for item in products] def listAll(self, sort, order, page=1, size=18): fromindex = (int(page) - 1) * int(size) if fromindex < 0: fromindex = 0 sort = self.__class__.__convert_filter(sort) if sort == "invalid_name": return {"error": "invalid sort field"} query = { "size": size, "from": fromindex, "sort": [{ sort: { "order": order } }] } response = self.__elastic.post_search(query)['hits'] return self.__convert_products(response) def __makeESFilterFromCustomFilter(self, custom_filters: Optional[dict]): ret = {} ret['bool'] = {} ret['bool']['must'] = list() for key, value in custom_filters.items() if custom_filters else []: key = self.__class__.__convert_filter(key) if (key == "invalid_name"): continue must_item = {} if key == 'rs_selling_price': must_item['range'] = {} must_item['range'][key] = {} must_item['range'][key]['gte'] = value[0] must_item['range'][key]['lte'] = value[1] elif key == 'search_query': value = str(value or '').strip() if value: must_item['bool'] = { "should": [ { "match_phrase_prefix": { "product_name": value } }, # {"match_phrase_prefix": {"product_description": value}} When search_query is 'Dress', this returns some socks and shoes { "match_phrase_prefix": { "product_size_attribute": value } } ] } elif key == 'created_at': if value == 'true': from_date = (datetime.now() - timedelta(days=settings.NEW_PRODUCT_THRESHOLD) ).strftime("%Y-%m-%d %H:%M:%S") must_item['range'] = {} must_item['range'][key] = {} must_item['range'][key]['gte'] = from_date else: continue else: if isinstance(value, list): must_item['bool'] = {} must_item['bool']['should'] = [] for _value in value: match = {} match['match'] = {} match['match'][key] = _value must_item['bool']['should'].append(match) else: must_item['match'] = {} must_item['match'][key] = value if must_item: ret['bool']['must'].append(must_item) return ret def listByCustomFilter(self, custom_filters: Optional[dict], sorts: dict, tier: dict, page: int, size: int): filters = self.__makeESFilterFromCustomFilter(custom_filters) fromindex = (int(page) - 1) * int(size) if fromindex < 0: fromindex = 0 query = { "query": filters, "size": size, "from": fromindex, "sort": [ self.__class__.__convert_sort_filter(column, direction) for column, direction in sorts.items() ], } response = self.__elastic.post_search(query)['hits'] return self.__convert_products(response, tier=tier) def update(self, config_sku, data): json_data = {"doc": data} response = self.__elastic.update_data(config_sku, json_data) return response def get(self, id, log: bool = False, session_id: str = None, customer_id: str = None): item = self.__elastic.get_data(id) if log and item is not None: # TODO: refactoring - move out from model's method log_model = ProductVisitLog(session_id, customer_id=customer_id) 
log_model.insert(self.__convert_item(item)) # Fix "An AttributeValue may not contain an empty string" error item['size_chart'] = item.get('size_chart') or None item['img'] = { 'media_gallery': item.get('img', {}).get('media_gallery', []), 'images': { 'lifestyle': item.get('img', {}).get('images', {}).get('lifestyle') or None, 'small': item.get('img', {}).get('images', {}).get('small') or None, 'back': item.get('img', {}).get('images', {}).get('back') or None, } } return item def __convert_products(self, data, tier: dict = None): ret = { "total": data["total"], "products": [ self.__convert_item(item["_source"], tier=tier) for item in data["hits"] ] } return ret @staticmethod def __convert_item_calculate_prices(item) -> tuple: original_price = float(item['rs_selling_price'] or 0) discount = float(item['discount'] or 0) current_price = original_price - original_price * discount / 100 return original_price, current_price def __convert_item(self, item, tier: dict = None): original_price, current_price = self.__class__.__convert_item_calculate_prices( item) fbucks = None if isinstance(tier, dict) and not tier.get('is_neutral'): fbucks = math.ceil( item.get('current_price', current_price) * tier['discount_rate'] / 100) result = { 'id': item['portal_config_id'], 'sku': item['rs_sku'], 'event_code': item['event_code'], 'title': item['product_name'], 'subtitle': item['product_description'], 'price': item['rs_selling_price'], 'discount': item['discount'], 'original_price': original_price, 'current_price': current_price, 'fbucks': fbucks, # 'badge': 'NEW IN' if datetime.strptime(item['created_at'], "%Y-%m-%d %H:%M:%S") > from_date else None, 'product_type': item['product_size_attribute'], 'product_sub_type': item['rs_product_sub_type'], 'gender': item['gender'], 'brand': item['manufacturer'], 'color': item['rs_colour'], 'sizes': [{ 'size': size['size'], 'qty': size['qty'], 'simple_sku': size['rs_simple_sku'], 'simple_id': size['portal_simple_id'], } for size in item.get('sizes', [])], 'image': { 'src': item['images'][0]['s3_filepath'] if len(item['images']) > 0 else 'https://www.supplyforce.com/ASSETS/WEB_THEMES//ECOMMERCE_STD_TEMPLATE_V2/images/NoImage.png', 'title': item['product_size_attribute'], } } return result @staticmethod def __convert_filter(filter_name): switcher = { 'id': 'portal_config_id', 'sku': 'rs_sku', 'title': 'product_name', 'subtitle': 'product_description', 'price': 'rs_selling_price', 'product_type': 'product_size_attribute', 'product_sub_type': 'rs_product_sub_type', 'gender': 'gender', 'brand': 'manufacturer', 'size': 'sizes.size', 'color': 'rs_colour', # 'newin': 'created_at', '_score': '_score', 'search_query': 'search_query' } return switcher.get(filter_name, "invalid_name") @staticmethod def __convert_sort_filter(column_name, direction): sort_map = { 'id': { 'portal_config_id': { 'order': direction } }, 'sku': { 'rs_sku': { 'order': direction } }, 'title': { 'product_name': { 'order': direction } }, 'subtitle': { 'product_description': { 'order': direction } }, 'product_type': { 'product_size_attribute': { 'order': direction } }, 'product_sub_type': { 'rs_product_sub_type': { 'order': direction } }, 'gender': { 'gender': { 'order': direction } }, 'brand': { 'manufacturer': { 'order': direction } }, 'size': { 'sizes.size': { 'order': direction } }, 'color': { 'rs_colour': { 'order': direction } }, 'newin': { 'created_at': { 'order': direction } }, '_score': { '_score': { 'order': direction } }, 'search_query': { 'search_query': { 'order': direction } }, 'price': { 
'_script': { "type": "number", "script": { "lang": "painless", # see __convert_item_calculate_prices() "source": "{price} - {price} * {discount} / 100".format( **{ 'price': "doc['rs_selling_price'].value", 'discount': "doc['discount'].value" }), }, "order": direction } } } if column_name not in sort_map.keys(): raise ValueError( 'Oh, no! {} does not know, how to {} with {} column!'.format( Product.__qualname__, '__convert_sort_filter', column_name)) return sort_map[column_name] def __getSumofQTY(self, data): filters = self.__makeESFilterFromCustomFilter(data) query = { "query": filters, "size": 0, "aggregations": { "count": { "sum": { "field": "sizes.qty" } } } } aggregations = self.__elastic.post_search(query)['aggregations'] return aggregations['count']['value'] def __getAvailableSubType(self, data, sort): filters = self.__makeESFilterFromCustomFilter(data) query = { "query": filters, "size": 0, "aggregations": { "product_sub_type": { "terms": { "field": "rs_product_sub_type", "size": 100000 } } } } aggregations = self.__elastic.post_search(query)['aggregations'] sub_types = [] for item in aggregations['product_sub_type']['buckets']: sub_types.append(item['key']) if sort == 'asc': sub_types.sort() elif sort == 'desc': sub_types.sort(reverse=True) ret = [] for sub_type in sub_types: new_filter = data.copy() new_filter['product_sub_type'] = sub_type ret.append({ 'label': sub_type, # 'count': self.__getSumofQTY(new_filter) }) return ret # This is too slow. def getNewAvailableFilter(self, data, sort): filters = self.__makeESFilterFromCustomFilter(data) query = { "query": filters, "size": 0, "aggregations": { "gender": { "terms": { "field": "gender", "size": 100000 } }, "price": { "terms": { "field": "rs_selling_price", "size": 100000 } }, "product_type": { "terms": { "field": "product_size_attribute", "size": 100000 } }, "brand": { "terms": { "field": "manufacturer", "size": 100000 } }, "size": { "terms": { "field": "sizes.size", "size": 100000 } }, "color": { "terms": { "field": "rs_colour", "size": 100000 } } } } aggregations = self.__elastic.post_search(query)['aggregations'] availablefilter = { 'product_type': [], 'brand': [], 'size': [], 'color': [], 'price': [], 'gender': [] } for item in aggregations['product_type']['buckets']: product_type = {} product_type['label'] = item['key'] new_filter = data.copy() new_filter['product_type'] = item['key'] product_type['children'] = self.__getAvailableSubType( new_filter, sort) # product_type['count']=self.__getSumofQTY(new_filter) availablefilter['product_type'].append(product_type) for item in aggregations['brand']['buckets']: brand = {} brand['label'] = item['key'] new_filter = data.copy() new_filter['brand'] = item['key'] # brand['count']=self.__getSumofQTY(new_filter) availablefilter['brand'].append(brand) for item in aggregations['color']['buckets']: color = {} color['label'] = item['key'] new_filter = data.copy() new_filter['color'] = item['key'] # color['count']=self.__getSumofQTY(new_filter) availablefilter['color'].append(color) for item in aggregations['size']['buckets']: size = {} size['label'] = item['key'] availablefilter['size'].append(size) for item in aggregations['price']['buckets']: availablefilter['price'].append(item['key']) for item in aggregations['gender']['buckets']: gender = {} gender['label'] = item['key'] new_filter = data.copy() new_filter['gender'] = item['key'] # gender['count']=self.__getSumofQTY(new_filter) availablefilter['gender'].append(gender) for key, items in availablefilter.items(): if key == 'size': 
_sizes = [item['label'] for item in items] _sorted_sizes = ProductSizeSort().sort(_sizes) if sort == 'desc': _sorted_sizes.reverse() availablefilter['size'].clear() for _item in _sorted_sizes: size = {} size['label'] = _item new_filter = data.copy() new_filter['size'] = _item # size['count']=self.__getSumofQTY(new_filter) availablefilter['size'].append(size) elif key == 'price': price_list = { 'Under R100': 0, 'R100 - R250': 0, 'R250 - R500': 0, 'R500 - R750': 0, 'R750 - R1,000': 0, 'R1,000 - R2,000': 0, 'Over R2,000': 0 } for _item in items: if _item < 100: price_list['Under R100'] += 1 continue elif _item < 250: price_list['R100 - R250'] += 1 continue elif _item < 500: price_list['R250 - R500'] += 1 continue elif _item < 750: price_list['R500 - R750'] += 1 continue elif _item < 1000: price_list['R750 - R1,000'] += 1 continue elif _item < 2000: price_list['R1,000 - R2,000'] += 1 continue else: price_list['R1,000 - R2,000'] += 1 availablefilter['price'].clear() if sort == 'asc': if price_list['Under R100'] > 0: availablefilter['price'].append({ 'label': 'Under R100', 'value': [0, 100] }) if price_list['R100 - R250'] > 0: availablefilter['price'].append({ 'label': 'R100 - R250', 'value': [100, 250] }) if price_list['R250 - R500'] > 0: availablefilter['price'].append({ 'label': 'R250 - R500', 'value': [250, 500] }) if price_list['R500 - R750'] > 0: availablefilter['price'].append({ 'label': 'R500 - R750', 'value': [500, 750] }) if price_list['R750 - R1,000'] > 0: availablefilter['price'].append({ 'label': 'R750 - R1,000', 'value': [750, 1000] }) if price_list['R1,000 - R2,000'] > 0: availablefilter['price'].append({ 'label': 'R1,000 - R2,000', 'value': [1000, 2000] }) if price_list['Over R2,000'] > 0: availablefilter['price'].append({ 'label': 'Over R2,000', 'value': [2000, 10000] }) elif sort == 'desc': if price_list['Over R2,000'] > 0: availablefilter['price'].append({ 'label': 'Over R2,000', 'value': [2000, 10000] }) if price_list['R1,000 - R2,000'] > 0: availablefilter['price'].append({ 'label': 'R1,000 - R2,000', 'value': [1000, 2000] }) if price_list['R750 - R1,000'] > 0: availablefilter['price'].append({ 'label': 'R750 - R1,000', 'value': [750, 1000] }) if price_list['R500 - R750'] > 0: availablefilter['price'].append({ 'label': 'R500 - R750', 'value': [500, 750] }) if price_list['R250 - R500'] > 0: availablefilter['price'].append({ 'label': 'R250 - R500', 'value': [250, 500] }) if price_list['R100 - R250'] > 0: availablefilter['price'].append({ 'label': 'R100 - R250', 'value': [100, 250] }) if price_list['Under R100'] > 0: availablefilter['price'].append({ 'label': 'Under R100', 'value': [0, 100] }) else: if sort == 'asc': availablefilter[key] = sorted( items, key=lambda i: i['label'].lower()) elif sort == 'desc': availablefilter[key] = sorted( items, key=lambda i: i['label'].lower(), reverse=True) return availablefilter def getAvailableFilter(self, data, sort): filters = self.__makeESFilterFromCustomFilter(data) query = { "query": filters, "size": 0, "aggregations": { "gender": { "terms": { "field": "gender", "size": 100000 } }, "price": { "terms": { "field": "rs_selling_price", "size": 100000 } }, "product_type": { "terms": { "field": "product_size_attribute", "size": 100000 } }, "product_sub_type": { "terms": { "field": "rs_product_sub_type", "size": 100000 } }, "brand": { "terms": { "field": "manufacturer", "size": 100000 } }, "size": { "terms": { "field": "sizes.size", "size": 100000 } }, "color": { "terms": { "field": "rs_colour", "size": 100000 } } } } aggregations = 
self.__elastic.post_search(query)['aggregations'] availablefilter = { 'product_type': [], 'product_sub_type': [], 'brand': [], 'size': [], 'color': [], 'price': [], 'gender': [] } for item in aggregations['product_type']['buckets']: availablefilter['product_type'].append(item['key']) for item in aggregations['product_sub_type']['buckets']: availablefilter['product_sub_type'].append(item['key']) for item in aggregations['brand']['buckets']: availablefilter['brand'].append(item['key']) for item in aggregations['color']['buckets']: availablefilter['color'].append(item['key']) for item in aggregations['size']['buckets']: availablefilter['size'].append(item['key']) for item in aggregations['price']['buckets']: availablefilter['price'].append(item['key']) for item in aggregations['gender']['buckets']: availablefilter['gender'].append(item['key']) for key, items in availablefilter.items(): if key == 'size': sorted_sizes = ProductSizeSort().sort(items) if sort == 'desc': sorted_sizes.reverse() availablefilter[key] = sorted_sizes else: if sort == 'asc': items.sort() elif sort == 'desc': items.sort(reverse=True) return availablefilter def updateStock(self, items): try: total_found_item_count = 0 updated_item_count = 0 added_item_count = 0 for item in items: script = ( "for(int j = 0; j < ctx._source.sizes.size(); j++) if(ctx._source.sizes[j].rs_simple_sku == '" + item['rs_simple_sku'] + "'){ ctx._source.sizes[j].qty = " + str(item['qty']) + "; break; }") query = { "script": script, "query": { "bool": { "must": [{ "match": { "sizes.portal_simple_id": item['product_simple_id'] } }, { "match": { "sizes.rs_simple_sku": item['rs_simple_sku'] } }] } } } res = self.__elastic.update_by_query(query) total_found_item_count += res['total'] updated_item_count += res['updated'] if res['total'] == 0: sku_size = item['rs_simple_sku'].split('-') rs_sku = sku_size[0] size = sku_size[1] product = self.get(rs_sku) if product is not None: _inline = "ctx._source.sizes.add(params.size)" _size = { "size": size, "portal_simple_id": item['product_simple_id'], "qty": item['qty'], "rs_simple_sku": item['rs_simple_sku'], } _query = { "script": { "lang": "painless", "inline": _inline, "params": { "size": _size } } } res = self.__elastic.update_data(rs_sku, _query) if res['_id'] == rs_sku: added_item_count += 1 return { 'total_found_item': total_found_item_count, 'updated_item': updated_item_count, 'added_item': added_item_count } except: return {'result': 'failure'} def getRawDataBySimpleSkus(self, simple_skus: Union[Tuple[str], List[str]], convert=True) -> Tuple[dict]: response_items = self.__elastic.post_search({ 'query': { 'bool': { 'filter': { 'terms': { 'sizes.rs_simple_sku': simple_skus } } } }, 'size': 10000, }).get('hits', {}).get('hits', []) or [] result = [ self.__convert_item(data['_source']) if convert else data['_source'] for data in response_items ] return tuple(result) def getRawDataBySimpleSku(self, simple_sku: str, convert=True) -> Optional[dict]: rows = self.getRawDataBySimpleSkus([simple_sku], convert) return rows[0] if rows else None def get_raw_data(self, config_sku: str, convert=False) -> Optional[dict]: try: product_data = self.__elastic.post_search({ 'query': { 'term': { 'rs_sku': config_sku } } }).get('hits', {}).get('hits', [{}])[0].get('_source') result = self.__convert_item( product_data) if convert else product_data return result except: return None def get_raw_data_by_skus(self, config_skus: List[str], convert=False) -> List[dict]: try: response = self.__elastic.post_search({ 'query': { 'terms': { 
                        'rs_sku': config_skus
                    }
                }
            }).get('hits', {}).get('hits', [])
            # Missing documents simply yield an empty result list.
            if convert:
                return [
                    self.__convert_item(item['_source']) for item in response
                ]
            else:
                return [item['_source'] for item in response]
        except Exception:
            return []
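# --- Illustrative sketch -------------------------------------------------------
# Worked example of __convert_item_calculate_prices() (and of the painless "price"
# sort script above, which encodes the same formula):
#   current_price = rs_selling_price - rs_selling_price * discount / 100
# The item values below are made up for illustration.
_example_item = {'rs_selling_price': 500.0, 'discount': 20.0}
_original_price = float(_example_item['rs_selling_price'] or 0)
_discount = float(_example_item['discount'] or 0)
_current_price = _original_price - _original_price * _discount / 100
assert (_original_price, _current_price) == (500.0, 400.0)  # 20% off R500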
def __init__(self): self.__orders_storage = OrderStorageImplementation() self.__logger = Logger() # """ # curl -X DELETE localhost:9200/fbucks_handled_orders # curl -X PUT localhost:9200/fbucks_handled_orders -H "Content-Type: application/json" -d'{ # "mappings": { # "fbucks_handled_orders": { # "properties": { # "handled_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"} # } # } # } # }' # """ # self.__fbucks_handled_orders_elastic = Elastic( # settings.AWS_ELASTICSEARCH_FBUCKS_HANDLED_ORDERS, # settings.AWS_ELASTICSEARCH_FBUCKS_HANDLED_ORDERS, # ) self.__fbucks_handled_orders_dynamo_db = DynamoModel( settings.AWS_DYNAMODB_CMS_TABLE_NAME) self.__fbucks_handled_orders_dynamo_db.PARTITION_KEY = 'PURCHASE_FBUCKS_REWARD_HANDLED_ORDERS' # Attention! # We can get current customer's amount as a sum of all changes by customer_id # But theoretically elastic can not be in time with index update (1 second) between requests. # So there is another index to store amount value. """ curl -X DELETE localhost:9200/fbucks_customer_amount curl -X PUT localhost:9200/fbucks_customer_amount -H "Content-Type: application/json" -d'{ "mappings": { "fbucks_customer_amount": { "properties": { "amount": {"type": "integer"} } } } }' curl -X DELETE localhost:9200/fbucks_customer_amount_changes curl -X PUT localhost:9200/fbucks_customer_amount_changes -H "Content-Type: application/json" -d'{ "mappings": { "fbucks_customer_amount_changes": { "properties": { "customer_id": {"type": "keyword"}, "amount": {"type": "integer"}, "changed_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"}, "order_number": {"type": "keyword"} } } } }' """ self.__fbucks_customer_amount_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, ) self.__fbucks_customer_amount_changes_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES, ) self.__customer_storage = CustomerStorageImplementation() self.__messages_storage = MessageStorageImplementation()
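# --- Illustrative sketch -------------------------------------------------------
# The comment above rejects deriving the balance on the fly; this is roughly what
# that rejected alternative would look like: a sum aggregation over
# fbucks_customer_amount_changes. Because that index may lag writes by ~1 second,
# the handler keeps the precomputed balance in fbucks_customer_amount instead and
# treats the changes index purely as a ledger. The function name and query here
# are assumptions for illustration; only the Elastic wrapper and settings are real.
from chalicelib.settings import settings
from chalicelib.libs.core.elastic import Elastic


def fbucks_amount_from_changes(customer_id: str) -> int:
    changes_elastic = Elastic(
        settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES,
        settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES)
    response = changes_elastic.post_search({
        'query': {'term': {'customer_id': customer_id}},
        'aggs': {'total': {'sum': {'field': 'amount'}}},
        'size': 0,
    })
    return int(response.get('aggregations', {}).get('total', {}).get('value') or 0)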
class OrderHandler(SqsHandlerInterface): def __init__(self) -> None: """ curl -X DELETE http://localhost:9200/personalization_orders curl -X PUT http://localhost:9200/personalization_orders -H "Content-Type: application/json" -d'{ "mappings": { "personalization_orders": { "properties": { "order_number": {"type": "keyword"}, "email": {"type": "keyword"}, "ordered_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"}, "rs_sku": {"type": "keyword"}, "rs_simple_sku": {"type": "keyword"}, "product_name": {"type": "keyword"}, "manufacturer": {"type": "keyword"}, "gender": {"type": "keyword"}, "product_size_attribute": {"type": "keyword"}, "rs_colour": {"type": "keyword"}, "size": {"type": "keyword"} } } } }' """ self.__elastic = Elastic( settings.AWS_ELASTICSEARCH_PERSONALIZATION_ORDERS, settings.AWS_ELASTICSEARCH_PERSONALIZATION_ORDERS) def handle(self, sqs_message: SqsMessage) -> None: order_number = str(sqs_message.message_data.get('order_number') or '').strip() or None order_items = sqs_message.message_data.get('order_items', []) or [] if not order_number or not order_items: raise ValueError('{} message data is incorrect: {}'.format( self.handle.__qualname__, sqs_message.message_data)) try: self.__elastic.delete_by_query( {"query": { "term": { "order_number": order_number } }}) emails = list() for order_item_data in order_items: emails.append(order_item_data.get('customer_email')) order_item = OrderItem( order_number, order_item_data.get('customer_email'), datetime.strptime(order_item_data.get('order_created_at'), '%Y-%m-%d %H:%M:%S'), order_item_data.get('product_config_sku'), order_item_data.get('product_simple_sku'), order_item_data.get('product_name'), order_item_data.get('product_brand_name'), order_item_data.get('product_gender_name'), order_item_data.get('product_type_name'), order_item_data.get('product_color_name'), order_item_data.get('product_size_name'), int(order_item_data.get('qty_ordered')), ) document_id = str(uuid4()) self.__elastic.create( document_id, { 'order_number': order_number, 'email': order_item.customer_email, 'ordered_at': order_item.ordered_at.strftime('%Y-%m-%d %H:%M:%S'), 'rs_sku': order_item.product_sku, 'rs_simple_sku': order_item.product_size_sku, 'product_name': order_item.product_name, 'manufacturer': order_item.product_brand_name, 'gender': order_item.product_gender_name, 'product_size_attribute': order_item.product_type_name, 'rs_colour': order_item.product_color_name, 'size': order_item.product_size_name, }) emails = list(set(emails)) User.send_calculate_product_score_for_customers(emails=emails) except BaseException as e: raise RuntimeError( '{} got an error, when tried to handle order {}. Error: {}'. format(self.handle.__qualname__, sqs_message.message_data, e))
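# --- Illustrative sketch -------------------------------------------------------
# Shape of the sqs_message.message_data that OrderHandler.handle() expects,
# inferred from the fields it reads above. All values are made-up samples.
example_order_message_data = {
    'order_number': 'RS0000001',
    'order_items': [{
        'customer_email': 'customer@example.com',
        'order_created_at': '2020-01-15 10:30:00',  # parsed with '%Y-%m-%d %H:%M:%S'
        'product_config_sku': 'ABC123',
        'product_simple_sku': 'ABC123-M',
        'product_name': 'Sample Tee',
        'product_brand_name': 'SampleBrand',
        'product_gender_name': 'MENS',
        'product_type_name': 'T-Shirts',
        'product_color_name': 'Blue',
        'product_size_name': 'M',
        'qty_ordered': 1,
    }],
}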
def regular_eft_checkout(): checkout_storage = CheckoutStorageImplementation() order_storage = OrderStorageImplementation() order_app_service = OrderAppService() logger = Logger() mailer = MailerImplementation() # 1. Get or create order. Critical! # ------------------------------------------------------ try: user = __get_user() # @todo : refactoring checkout = checkout_storage.load(Id(user.id)) if not checkout: raise ApplicationLogicException('Checkout does not exist!') elif checkout.total_due.value == 0: raise ApplicationLogicException('Unable to checkout 0 amount with Regular Eft!') order = order_app_service.get_waiting_for_payment_by_checkout_or_checkout_new(user.id) def __log_order_flow(text: str) -> None: logger.log_simple('Regular EFT : Checkout : {} : {}'.format(order.number.value, text)) __log_order_flow('Start') # Attention! # Currently we use f-bucks only! Other credits are not available for now! # @todo : other credit types # @todo : copy-paste code # @todo : when reservation of credits amount will be done, perhaps, use sqs to spend credits if order.credit_spent_amount.value > 0: __log_order_flow('Spending Credits...') """""" from chalicelib.libs.purchase.core import Checkout see = Checkout.__init__ """""" # @TODO : refactoring : raw data usage import uuid import datetime from chalicelib.settings import settings from chalicelib.libs.core.elastic import Elastic fbucks_customer_amount_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, ) fbucks_customer_amount_changes_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES, ) fbucks_customer_amount_elastic.update_data(order.customer_id.value, { 'script': 'ctx._source.amount -= ' + str(order.credit_spent_amount.value), }) fbucks_customer_amount_changes_elastic.create(str(uuid.uuid4()) + str(order.customer_id.value), { "customer_id": order.customer_id.value, "amount": -order.credit_spent_amount.value, "changed_at": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), "order_number": order.number.value, }) __log_order_flow('Spending Credits: Done!') __log_order_flow('Order Updating...') order.payment_method = RegularEftOrderPaymentMethod() order_storage.save(order) __log_order_flow('Order Updated!') except BaseException as e: logger.log_exception(e) return http_response_exception_or_throw(e) # 2. Send eft email. Not critical. # Theoretically can be redone or downloaded manually. # ------------------------------------------------------ try: __log_order_flow('EFT Email Sending...') message = RegularEftBankDetailsMailMessage(order) mailer.send(message) __log_order_flow('EFT Email Sent!') except BaseException as e: logger.log_exception(e) __log_order_flow('EFT Email Not Sent because of Error: {}'.format(str(e))) # 3. Flush cart, checkout. Not critical. 
# ------------------------------------------------------ # flush cart try: __log_order_flow('Cart Flushing...') from chalicelib.libs.purchase.cart.service import CartAppService cart_service = CartAppService() cart_service.clear_cart(user.session_id) __log_order_flow('Cart Flushed!') except BaseException as e: logger.log_exception(e) __log_order_flow('Cart Not Flushed because of Error: {}'.format(str(e))) # flush checkout try: __log_order_flow('Checkout Flushing...') from chalicelib.libs.purchase.checkout.service import CheckoutAppService checkout_service = CheckoutAppService() checkout_service.remove(user.id) __log_order_flow('Checkout Flushed!') except BaseException as e: logger.log_exception(e) __log_order_flow('Checkout Not Flushed because of Error: {}'.format(str(e))) return { 'order_number': order.number.value }
def __return(cart_items: Tuple[Cart.Item], original_subtotal: float, current_subtotal: float, current_subtotal_vat_amount: float): tier = blueprint.current_request.current_user.profile.tier # fbucks available to spend available_fbucks_amount = None if not tier[ 'is_neutral'] and not blueprint.current_request.current_user.is_anyonimous: """""" # @TODO : REFACTORING !!! from chalicelib.libs.purchase.customer.sqs import FbucksChargeSqsHandler see = FbucksChargeSqsHandler """""" from chalicelib.settings import settings from chalicelib.libs.core.elastic import Elastic __fbucks_customer_amount_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, ) fbucks_amount_row = __fbucks_customer_amount_elastic.get_data( blueprint.current_request.current_user.id) available_fbucks_amount = fbucks_amount_row[ 'amount'] or 0 if fbucks_amount_row else 0 products = MpcProduct() items_data = [] for cart_item in cart_items: product = products.getRawDataBySimpleSku( cart_item.simple_sku.value) product_sizes = product.get('sizes', []) if product else () size = tuple( filter( lambda s: s.get('simple_sku') == cart_item.simple_sku. value, product_sizes))[0] dtd = dtd_calculator.calculate(cart_item.simple_sku, cart_item.qty) item_fbucks = None if not tier[ 'is_neutral'] and not blueprint.current_request.current_user.is_anyonimous: item_fbucks = math.ceil(cart_item.current_cost.value * tier['discount_rate'] / 100) items_data.append({ 'sku': product['sku'], 'simple_sku': cart_item.simple_sku.value, 'name': product.get('title'), 'brand_name': product.get('brand'), 'size_name': size.get('size'), 'image_url': product.get('image', {}).get('src', None), 'qty_available': int(size.get('qty')), 'qty_added': cart_item.qty.value, 'is_added_over_limit': cart_item.is_added_over_limit, 'product_original_price': cart_item.product_original_price.value, 'product_current_price': cart_item.product_current_price.value, 'original_cost': cart_item.original_cost.value, 'current_cost': cart_item.current_cost.value, 'dtd': { 'occasion': { 'name': dtd.occasion.name.value, 'description': dtd.occasion.description.value, } if dtd.occasion else None, 'date_from': dtd.date_from.strftime('%Y-%m-%d'), 'date_to': dtd.date_to.strftime('%Y-%m-%d'), 'working_days_from': dtd.working_days_from, 'working_days_to': dtd.working_days_to, }, 'fbucks': item_fbucks, }) return { 'items': items_data, 'original_subtotal': original_subtotal, 'current_subtotal': current_subtotal, 'current_subtotal_vat_amount': current_subtotal_vat_amount, 'available_fbucks_amount': available_fbucks_amount, }
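# --- Illustrative sketch -------------------------------------------------------
# Worked example of the per-item f-bucks preview computed above for non-neutral
# tiers: item_fbucks = ceil(current_cost * tier['discount_rate'] / 100).
# The cost and rate below are illustrative values.
import math

_current_cost = 449.0                              # current cost of the cart line
_tier = {'is_neutral': False, 'discount_rate': 5}  # 5% credit back
assert math.ceil(_current_cost * _tier['discount_rate'] / 100) == 23  # ceil(22.45)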
class _CreditCardsElasticStorage(CreditCardsStorageInterface): __ENTITY_PROPERTY_TOKEN = '__token' __ENTITY_PROPERTY_CUSTOMER_ID = '__customer_id' __ENTITY_PROPERTY_BRAND = '__brand' __ENTITY_PROPERTY_NUMBER_HIDDEN = '__number_hidden' __ENTITY_PROPERTY_EXPIRES = '__expires' __ENTITY_PROPERTY_HOLDER_NAME = '__holder_name' __ENTITY_PROPERTY_IS_VERIFIED = '__is_verified' __ENTITY_PROPERTY_CREATED_AT = '__created_at' def __init__(self): """ curl -X DELETE localhost:9200/purchase_customer_credit_cards curl -X PUT localhost:9200/purchase_customer_credit_cards -H "Content-Type: application/json" -d'{ "mappings": { "purchase_customer_credit_cards": { "properties": { "token": {"type": "keyword"}, "customer_id": {"type": "keyword"}, "brand": {"type": "keyword"}, "number_hidden": {"type": "keyword"}, "expires": {"type": "keyword"}, //2005 -> 2020/05 "holder_name": {"type": "keyword"}, "is_verified": {"type": "boolean"}, "created_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"} } } } }' curl -X DELETE localhost:9200/purchase_customer_credit_cards_customer_map curl -X PUT localhost:9200/purchase_customer_credit_cards_customer_map -H "Content-Type: application/json" -d'{ "mappings": { "purchase_customer_credit_cards_customer_map": { "properties": { "tokens_json": {"type": "keyword"} } } } }' """ self.__elastic_cards = Elastic( settings.AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS, settings.AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS) self.__elastic_customer_cards_map = Elastic( settings. AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS_CUSTOMER_MAP, settings. AWS_ELASTICSEARCH_PURCHASE_CUSTOMER_CREDIT_CARDS_CUSTOMER_MAP) self.__reflector = Reflector() def __restore(self, row: dict) -> CreditCard: card = self.__reflector.construct( CreditCard, { self.__class__.__ENTITY_PROPERTY_TOKEN: row['token'], self.__class__.__ENTITY_PROPERTY_CUSTOMER_ID: row['customer_id'], self.__class__.__ENTITY_PROPERTY_BRAND: row['brand'], self.__class__.__ENTITY_PROPERTY_NUMBER_HIDDEN: row['number_hidden'], self.__class__.__ENTITY_PROPERTY_EXPIRES: (datetime.date( year=int('20' + row['expires'][0:2]), month=12, day=31)) if int(row['expires'][2:4]) == 12 else (datetime.date(year=int('20' + row['expires'][0:2]), month=int(row['expires'][2:4]) + 1, day=1) - datetime.timedelta(days=1)), self.__class__.__ENTITY_PROPERTY_HOLDER_NAME: row['holder_name'], self.__class__.__ENTITY_PROPERTY_IS_VERIFIED: row['is_verified'], self.__class__.__ENTITY_PROPERTY_CREATED_AT: datetime.datetime.strptime(row['created_at'], '%Y-%m-%d %H:%M:%S') }) return card def get_by_token(self, token: str) -> Optional[CreditCard]: if not isinstance(token, str): raise ArgumentTypeException(self.get_by_token, 'token', token) elif not token.strip(): raise ArgumentCannotBeEmptyException(self.get_by_token, 'token') row = self.__elastic_cards.get_data(token) return self.__restore(row) if row else None def get_all_by_customer(self, customer_id: str) -> Tuple[CreditCard]: if not isinstance(customer_id, str): raise ArgumentTypeException(self.get_all_by_customer, 'customer_id', customer_id) elif not customer_id.strip(): raise ArgumentCannotBeEmptyException(self.get_all_by_customer, 'customer_id') customer_cards_map = self.__elastic_customer_cards_map.get_data( customer_id) tokens = json.loads( customer_cards_map['tokens_json']) if customer_cards_map else [] result = [self.get_by_token(token) for token in tokens] result = [card for card in result if card] if len(result) != len(tokens): raise ValueError( 'Incorrect cards set for customer #{}: existed cards - 
{}, tokens in map - {}' .format(customer_id, len(result), len(tokens))) return tuple(result) def save(self, card: CreditCard) -> None: if not isinstance(card, CreditCard): raise ArgumentTypeException(self.save, 'card', card) data = self.__reflector.extract(card, [ self.__class__.__ENTITY_PROPERTY_TOKEN, self.__class__.__ENTITY_PROPERTY_CUSTOMER_ID, self.__class__.__ENTITY_PROPERTY_BRAND, self.__class__.__ENTITY_PROPERTY_NUMBER_HIDDEN, self.__class__.__ENTITY_PROPERTY_EXPIRES, self.__class__.__ENTITY_PROPERTY_HOLDER_NAME, self.__class__.__ENTITY_PROPERTY_IS_VERIFIED, self.__class__.__ENTITY_PROPERTY_CREATED_AT, ]) token = data[self.__class__.__ENTITY_PROPERTY_TOKEN] customer_id = data[self.__class__.__ENTITY_PROPERTY_CUSTOMER_ID] if self.__elastic_cards.get_data(token): self.__elastic_cards.update_data( token, { 'doc': { 'is_verified': data[self.__class__.__ENTITY_PROPERTY_IS_VERIFIED] } }) else: self.__elastic_cards.create( token, { 'token': data[self.__class__.__ENTITY_PROPERTY_TOKEN], 'customer_id': data[self.__class__.__ENTITY_PROPERTY_CUSTOMER_ID], 'brand': data[self.__class__.__ENTITY_PROPERTY_BRAND], 'number_hidden': data[self.__class__.__ENTITY_PROPERTY_NUMBER_HIDDEN], 'expires': data[self.__class__.__ENTITY_PROPERTY_EXPIRES].strftime( '%y%m'), 'holder_name': data[self.__class__.__ENTITY_PROPERTY_HOLDER_NAME], 'is_verified': data[self.__class__.__ENTITY_PROPERTY_IS_VERIFIED], 'created_at': data[self.__class__.__ENTITY_PROPERTY_CREATED_AT].strftime( '%Y-%m-%d %H:%M:%S') }) # Elastic can search by attributes only after 1 second from last update. # We need all data, when we are searching by customer_id, # so in this case we will lost fresh data, if search directly after creation of new card. # In this case we need to use another index and get data by elastic doc_id. customer_cards_map = self.__elastic_customer_cards_map.get_data( customer_id) if customer_cards_map: tokens = json.loads(customer_cards_map['tokens_json']) tokens.append(token) self.__elastic_customer_cards_map.update_data( customer_id, {'doc': { 'tokens_json': json.dumps(tokens) }}) else: self.__elastic_customer_cards_map.create( customer_id, {'tokens_json': json.dumps([token])}) def remove(self, card: CreditCard) -> None: if not isinstance(card, CreditCard): raise ArgumentTypeException(self.remove, 'card', card) if not self.__elastic_cards.get_data(card.token): raise ArgumentValueException('Card #{} is already Removed!'.format( card.token)) self.__elastic_cards.delete_by_id(card.token) customer_cards_map = self.__elastic_customer_cards_map.get_data( card.customer_id) tokens = json.loads(customer_cards_map['tokens_json']) tokens = [token for token in tokens if token != card.token] self.__elastic_customer_cards_map.update_data( card.customer_id, {'doc': { 'tokens_json': json.dumps(tokens) }})
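# --- Illustrative sketch -------------------------------------------------------
# Worked example of the "expires" round-trip used by save() and __restore() above:
# the card document stores expires as 'YYMM' ('2005' means 2020/05, as noted in the
# mapping comment), and __restore() turns it into the last calendar day of that month.
import datetime

_expires = '2005'  # illustrative stored value: May 2020
_year = int('20' + _expires[0:2])
_month = int(_expires[2:4])
if _month == 12:
    _last_day = datetime.date(year=_year, month=12, day=31)
else:
    _last_day = datetime.date(year=_year, month=_month + 1, day=1) - \
        datetime.timedelta(days=1)
assert _last_day == datetime.date(2020, 5, 31)
assert _last_day.strftime('%y%m') == _expires  # save() writes the same 'YYMM' back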
class FbucksChargeSqsHandler(SqsHandlerInterface): # @TODO : REFACTORING !!! currently we are working with raw data def __init__(self): self.__orders_storage = OrderStorageImplementation() self.__logger = Logger() # """ # curl -X DELETE localhost:9200/fbucks_handled_orders # curl -X PUT localhost:9200/fbucks_handled_orders -H "Content-Type: application/json" -d'{ # "mappings": { # "fbucks_handled_orders": { # "properties": { # "handled_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"} # } # } # } # }' # """ # self.__fbucks_handled_orders_elastic = Elastic( # settings.AWS_ELASTICSEARCH_FBUCKS_HANDLED_ORDERS, # settings.AWS_ELASTICSEARCH_FBUCKS_HANDLED_ORDERS, # ) self.__fbucks_handled_orders_dynamo_db = DynamoModel( settings.AWS_DYNAMODB_CMS_TABLE_NAME) self.__fbucks_handled_orders_dynamo_db.PARTITION_KEY = 'PURCHASE_FBUCKS_REWARD_HANDLED_ORDERS' # Attention! # We can get current customer's amount as a sum of all changes by customer_id # But theoretically elastic can not be in time with index update (1 second) between requests. # So there is another index to store amount value. """ curl -X DELETE localhost:9200/fbucks_customer_amount curl -X PUT localhost:9200/fbucks_customer_amount -H "Content-Type: application/json" -d'{ "mappings": { "fbucks_customer_amount": { "properties": { "amount": {"type": "integer"} } } } }' curl -X DELETE localhost:9200/fbucks_customer_amount_changes curl -X PUT localhost:9200/fbucks_customer_amount_changes -H "Content-Type: application/json" -d'{ "mappings": { "fbucks_customer_amount_changes": { "properties": { "customer_id": {"type": "keyword"}, "amount": {"type": "integer"}, "changed_at": {"type": "date", "format": "yyyy-MM-dd HH:mm:ss"}, "order_number": {"type": "keyword"} } } } }' """ self.__fbucks_customer_amount_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT, ) self.__fbucks_customer_amount_changes_elastic = Elastic( settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES, settings.AWS_ELASTICSEARCH_FBUCKS_CUSTOMER_AMOUNT_CHANGES, ) self.__customer_storage = CustomerStorageImplementation() self.__messages_storage = MessageStorageImplementation() def handle(self, sqs_message: SqsMessage) -> None: import uuid import datetime from chalicelib.libs.purchase.core import Order order_number_values = sqs_message.message_data['order_numbers'] for order_number_value in order_number_values: try: now_string = datetime.datetime.now().strftime( "%Y-%m-%d %H:%M:%S") # skip duplicates # if self.__fbucks_handled_orders_elastic.get_data(order_number_value): if self.__fbucks_handled_orders_dynamo_db.find_item( order_number_value): self.__logger.log_simple( '{}: Fbucks for order #{} already earned!'.format( self.handle.__qualname__, order_number_value)) continue # ignore orders without fbucks amounts order = self.__orders_storage.load( Order.Number(order_number_value)) fbucks_amount = order.total_fbucks_earnings.value if fbucks_amount == 0: # remember order as handled # self.__fbucks_handled_orders_elastic.create(order_number_value, {'handled_at': now_string}) self.__fbucks_handled_orders_dynamo_db.put_item( order_number_value, {'handled_at': now_string}) continue # earn fbucks self.__fbucks_customer_amount_elastic.update_data( order.customer_id.value, { 'script': 'ctx._source.amount += ' + str(fbucks_amount), 'upsert': { 'amount': fbucks_amount, } }) self.__fbucks_customer_amount_changes_elastic.create( str(uuid.uuid4()) + str(order.customer_id.value), { "customer_id": order.customer_id.value, 
"amount": +fbucks_amount, "changed_at": now_string, "order_number": order_number_value, }) # remember order as handled # self.__fbucks_handled_orders_elastic.create(order_number_value, {'handled_at': now_string}) self.__fbucks_handled_orders_dynamo_db.put_item( order_number_value, {'handled_at': now_string}) # notify (silently) try: customer = self.__customer_storage.get_by_id( order.customer_id) self.__messages_storage.save( Message( str(uuid.uuid4()), customer.email.value, 'F-Bucks has been Earned!', 'You have earned {} F-Bucks by your Order #{}'. format(fbucks_amount, order.number.value))) except BaseException as e: self.__logger.log_exception(e) except BaseException as e: self.__logger.log_exception(e)
class _CustomerTiersElasticStorage(CustomerTierStorageInterface): __ENTITY_PROPERTY_ID = '__id' __ENTITY_PROPERTY_NAME = '__name' __ENTITY_PROPERTY_CREDIT_BACK_PERCENT = '__credit_back_percent' __ENTITY_PROPERTY_SPENT_AMOUNT_MIN = 'spent_amount_min' __ENTITY_PROPERTY_SPENT_AMOUNT_MAX = 'spent_amount_max' __ENTITY_PROPERTY_IS_DELETED = '__is_deleted' def __init__(self): """ curl -X DELETE localhost:9200/customer_tiers_tiers curl -X PUT localhost:9200/customer_tiers_tiers -H "Content-Type: application/json" -d'{ "mappings": { "customer_tiers_tiers": { "properties": { "id": {"type": "integer"}, "name": {"type": "keyword"}, "credit_back_percent": {"type": "integer"}, "spent_amount_min": {"type": "integer"}, "spent_amount_max": {"type": "integer"}, "is_deleted": {"type": "boolean"} } } } }' """ self.__elastic = Elastic( settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_TIERS, settings.AWS_ELASTICSEARCH_CUSTOMER_TIERS_TIERS ) self.__reflector = Reflector() def save(self, entity: CustomerTier) -> None: entity_data = self.__reflector.extract(entity, [ self.__class__.__ENTITY_PROPERTY_ID, self.__class__.__ENTITY_PROPERTY_NAME, self.__class__.__ENTITY_PROPERTY_CREDIT_BACK_PERCENT, self.__class__.__ENTITY_PROPERTY_SPENT_AMOUNT_MIN, self.__class__.__ENTITY_PROPERTY_SPENT_AMOUNT_MAX, self.__class__.__ENTITY_PROPERTY_IS_DELETED ]) document_id = entity_data[self.__class__.__ENTITY_PROPERTY_ID].value document_data = { 'id': entity_data[self.__class__.__ENTITY_PROPERTY_ID].value, 'name': entity_data[self.__class__.__ENTITY_PROPERTY_NAME].value, 'credit_back_percent': entity_data[self.__class__.__ENTITY_PROPERTY_CREDIT_BACK_PERCENT].value, 'spent_amount_min': entity_data[self.__class__.__ENTITY_PROPERTY_SPENT_AMOUNT_MIN], 'spent_amount_max': entity_data[self.__class__.__ENTITY_PROPERTY_SPENT_AMOUNT_MAX], 'is_deleted': entity_data[self.__class__.__ENTITY_PROPERTY_IS_DELETED], } if self.__elastic.get_data(document_id): self.__elastic.update_data(document_id, {'doc': document_data}) else: self.__elastic.create(document_id, document_data) def __create_entity(self, row: dict) -> CustomerTier: entity = self.__reflector.construct(CustomerTier, { self.__class__.__ENTITY_PROPERTY_ID: Id(str(row['id'])), self.__class__.__ENTITY_PROPERTY_NAME: Name(row['name']), self.__class__.__ENTITY_PROPERTY_CREDIT_BACK_PERCENT: Percentage(int(row['credit_back_percent'])), self.__class__.__ENTITY_PROPERTY_SPENT_AMOUNT_MIN: int(row['spent_amount_min']), self.__class__.__ENTITY_PROPERTY_SPENT_AMOUNT_MAX: int(row['spent_amount_max']), self.__class__.__ENTITY_PROPERTY_IS_DELETED: row['is_deleted'], }) return entity def get_by_id(self, tier_id: Id) -> Optional[CustomerTier]: if not isinstance(tier_id, Id): raise ArgumentTypeException(self.get_by_id, 'tier_id', tier_id) row = self.__elastic.get_data(tier_id.value) return self.__create_entity(row) if row else None def get_all(self) -> Tuple[CustomerTier]: rows = self.__elastic.post_search({'query': {'match_all': {}}}).get('hits', {}).get('hits') result = [self.__create_entity(row['_source']) for row in rows] result = [entity for entity in result if not entity.is_deleted] result = tuple(result) return result def get_neutral(self) -> CustomerTier: for tier in self.get_all(): if tier.is_neutral: return tier else: raise ApplicationLogicException('Neutral Tier does not exist!')