def sync_stock_transactions_for_facility(domain, endpoint, facility, checkpoint, date, limit=1000, offset=0):
    """
    Syncs stock data from StockTransaction objects in ILSGateway to StockTransaction objects in HQ

    Pages through the remote endpoint's stock transactions for ``facility``,
    persisting a checkpoint before each page so an interrupted run can resume,
    then rebuilds stock state for every product that received new transactions.

    :param domain: HQ domain name used to look up the supply point case
    :param endpoint: remote API client exposing ``get_stocktransactions``
    :param facility: remote supply point identifier (used as the filter value)
    :param checkpoint: checkpoint object; ``checkpoint.start_date`` bounds the sync window
    :param date: lower bound (``date__gte``) for transactions to fetch
    :param limit: page size for the remote API
    :param offset: starting offset for the remote API
    """
    has_next = True
    next_url = ""
    section_id = 'stock'
    supply_point = facility
    case = get_supply_point_case_in_domain_by_id(domain, supply_point)
    if not case:
        # No matching supply point case in this domain; nothing to sync.
        return
    location_id = case.location_id
    # Record the initial position before fetching anything.
    save_stock_data_checkpoint(checkpoint, 'stock_transaction', limit, offset, date, location_id, True)
    products_saved = set()
    while has_next:
        meta, stocktransactions = endpoint.get_stocktransactions(
            next_url_params=next_url,
            limit=limit,
            offset=offset,
            filters={
                'supply_point': supply_point,
                'date__gte': date,
                'date__lte': checkpoint.start_date
            }
        )
        # set the checkpoint right before the data we are about to process
        meta_limit = meta.get('limit') or limit
        meta_offset = meta.get('offset') or offset
        save_stock_data_checkpoint(
            checkpoint, 'stock_transaction', meta_limit, meta_offset, date, location_id, True
        )
        transactions_to_add = []
        with transaction.atomic():
            for stocktransaction in stocktransactions:
                # bulk=True: the helper returns transactions to create rather
                # than saving them itself — TODO confirm against its definition.
                transactions = sync_stock_transaction(stocktransaction, domain, case, bulk=True)
                transactions_to_add.extend(transactions)
                products_saved.update(map(lambda x: x.product_id, transactions))

        if transactions_to_add:
            # Doesn't send signal
            StockTransaction.objects.bulk_create(transactions_to_add)
        if not meta.get('next', False):
            has_next = False
        else:
            # Keep only the query string of the paginated 'next' URL.
            next_url = meta['next'].split('?')[1]
    for product in products_saved:
        # if we saved anything rebuild the stock state object by firing the signal
        # on the last transaction for each product
        last_st = StockTransaction.latest(case.get_id, section_id, product)
        update_stock_state_for_transaction(last_st)
def sync_stock_transactions_for_facility(domain, endpoint, facility, checkpoint, date, limit=1000, offset=0):
    """
    Syncs stock data from StockTransaction objects in ILSGateway to
    StockTransaction objects in HQ.

    Iterates over the remote endpoint's paginated stock transactions for the
    given facility, checkpointing before each page, and finally rebuilds the
    stock state for every product that had transactions saved.
    """
    section_id = 'stock'
    supply_point = facility
    case = get_supply_point_case_in_domain_by_id(domain, supply_point)
    if not case:
        return
    location_id = case.location_id
    # Record the starting position before any data is fetched.
    save_stock_data_checkpoint(checkpoint, 'stock_transaction', limit, offset, date, location_id, True)

    seen_products = set()
    page_params = ""
    while True:
        meta, stocktransactions = endpoint.get_stocktransactions(
            next_url_params=page_params,
            limit=limit,
            offset=offset,
            filters={
                'supply_point': supply_point,
                'date__gte': date,
                'date__lte': checkpoint.start_date
            }
        )
        # Checkpoint just ahead of the page we are about to process so an
        # interrupted run resumes from here.
        save_stock_data_checkpoint(
            checkpoint,
            'stock_transaction',
            meta.get('limit') or limit,
            meta.get('offset') or offset,
            date,
            location_id,
            True
        )

        pending = []
        with transaction.atomic():
            for remote_txn in stocktransactions:
                new_txns = sync_stock_transaction(remote_txn, domain, case, bulk=True)
                pending.extend(new_txns)
                seen_products.update(txn.product_id for txn in new_txns)

        if pending:
            # bulk_create skips the post-save signal
            StockTransaction.objects.bulk_create(pending)

        if not meta.get('next', False):
            break
        # Carry only the query string of the paginated 'next' URL forward.
        page_params = meta['next'].split('?')[1]

    for product in seen_products:
        # Rebuild the stock state by firing the signal on the most recent
        # transaction of each product we touched.
        last_st = StockTransaction.latest(case.get_id, section_id, product)
        update_stock_state_for_transaction(last_st)
def recalculate_domain_consumption(domain):
    """
    Given a domain, recalculate all saved consumption settings in that domain.

    For each case id mapped to the domain and each of the domain's products,
    re-fires the stock-state update on the most recent stock transaction.
    """
    # note: might get slow as this gets huge
    found_doc_ids = DocDomainMapping.objects.filter(
        domain_name=domain,
        doc_type="CommCareCase",
    ).values_list("doc_id", flat=True)
    products = Product.by_domain(domain)
    for supply_point_id in found_doc_ids:
        for product in products:
            # Use the shared ordering helper instead of duplicating the
            # filter/order_by('-report__date', '-pk') query inline, matching
            # the other implementation of this function in this file.
            filtered_transactions = StockTransaction.get_ordered_transactions_for_stock(
                supply_point_id, const.SECTION_TYPE_STOCK, product._id
            )
            if filtered_transactions:
                # Newest transaction first, per the helper's ordering.
                update_stock_state_for_transaction(filtered_transactions[0])
def recalculate_domain_consumption(domain):
    """
    Given a domain, recalculate all saved consumption settings in that domain.
    """
    # note: might get slow as this gets huge
    case_ids = DocDomainMapping.objects.filter(
        domain_name=domain,
        doc_type='CommCareCase',
    ).values_list('doc_id', flat=True)
    domain_products = Product.by_domain(domain)
    for case_id in case_ids:
        for prod in domain_products:
            # Most recent transaction first, per the helper's ordering.
            ordered = StockTransaction.get_ordered_transactions_for_stock(
                case_id, const.SECTION_TYPE_STOCK, prod._id
            )
            if ordered:
                update_stock_state_for_transaction(ordered[0])
def recalculate_domain_consumption(domain):
    """
    Given a domain, recalculate all saved consumption settings in that domain.

    For each case id mapped to the domain and each of the domain's products,
    re-fires the stock-state update on the most recent stock transaction.
    """
    # note: might get slow as this gets huge
    found_doc_ids = DocDomainMapping.objects.filter(
        domain_name=domain,
        doc_type='CommCareCase',
    ).values_list('doc_id', flat=True)
    products = Product.by_domain(domain)
    for supply_point_id in found_doc_ids:
        for product in products:
            # Delegate to the shared ordering helper rather than repeating the
            # filter/order_by('-report__date', '-pk') query inline; this keeps
            # the query logic in one place on StockTransaction.
            filtered_transactions = StockTransaction.get_ordered_transactions_for_stock(
                supply_point_id, const.SECTION_TYPE_STOCK, product._id
            )
            if filtered_transactions:
                # Newest transaction first, per the helper's ordering.
                update_stock_state_for_transaction(filtered_transactions[0])