def update_product_availability_facility_data(org_summary):
    """Ensure a ProductAvailabilityData row exists for every product at a
    facility for the summary's date window.

    Newly created rows are seeded from the most recent prior report for the
    same product/facility when one exists; otherwise they default to
    "without data". Existing rows are left untouched.

    :param org_summary: summary object whose ``supply_point`` must be a
        FACILITY-type location and whose ``date`` is the window being filled.
    :raises AssertionError: if the supply point is not a facility, or if a
        row's with/without/no-data counts do not sum to exactly 1.
    """
    # product availability
    facility = Location.get(docid=org_summary.supply_point)
    assert facility.location_type == "FACILITY"
    prods = Product.ids_by_domain(facility.domain)
    for p in prods:
        product_data, created = ProductAvailabilityData.objects.get_or_create(
            product=p,
            supply_point=facility._id,
            date=org_summary.date,
        )
        if created:
            # set defaults
            product_data.total = 1
            previous_reports = ProductAvailabilityData.objects.filter(
                product=p,
                supply_point=facility._id,
                date__lt=org_summary.date,
            )
            # exists() avoids a full COUNT query when we only need a boolean
            if previous_reports.exists():
                # carry forward the most recent known stock status
                prev = previous_reports.order_by("-date")[0]
                product_data.with_stock = prev.with_stock
                product_data.without_stock = prev.without_stock
                product_data.without_data = prev.without_data
            # otherwise we use the defaults
            else:
                product_data.with_stock = 0
                product_data.without_stock = 0
                product_data.without_data = 1
            product_data.save()
        # sanity check: each facility row represents exactly one facility
        assert (product_data.with_stock + product_data.without_stock
                + product_data.without_data) == 1, \
            "bad product data config"
def handle(self, *args, **options):
    """Backfill custom product-data field definitions for every
    commtrack-enabled domain.

    Scans each domain's product docs for ``product_data`` keys that are not
    yet declared in the domain's ``ProductFields`` definition and appends a
    non-required CustomDataField for each, labeled with the key itself.
    """
    for domain in Domain.get_all():
        if not domain['commtrack_enabled']:
            continue
        fields_definition = CustomDataFieldsDefinition.get_or_create(
            domain['name'], 'ProductFields'
        )
        product_ids = Product.ids_by_domain(domain['name'])
        existing_field_slugs = {field.slug for field in fields_definition.fields}
        for product in iter_docs(Product.get_db(), product_ids):
            product_data = product.get('product_data', {})
            for key in product_data:
                # skip empty keys and slugs already declared
                if key and key not in existing_field_slugs:
                    existing_field_slugs.add(key)
                    fields_definition.fields.append(CustomDataField(
                        slug=key,
                        label=key,
                        is_required=False,
                    ))
        # Only save a definition for domains which use custom product data
        if fields_definition.fields:
            fields_definition.save()
def ews_clear_stock_data_task(domain):
    """Wipe all stock data (transactions, reports, and states) for *domain*."""
    product_ids = Product.ids_by_domain(domain)
    # transactions reference reports, so they are removed first
    StockTransaction.objects.filter(report__domain=domain).delete()
    StockReport.objects.filter(domain=domain).delete()
    StockState.objects.filter(product_id__in=product_ids).delete()
def _get_products(domain):
    """Yield every product in *domain* as a wrapped Product document."""
    product_ids = Product.ids_by_domain(domain)
    for doc in iter_docs(Product.get_db(), product_ids):
        yield Product.wrap(doc)
def _init_with_product(supply_point, date):
    """Ensure a ProductAvailabilityData row exists for every product in the
    supply point's domain for the given date."""
    product_ids = Product.ids_by_domain(supply_point.domain)
    for product_id in product_ids:
        ProductAvailabilityData.objects.get_or_create(
            supply_point=supply_point._id,
            date=date,
            product=product_id,
        )
def process_non_facility_warehouse_data(org, start_date, end_date, strict=True):
    """Aggregate warehouse data from all nested child facilities up into a
    non-facility organization, one calendar month at a time.

    For each month between *start_date* and *end_date* this rolls up:
    lead times, per-product availability counts, group (status) summaries,
    and response alerts.

    :param org: the non-facility location whose children are aggregated.
    :param start_date: first month of the window (inclusive).
    :param end_date: last month of the window (inclusive).
    :param strict: when True, assert that each product's aggregated total
        equals the number of child facilities.
    :raises AssertionError: in strict mode, on a facility-count mismatch.
    """
    facs = get_nested_children(org)
    fac_ids = [f._id for f in facs]
    # lazy %-args so the message is only built when the log level is enabled
    logging.info("processing non-facility %s (%s), %s children",
                 org.name, str(org._id), len(facs))
    for year, month in months_between(start_date, end_date):
        window_date = datetime(year, month, 1)
        org_summary = OrganizationSummary.objects.get_or_create(
            supply_point=org._id, date=window_date)[0]
        org_summary.total_orgs = len(facs)
        sub_summaries = OrganizationSummary.objects.filter(
            date=window_date, supply_point__in=fac_ids)
        subs_with_lead_time = [s for s in sub_summaries
                               if s.average_lead_time_in_days]

        # lead times: average across children that have one; 0 when none do
        if subs_with_lead_time:
            days_sum = sum(s.average_lead_time_in_days
                           for s in subs_with_lead_time)
            org_summary.average_lead_time_in_days = \
                days_sum / len(subs_with_lead_time)
        else:
            org_summary.average_lead_time_in_days = 0
        org_summary.save()

        # product availability: sum the children's rows into this org's row
        prods = Product.ids_by_domain(org.domain)
        for p in prods:
            product_data = ProductAvailabilityData.objects.get_or_create(
                product=p, supply_point=org._id, date=window_date)[0]
            sub_prods = ProductAvailabilityData.objects.filter(
                product=p, supply_point__in=fac_ids, date=window_date)
            # use a distinct name (`sub`) so we don't clobber the outer
            # product-id loop variable `p` (list comps leak in Python 2)
            product_data.total = sum(sub.total for sub in sub_prods)
            if strict:
                assert product_data.total == len(facs), \
                    "total should match number of sub facilities"
            product_data.with_stock = sum(sub.with_stock for sub in sub_prods)
            product_data.without_stock = sum(sub.without_stock
                                             for sub in sub_prods)
            product_data.without_data = (product_data.total
                                         - product_data.with_stock
                                         - product_data.without_stock)
            product_data.save()

        dg = DeliveryGroups(month=month, facs=facs)
        # `status_type` (not `type`) so we don't shadow the builtin
        for status_type in NEEDED_STATUS_TYPES:
            gsum = GroupSummary.objects.get_or_create(
                org_summary=org_summary, title=status_type)[0]
            sub_sums = GroupSummary.objects.filter(
                title=status_type, org_summary__in=sub_summaries)
            # TODO: see if moving the aggregation to the db makes it
            # faster, if this is slow
            gsum.total = sum(s.total for s in sub_sums)
            gsum.responded = sum(s.responded for s in sub_sums)
            gsum.on_time = sum(s.on_time for s in sub_sums)
            gsum.complete = sum(s.complete for s in sub_sums)
            gsum.save()
            if status_type == SupplyPointStatusTypes.DELIVERY_FACILITY:
                expected = len(dg.delivering())
            elif status_type == SupplyPointStatusTypes.R_AND_R_FACILITY:
                expected = len(dg.submitting())
            elif status_type == SupplyPointStatusTypes.SOH_FACILITY \
                    or status_type == SupplyPointStatusTypes.SUPERVISION_FACILITY:
                expected = len(facs)
            # NOTE(review): if NEEDED_STATUS_TYPES ever gains a type not
            # covered above, `expected` here would be stale or undefined —
            # same hazard as the original code; confirm the constant's
            # contents before extending it.
            if gsum.total != expected:
                logging.info("expected %s but was %s for %s",
                             expected, gsum.total, gsum)

        for alert_type in ['rr_not_submitted', 'delivery_not_received',
                           'soh_not_responding', 'rr_not_responded',
                           'delivery_not_responding']:
            sub_alerts = Alert.objects.filter(
                supply_point__in=fac_ids, date=window_date, type=alert_type)
            aggregate_response_alerts(org, window_date, sub_alerts, alert_type)
def _iter_product_rows(domain):
    """Yield one CSV row per product in *domain*."""
    db = Product.get_db()
    for doc in iter_docs(db, Product.ids_by_domain(domain)):
        yield Product.wrap(doc).to_csv()
def clear_stock_data_task(domain='ilsgateway-test-1'):
    """Wipe all stock data (transactions, reports, and states) for a domain.

    :param domain: domain to clear; defaults to the previously hard-coded
        test domain so existing no-argument callers behave identically.
    """
    # transactions reference reports, so they are removed first
    StockTransaction.objects.filter(report__domain=domain).delete()
    StockReport.objects.filter(domain=domain).delete()
    products = Product.ids_by_domain(domain)
    StockState.objects.filter(product_id__in=products).delete()
def _get_products(domain):
    """Yield wrapped, non-archived Product documents for *domain*."""
    for p_doc in iter_docs(Product.get_db(), Product.ids_by_domain(domain)):
        # filter out archived products from export;
        # .get() covers both "key missing" and "key falsy" in one lookup
        if not p_doc.get('is_archived'):
            yield Product.wrap(p_doc)