def test_unscheduled_deliveries(self):
    """2000 gowns ordered with 5 + 1000 scheduled deliveries -> 995 unscheduled."""
    active_import = DataImport(
        status=ImportStatus.active,
        data_file=DataFile.PPE_ORDERINGCHARTS_DATE_XLSX,
        file_checksum="123",
    )
    active_import.save()
    # Two deliveries bracketing the reference date: one 5 days before,
    # one the day after.
    reference_date = datetime.strptime("2020-04-12", "%Y-%m-%d")
    sourcing_objects = SourcingRow(
        item=dc.Item.gown,
        quantity=2000,
        vendor="Gown Sellers Ltd",
        description="Some gowns",
        delivery_day_1=reference_date - timedelta(days=5),
        delivery_day_1_quantity=5,
        delivery_day_2=reference_date + timedelta(days=1),
        delivery_day_2_quantity=1000,
        status="Completed",
        received_quantity=0,
        raw_data={},
    ).to_objects(ErrorCollector())
    for obj in sourcing_objects:
        obj.source = active_import
        obj.save()
    gown_purchases = Purchase.objects.filter(item=dc.Item.gown)
    self.assertEqual(gown_purchases.count(), 1)
    # 2000 ordered - (5 + 1000) scheduled = 995 left unscheduled.
    self.assertEqual(gown_purchases.first().unscheduled_quantity, 995)
def finalize_import(data_import: DataImport):
    """Promote ``data_import`` to be the active import for its data file.

    Any import currently active for the same data file is demoted to
    ``replaced`` first. Raises ``DataImportError`` when more than one
    active import already exists, since that state indicates a bug.
    """
    previously_active = DataImport.objects.filter(
        data_file=data_import.data_file, status=ImportStatus.active
    )
    if previously_active.count() > 1:
        raise DataImportError("Multiple active imports")
    # Demote the old import(s), then activate the new one.
    previously_active.update(status=ImportStatus.replaced)
    data_import.status = ImportStatus.active
    data_import.save()
def setUp(self) -> None:
    # Active import that every fixture row below is attached to.
    self.data_import = DataImport(
        status=ImportStatus.active,
        data_file=DataFile.PPE_ORDERINGCHARTS_DATE_XLSX,
        file_checksum="123",
    )
    self.data_import.save()
    # Sourcing row: 1005 gowns ordered, split into two scheduled
    # deliveries (5 units on 2020-04-07, 1000 units on 2020-04-13),
    # nothing received yet.
    items = SourcingRow(
        item=dc.Item.gown,
        quantity=1005,
        vendor="Gown Sellers Ltd",
        description="Some gowns",
        delivery_day_1=datetime.strptime("2020-04-12", "%Y-%m-%d") - timedelta(days=5),
        delivery_day_1_quantity=5,
        status="Completed",
        received_quantity=0,
        delivery_day_2=datetime.strptime("2020-04-12", "%Y-%m-%d") + timedelta(days=1),
        delivery_day_2_quantity=1000,
        raw_data={},
    ).to_objects(ErrorCollector())
    # Two inventory snapshots for gowns; the newer one (04-11) should win.
    inventory = [
        Inventory(
            item=dc.Item.gown,
            quantity=100,
            as_of=datetime(year=2020, day=11, month=4),
            raw_data={},
        ),
        # this one is old and should be superseded
        Inventory(
            item=dc.Item.gown,
            quantity=200,
            as_of=datetime(year=2020, day=10, month=4),
            raw_data={},
        ),
    ]
    f = Facility(name="Generic Hospital", tpe=dc.FacilityType.hospital)
    # NOTE: the facility is appended to `items` before the deliveries so
    # it is saved before the FacilityDelivery rows that reference it.
    items.append(f)
    deliveries = [
        FacilityDelivery(
            date=datetime(year=2020, day=10, month=4),
            quantity=1234,
            facility=f,
            item=dc.Item.gown,
        ),
        FacilityDelivery(
            date=datetime(year=2020, day=10, month=4),
            quantity=123,
            facility=f,
            item=dc.Item.faceshield,
        ),
    ]
    items += deliveries
    items += inventory
    for item in items:
        item.source = self.data_import
        item.save()
    # Weekly demand row for gowns covering 2020-04-11 .. 2020-04-17.
    items = DemandRow(
        item=dc.Item.gown,
        demand=2457000,
        week_start_date=datetime.strptime("2020-04-11", "%Y-%m-%d"),
        week_end_date=datetime.strptime("2020-04-17", "%Y-%m-%d"),
        raw_data={},
    ).to_objects(ErrorCollector())
    for item in items:
        item.source = self.data_import
        item.save()
class TestAssetRollup(TestCase):
    """Exercises aggregations.asset_rollup_legacy against a fixed fixture:
    one gown sourcing row (1005 ordered, 5 delivered before / 1000 after
    2020-04-12), two gown inventory snapshots, two past facility
    deliveries, and one weekly gown demand row.
    """

    def setUp(self) -> None:
        # Active import that every fixture row below is attached to.
        self.data_import = DataImport(
            status=ImportStatus.active,
            data_file=DataFile.PPE_ORDERINGCHARTS_DATE_XLSX,
            file_checksum="123",
        )
        self.data_import.save()
        # Sourcing row: 1005 gowns ordered, split into two scheduled
        # deliveries (5 units on 2020-04-07, 1000 units on 2020-04-13).
        items = SourcingRow(
            item=dc.Item.gown,
            quantity=1005,
            vendor="Gown Sellers Ltd",
            description="Some gowns",
            delivery_day_1=datetime.strptime("2020-04-12", "%Y-%m-%d") - timedelta(days=5),
            delivery_day_1_quantity=5,
            status="Completed",
            received_quantity=0,
            delivery_day_2=datetime.strptime("2020-04-12", "%Y-%m-%d") + timedelta(days=1),
            delivery_day_2_quantity=1000,
            raw_data={},
        ).to_objects(ErrorCollector())
        # Two inventory snapshots; the newer one (04-11, qty 100) should win.
        inventory = [
            Inventory(
                item=dc.Item.gown,
                quantity=100,
                as_of=datetime(year=2020, day=11, month=4),
                raw_data={},
            ),
            # this one is old and should be superseded
            Inventory(
                item=dc.Item.gown,
                quantity=200,
                as_of=datetime(year=2020, day=10, month=4),
                raw_data={},
            ),
        ]
        f = Facility(name="Generic Hospital", tpe=dc.FacilityType.hospital)
        # NOTE: the facility is appended before the deliveries so it is
        # saved before the FacilityDelivery rows that reference it.
        items.append(f)
        deliveries = [
            FacilityDelivery(
                date=datetime(year=2020, day=10, month=4),
                quantity=1234,
                facility=f,
                item=dc.Item.gown,
            ),
            FacilityDelivery(
                date=datetime(year=2020, day=10, month=4),
                quantity=123,
                facility=f,
                item=dc.Item.faceshield,
            ),
        ]
        items += deliveries
        items += inventory
        for item in items:
            item.source = self.data_import
            item.save()
        # Weekly demand row for gowns covering 2020-04-11 .. 2020-04-17.
        items = DemandRow(
            item=dc.Item.gown,
            demand=2457000,
            week_start_date=datetime.strptime("2020-04-11", "%Y-%m-%d"),
            week_end_date=datetime.strptime("2020-04-17", "%Y-%m-%d"),
            raw_data={},
        ).to_objects(ErrorCollector())
        for item in items:
            item.source = self.data_import
            item.save()

    @freeze_time("2020-04-12")
    def test_rollup(self):
        today = datetime(2020, 4, 12)
        # Default rollup over the trailing 4-week window.
        rollup = aggregations.asset_rollup_legacy(today - timedelta(days=27), today)
        self.assertEqual(len(rollup), len(dc.Item))
        # demand of 20 = 5 in the last week * 4 weeks in the period
        self.assertEqual(
            rollup[dc.Item.gown],
            AssetRollup(
                asset=dc.Item.gown,
                total_cols=AggColumn.all(),
                inventory=100,
                demand_src={DemandSrc.real_demand},
                demand=7654622,
                ordered=5,
            ),
        )
        # Turn off use of hospitalization projection: demand falls back to
        # the raw weekly demand row scaled over the 4-week window.
        rollup = aggregations.asset_rollup_legacy(
            today - timedelta(days=27),
            today,
            use_hospitalization_projection=False,
        )
        self.assertEqual(
            rollup[dc.Item.gown],
            AssetRollup(
                asset=dc.Item.gown,
                total_cols=AggColumn.all(),
                inventory=100,
                demand_src={DemandSrc.real_demand},
                demand=2457000 * 4,
                ordered=5,
            ),
        )
        # should fallback to past deliveries (no demand row for faceshields)
        self.assertEqual(
            rollup[dc.Item.faceshield],
            AssetRollup(
                asset=dc.Item.faceshield,
                total_cols=AggColumn.all(),
                inventory=0,
                demand_src={DemandSrc.past_deliveries},
                demand=123 * 4,
                ordered=0,
            ),
        )
        # Turn off use of hospitalization projection & real demand for a
        # future window.
        future_rollup = aggregations.asset_rollup_legacy(
            today,
            today + timedelta(days=27),
            use_hospitalization_projection=False,
            use_real_demand=False,
        )
        # Fallback to delivery
        self.assertEqual(
            future_rollup[dc.Item.gown],
            AssetRollup(
                asset=dc.Item.gown,
                total_cols=AggColumn.all(),
                demand=1234 * 4,
                ordered=1000,
                demand_src={DemandSrc.past_deliveries},
                inventory=100,
            ),
        )

    def test_mayoral_rollup(self):
        # Rolling up by mayoral category instead of item.
        today = datetime(2020, 4, 12)
        rollup = aggregations.asset_rollup_legacy(
            today - timedelta(days=27),
            today,
            rollup_fn=lambda row: row.to_mayoral_category(),
        )
        self.assertEqual(len(rollup), len(dc.MayoralCategory))
        self.assertEqual(
            rollup[dc.MayoralCategory.iso_gowns].asset, dc.MayoralCategory.iso_gowns
        )

    def test_only_aggregate_active_items(self):
        # When the import is replaced, none of its rows should be counted.
        today = datetime(2020, 4, 12)
        self.data_import.status = ImportStatus.replaced
        self.data_import.save()
        try:
            self.assertEqual(aggregations.known_recent_demand(), {})
            rollup = aggregations.asset_rollup_legacy(today - timedelta(days=28), today)
            self.assertEqual(
                rollup[dc.Item.gown],
                AssetRollup(
                    asset=dc.Item.gown, total_cols=AggColumn.all(), demand=0, ordered=0
                ),
            )
        finally:
            # Restore the import so other tests in this class see it active.
            self.data_import.status = ImportStatus.active
            self.data_import.save()
def import_data(
    path: Path,
    mappings: List[xlsx_utils.SheetMapping],
    current_as_of: date,
    user_provided_filename: Optional[str],
    uploaded_by: Optional[str] = None,
    overwrite_in_prog=False,
):
    """Import every sheet in ``mappings`` from the workbook at ``path``.

    Creates a candidate ``DataImport`` row, converts each mapped sheet's
    rows into model objects attached to it, and returns the ``DataImport``.
    ``FacilityDelivery`` objects are bulk-created for speed; everything
    else is saved one at a time.

    Raises:
        ImportError: if the mappings reference more than one data file.
        ImportInProgressError: if an import for the same data file is
            already in progress and ``overwrite_in_prog`` is False.
    """
    error_collector = ErrorCollector()

    # All mappings must target the same logical data file.
    data_files = {mapping.data_file for mapping in mappings}
    if len(data_files) != 1:
        raise ImportError(
            "Something is wrong, can't import from two different files..."
        )
    data_file = mappings[0].data_file

    in_progress = import_in_progress(data_file)
    if in_progress.count() > 0:
        if overwrite_in_prog:
            in_progress.update(status=ImportStatus.replaced)
        else:
            raise ImportInProgressError(in_progress.first().id)

    # BUG FIX: previously the sha256 *hash object* (not its hex digest)
    # was stored in file_checksum; checksums are plain strings elsewhere.
    # Hash in chunks so large workbooks aren't read into memory at once.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    checksum = digest.hexdigest()

    uploaded_by = uploaded_by or ""
    data_import = DataImport(
        status=ImportStatus.candidate,
        current_as_of=current_as_of,
        data_file=data_file,
        uploaded_by=uploaded_by,
        file_checksum=checksum,
        file_name=user_provided_filename or path.name,
    )
    data_import.save()

    for mapping in mappings:
        try:
            data = list(import_xlsx(path, mapping, error_collector))
            # There are a lot of deliveries — collect them for bulk import.
            deliveries = []
            for item in data:
                try:
                    for obj in item.to_objects(error_collector):
                        obj.source = data_import
                        if isinstance(obj, FacilityDelivery):
                            deliveries.append(obj)
                        else:
                            obj.save()
                except Exception as ex:
                    # Row-level failures are reported (and sent to Sentry)
                    # without aborting the rest of the sheet.
                    error_collector.report_error(
                        f"Failure importing row. This is a bug: {ex}"
                    )
                    sentry_sdk.capture_exception(ex)
            FacilityDelivery.objects.bulk_create(deliveries)
        except Exception:
            # Sheet-level failures abort the whole import.
            print(f"Failure importing {path}, mapping: {mapping.sheet_name}")
            raise
    print("Errors: ")
    error_collector.dump()
    return data_import