Example #1
def sync_supply_point_status(domain, endpoint, facility, checkpoint, date, limit=100, offset=0):
    has_next = True
    next_url = ""

    while has_next:
        meta, supply_point_statuses = endpoint.get_supplypointstatuses(
            domain,
            limit=limit,
            offset=offset,
            next_url_params=next_url,
            filters=dict(supply_point=facility, status_date__gte=date),
            facility=facility
        )
        if not supply_point_statuses:
            return None
        location_id = SQLLocation.objects.get(domain=domain, external_id=facility).location_id
        # set the checkpoint right before the data we are about to process
        save_stock_data_checkpoint(checkpoint,
                                   'supply_point_status',
                                   meta.get('limit') or limit,
                                   meta.get('offset') or offset, date, location_id, True)
        for supply_point_status in supply_point_statuses:
            process_supply_point_status(supply_point_status, domain, location_id)

        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]
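All of these sync helpers share the same cursor-style pagination loop: fetch one page from the endpoint, persist a checkpoint describing the page about to be processed, handle the items, then follow the query string of meta['next'] until the API stops returning one. A minimal, hypothetical sketch of that pattern in isolation (fetch_page and process_item are illustrative names, not project code):

def paginate(fetch_page, process_item, limit=100, offset=0):
    next_url = ""
    has_next = True
    while has_next:
        # fetch_page is assumed to return (meta, items), like the endpoint methods above
        meta, items = fetch_page(limit=limit, offset=offset, next_url_params=next_url)
        for item in items:
            process_item(item)
        if not meta.get('next'):
            has_next = False
        else:
            # keep only the query string, e.g. "limit=100&offset=200"
            next_url = meta['next'].split('?')[1]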
Example #2
def sync_supply_point_status(domain, endpoint, facility, checkpoint, date, limit=100, offset=0):
    has_next = True
    next_url = ""

    while has_next:
        meta, supply_point_statuses = endpoint.get_supplypointstatuses(
            domain,
            limit=limit,
            offset=offset,
            next_url_params=next_url,
            filters=dict(supply_point=facility, status_date__gte=date),
            facility=facility
        )
        save_stock_data_checkpoint(checkpoint,
                                   'supply_point_status',
                                   meta.get('limit') or limit,
                                   meta.get('offset') or offset, date, facility, True)
        for sps in supply_point_statuses:
            # skip statuses that have already been imported (makes re-runs idempotent)
            try:
                SupplyPointStatus.objects.get(external_id=sps.external_id)
            except SupplyPointStatus.DoesNotExist:
                sps.save()

        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]
Example #3
def sync_delivery_group_report(domain, endpoint, facility, checkpoint, date, limit=100, offset=0):
    has_next = True
    next_url = ""
    while has_next:
        meta, delivery_group_reports = endpoint.get_deliverygroupreports(
            domain,
            limit=limit,
            offset=offset,
            next_url_params=next_url,
            filters=dict(supply_point=facility, report_date__gte=date),
            facility=facility
        )
        location_id = SQLLocation.objects.get(domain=domain, external_id=facility).location_id
        # set the checkpoint right before the data we are about to process
        save_stock_data_checkpoint(checkpoint,
                                   'delivery_group',
                                   meta.get('limit') or limit,
                                   meta.get('offset') or offset,
                                   date, location_id, True)
        for dgr in delivery_group_reports:
            try:
                DeliveryGroupReport.objects.get(external_id=dgr.external_id, location_id=location_id)
            except DeliveryGroupReport.DoesNotExist:
                dgr.save()

        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]
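The try/except DoesNotExist pattern above makes repeated runs idempotent: a report is only saved if it has not already been imported for this location. A hypothetical equivalent using exists() would be:

if not DeliveryGroupReport.objects.filter(external_id=dgr.external_id, location_id=location_id).exists():
    dgr.save()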
Example #4
def stock_data_task(api_object):
    # checkpoint logic
    start_date = datetime.today()
    default_api = api_object.apis[0][0]
    checkpoint, _ = StockDataCheckpoint.objects.get_or_create(
        domain=api_object.domain,
        defaults={
            "api": default_api,
            "date": None,
            "limit": 1000,
            "offset": 0,
            "location": None,
            "start_date": start_date,
        },
    )

    if not checkpoint.api:
        checkpoint.api = default_api

    if not checkpoint.start_date:
        checkpoint.start_date = start_date
        checkpoint.save()

    if not api_object.all_stock_data:
        facilities = api_object.test_facilities
    else:
        facilities = api_object.get_ids()
    if checkpoint.location:
        external_id = api_object.get_last_processed_location(checkpoint)
        if external_id:
            facilities = list(itertools.dropwhile(lambda x: int(x) != int(external_id), facilities))
            process_facility_task(api_object, facilities[0], start_from=checkpoint.api)
            facilities = facilities[1:]

    if not checkpoint.date or checkpoint.location:
        # use subtasks only during initial migration
        facilities_chunked_list = chunked(facilities, 5)
        for chunk in facilities_chunked_list:
            api_object.process_data(process_facility_task, chunk)
    else:
        offset = checkpoint.offset
        for stock_api in itertools.dropwhile(lambda x: x.name != checkpoint.api, api_object.get_stock_apis_objects()):
            stock_api.add_date_filter(checkpoint.date, checkpoint.start_date)
            synchronization(
                stock_api,
                checkpoint,
                checkpoint.date,
                1000,
                offset,
                params={"domain": api_object.domain},
                domain=api_object.domain,
                atomic=True,
            )
            offset = 0

    checkpoint = StockDataCheckpoint.objects.get(domain=api_object.domain)
    save_stock_data_checkpoint(checkpoint, default_api, 1000, 0, checkpoint.start_date, None, False)
    checkpoint.start_date = None
    checkpoint.save()
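The chunked() helper used above is not defined in these examples; the code only relies on it splitting the facility list into groups of at most five for the per-chunk subtasks. A minimal stand-in with those semantics (an assumption, not the project's actual implementation) could be:

from itertools import islice

def chunked(iterable, n):
    # yield successive lists of at most n items from the iterable
    it = iter(iterable)
    while True:
        chunk = list(islice(it, n))
        if not chunk:
            return
        yield chunk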
Example #5
def stock_data_task(api_object):
    # checkpoint logic
    start_date = datetime.today()
    default_api = api_object.apis[0][0]
    checkpoint, _ = StockDataCheckpoint.objects.get_or_create(domain=api_object.domain, defaults={
        'api': default_api,
        'date': None,
        'limit': 1000,
        'offset': 0,
        'location': None,
        'start_date': start_date
    })

    if not checkpoint.api:
        checkpoint.api = default_api

    if not checkpoint.start_date:
        checkpoint.start_date = start_date
        checkpoint.save()

    if not api_object.all_stock_data:
        facilities = api_object.test_facilities
    else:
        facilities = api_object.get_ids()
    if checkpoint.location:
        external_id = api_object.get_last_processed_location(checkpoint)
        if external_id:
            facilities = list(itertools.dropwhile(lambda x: int(x) != int(external_id), facilities))
            process_facility_task(api_object, facilities[0], start_from=checkpoint.api)
            facilities = facilities[1:]

    if not checkpoint.date or checkpoint.location:
        # use subtasks only during initial migration
        facilities_chunked_list = chunked(facilities, 5)
        for chunk in facilities_chunked_list:
            api_object.process_data(process_facility_task, chunk)
    else:
        offset = checkpoint.offset
        for stock_api in itertools.dropwhile(
            lambda x: x.name != checkpoint.api, api_object.get_stock_apis_objects()
        ):
            stock_api.add_date_filter(checkpoint.date, checkpoint.start_date)
            synchronization(
                stock_api,
                checkpoint,
                checkpoint.date,
                1000,
                offset,
                params={'domain': api_object.domain},
                domain=api_object.domain
            )
            offset = 0

    checkpoint = StockDataCheckpoint.objects.get(domain=api_object.domain)
    save_stock_data_checkpoint(checkpoint, default_api, 1000, 0, checkpoint.start_date, None, False)
    checkpoint.start_date = None
    checkpoint.save()
Example #6
def sync_stock_transaction(domain, endpoint, facility, xform, checkpoint,
                           date, limit=100, offset=0):
    has_next = True
    next_url = ""
    while has_next:
        supply_point = facility
        case = SupplyPointCase.view('hqcase/by_domain_external_id',
                                    key=[domain, str(supply_point)],
                                    reduce=False,
                                    include_docs=True,
                                    limit=1).first()
        if not case:
            break
        meta, stocktransactions = endpoint.get_stocktransactions(next_url_params=next_url,
                                                                 limit=limit,
                                                                 offset=offset,
                                                                 filters=(dict(supply_point=supply_point,
                                                                               date__gte=date,
                                                                               order_by='date')))
        save_stock_data_checkpoint(checkpoint,
                                   'stock_transaction',
                                   meta.get('limit') or limit,
                                   meta.get('offset') or offset,
                                   date, facility, True)
        transactions_to_add = []
        with transaction.commit_on_success():
            for stocktransaction in stocktransactions:
                if case:
                    product = Product.get_by_code(domain, stocktransaction.product)
                    report = StockReport(
                        form_id=xform._id,
                        date=force_to_datetime(stocktransaction.date),
                        type='balance',
                        domain=domain
                    )
                    report.save()
                    try:
                        sql_product = SQLProduct.objects.get(product_id=product._id)
                    except SQLProduct.DoesNotExist:
                        continue

                    transactions_to_add.append(StockTransaction(
                        case_id=case._id,
                        product_id=product._id,
                        sql_product=sql_product,
                        section_id='stock',
                        type='stockonhand',
                        stock_on_hand=Decimal(stocktransaction.ending_balance),
                        report=report
                    ))
        # Doesn't send signal
        StockTransaction.objects.bulk_create(transactions_to_add)
        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]
Example #7
def sync_stock_transactions_for_facility(domain, endpoint, facility, checkpoint,
                                         date, limit=1000, offset=0):
    """
    Syncs stock data from StockTransaction objects in ILSGateway to StockTransaction objects in HQ
    """
    has_next = True
    next_url = ""
    section_id = 'stock'
    supply_point = facility
    case = get_supply_point_case_in_domain_by_id(domain, supply_point)
    if not case:
        return
    location_id = case.location_id
    save_stock_data_checkpoint(checkpoint, 'stock_transaction', limit, offset, date, location_id, True)

    products_saved = set()
    while has_next:
        meta, stocktransactions = endpoint.get_stocktransactions(
            next_url_params=next_url,
            limit=limit,
            offset=offset,
            filters={
                'supply_point': supply_point,
                'date__gte': date,
                'date__lte': checkpoint.start_date
            }
        )
        # set the checkpoint right before the data we are about to process
        meta_limit = meta.get('limit') or limit
        meta_offset = meta.get('offset') or offset
        save_stock_data_checkpoint(
            checkpoint, 'stock_transaction', meta_limit, meta_offset, date, location_id, True
        )
        transactions_to_add = []
        with transaction.atomic():
            for stocktransaction in stocktransactions:
                transactions = sync_stock_transaction(stocktransaction, domain, case, bulk=True)
                transactions_to_add.extend(transactions)
                products_saved.update(map(lambda x: x.product_id, transactions))

        if transactions_to_add:
            # Doesn't send signal
            StockTransaction.objects.bulk_create(transactions_to_add)

        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]
    for product in products_saved:
        # if we saved anything rebuild the stock state object by firing the signal
        # on the last transaction for each product
        last_st = StockTransaction.latest(case.get_id, section_id, product)
        update_stock_state_for_transaction(last_st)
Example #8
def sync_product_stock(domain, endpoint, facility, checkpoint, date, limit=100, offset=0):
    has_next = True
    next_url = ""
    while has_next:
        supply_point = facility
        case = SupplyPointCase.view('hqcase/by_domain_external_id',
                                    key=[domain, str(supply_point)],
                                    reduce=False,
                                    include_docs=True,
                                    limit=1).first()
        meta, product_stocks = endpoint.get_productstocks(
            next_url_params=next_url,
            limit=limit,
            offset=offset,
            filters=dict(supply_point=supply_point, last_modified__gte=date)
        )
        save_stock_data_checkpoint(checkpoint,
                                   'product_stock',
                                   meta.get('limit') or limit,
                                   meta.get('offset') or offset,
                                   date, facility, True)
        for product_stock in product_stocks:
            if case:
                product = Product.get_by_code(domain, product_stock.product)
                try:
                    stock_state = StockState.objects.get(section_id='stock',
                                                         case_id=case._id,
                                                         product_id=product._id)
                    stock_state.last_modified_date = product_stock.last_modified
                    stock_state.stock_on_hand = product_stock.quantity or 0
                except StockState.DoesNotExist:
                    stock_state = StockState(section_id='stock',
                                             case_id=case._id,
                                             product_id=product._id,
                                             stock_on_hand=product_stock.quantity or 0,
                                             last_modified_date=product_stock.last_modified,
                                             sql_product=SQLProduct.objects.get(product_id=product._id))

                if product_stock.auto_monthly_consumption:
                    stock_state.daily_consumption = product_stock.auto_monthly_consumption / DAYS_IN_MONTH
                else:
                    stock_state.daily_consumption = None
                stock_state.save()

        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]
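On newer Django versions the get-then-update / create-on-miss logic above can be collapsed into update_or_create. A rough, hypothetical equivalent for a single product (not identical in every detail: the original only attaches sql_product when creating a new StockState):

stock_state, _ = StockState.objects.update_or_create(
    section_id='stock',
    case_id=case._id,
    product_id=product._id,
    defaults={
        'stock_on_hand': product_stock.quantity or 0,
        'last_modified_date': product_stock.last_modified,
        'sql_product': SQLProduct.objects.get(product_id=product._id),
    },
)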
Example #9
def stock_data_task(api_object):
    # checkpoint logic
    start_date = datetime.today()
    default_api = api_object.apis[0][0]

    checkpoint, _ = StockDataCheckpoint.objects.get_or_create(domain=api_object.domain, defaults={
        'api': default_api,
        'date': None,
        'limit': 1000,
        'offset': 0,
        'location': None,
        'start_date': start_date
    })

    if not checkpoint.start_date:
        checkpoint.start_date = start_date
        checkpoint.save()

    if not api_object.all_stock_data:
        facilities = api_object.test_facilities
    else:
        facilities = api_object.get_ids()

    if checkpoint.location:
        external_id = api_object.get_last_processed_location(checkpoint)
        if external_id:
            facilities = list(itertools.dropwhile(lambda x: int(x) != int(external_id), facilities))
            process_facility_task(api_object, facilities[0], start_from=checkpoint.api)
            facilities = facilities[1:]

    if not checkpoint.date:
        # use subtasks only during initial migration
        facilities_chunked_list = chunked(facilities, 5)

        for chunk in facilities_chunked_list:
            res = chain(process_facility_task.si(api_object, fac) for fac in chunk)()
            res.get()

    else:
        for facility in facilities:
            process_facility_task(api_object, facility)

    checkpoint = StockDataCheckpoint.objects.get(domain=api_object.domain)
    save_stock_data_checkpoint(checkpoint, default_api, 1000, 0, start_date, None, False)
    checkpoint.start_date = None
    checkpoint.save()
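The initial-migration branch above fans facilities out to Celery: .si() builds an immutable signature, so each process_facility_task in the chain runs with exactly the arguments given here rather than receiving the previous task's result, and res.get() blocks until the whole chain has finished. A stripped-down illustration of that pattern (names taken from the code above):

from celery import chain

workflow = chain(process_facility_task.si(api_object, fac) for fac in chunk)
res = workflow()   # enqueue the chain of subtasks
res.get()          # wait for the final task in the chain to complete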
Example #10
def process_facility_task(api_object, facility, start_from=None):
    checkpoint = StockDataCheckpoint.objects.get(domain=api_object.domain)
    limit = checkpoint.limit
    offset = checkpoint.offset
    apis = api_object.apis

    if start_from is not None:
        apis = itertools.dropwhile(lambda x: x[0] != checkpoint.api, api_object.apis)

    for idx, (api_name, api_function) in enumerate(apis):
        api_function(
            domain=api_object.domain,
            checkpoint=checkpoint,
            date=checkpoint.date,
            limit=limit,
            offset=offset,
            endpoint=api_object.endpoint,
            facility=facility,
        )
        limit = 1000
        offset = 0
    save_stock_data_checkpoint(checkpoint, "", 1000, 0, checkpoint.date, api_object.get_location_id(facility))
Example #11
def process_facility_task(api_object, facility, start_from=None):
    checkpoint = StockDataCheckpoint.objects.get(domain=api_object.domain)
    limit = checkpoint.limit
    offset = checkpoint.offset
    apis = api_object.apis

    if start_from is not None:
        apis = itertools.dropwhile(lambda x: x[0] != checkpoint.api, api_object.apis)

    for idx, (api_name, api_function) in enumerate(apis):
        api_function(
            domain=api_object.domain,
            checkpoint=checkpoint,
            date=checkpoint.date,
            limit=limit,
            offset=offset,
            endpoint=api_object.endpoint,
            facility=facility
        )
        limit = 1000
        offset = 0
    save_stock_data_checkpoint(checkpoint, '', 1000, 0, checkpoint.date, api_object.get_location_id(facility))
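When start_from is set, itertools.dropwhile skips the leading APIs until it reaches the one recorded in the checkpoint, then yields that API and everything after it. A small, self-contained illustration using API names that appear in the checkpoints above:

import itertools

apis = [('supply_point_status', None), ('delivery_group', None), ('stock_transaction', None)]
resumed = list(itertools.dropwhile(lambda x: x[0] != 'delivery_group', apis))
# resumed == [('delivery_group', None), ('stock_transaction', None)]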
Example #12
def stock_data_task(domain, endpoint, apis, config, test_facilities=None):
    # checkpoint logic
    start_date = datetime.today()
    default_api = apis[0][0]

    try:
        checkpoint = StockDataCheckpoint.objects.get(domain=domain)
        api = checkpoint.api
        # legacy
        if api == 'product_stock':
            api = default_api
        date = checkpoint.date
        limit = checkpoint.limit
        offset = checkpoint.offset
        location = checkpoint.location
        if not checkpoint.start_date:
            checkpoint.start_date = start_date
            checkpoint.save()
        else:
            start_date = checkpoint.start_date
    except StockDataCheckpoint.DoesNotExist:
        checkpoint = StockDataCheckpoint()
        checkpoint.domain = domain
        checkpoint.start_date = start_date
        api = default_api
        date = None
        limit = 1000
        offset = 0
        location = None

    if not config.all_stock_data:
        facilities = test_facilities
    else:
        supply_points_ids = SQLLocation.objects.filter(
            domain=domain,
            location_type__in=get_reporting_types(domain)
        ).order_by('created_at').values_list('supply_point_id', flat=True)
        facilities = [doc['external_id'] for doc in iter_docs(SupplyPointCase.get_db(), supply_points_ids)]

    apis_from_checkpoint = itertools.dropwhile(lambda x: x[0] != api, apis)
    facilities_copy = list(facilities)
    if location:
        supply_point = SupplyPointCase.get_by_location_id(domain, location.location_id)
        external_id = supply_point.external_id if supply_point else None
        if external_id:
            facilities = itertools.dropwhile(lambda x: int(x) != int(external_id), facilities)

    for idx, (api_name, api_function) in enumerate(apis_from_checkpoint):
        api_function(
            domain=domain,
            checkpoint=checkpoint,
            date=date,
            limit=limit,
            offset=offset,
            endpoint=endpoint,
            facilities=facilities
        )
        limit = 1000
        offset = 0
        # todo: see if we can avoid modifying the list of facilities in place
        if idx == 0:
            facilities = facilities_copy

    save_stock_data_checkpoint(checkpoint, default_api, 1000, 0, start_date, None, False)
    checkpoint.start_date = None
    checkpoint.save()
Example #13
def sync_stock_transactions_for_facility(domain, endpoint, facility, xform, checkpoint,
                                         date, limit=1000, offset=0):
    """
    Syncs stock data from StockTransaction objects in ILSGateway to StockTransaction objects in HQ
    """
    has_next = True
    next_url = ""
    section_id = 'stock'
    supply_point = facility
    case = get_supply_point_by_external_id(domain, supply_point)
    if not case:
        return

    save_stock_data_checkpoint(checkpoint, 'stock_transaction', limit, offset, date, facility, True)

    products_saved = set()
    while has_next:
        meta, stocktransactions = endpoint.get_stocktransactions(next_url_params=next_url,
                                                                 limit=limit,
                                                                 offset=offset,
                                                                 filters=(dict(supply_point=supply_point,
                                                                               date__gte=date,
                                                                               order_by='date')))

        # set the checkpoint right before the data we are about to process
        meta_limit = meta.get('limit') or limit
        meta_offset = meta.get('offset') or offset
        save_stock_data_checkpoint(checkpoint, 'stock_transaction', meta_limit, meta_offset, date, facility, True)
        transactions_to_add = []
        with transaction.commit_on_success():
            for stocktransaction in stocktransactions:
                params = dict(
                    form_id=xform._id,
                    date=force_to_datetime(stocktransaction.date),
                    type='balance',
                    domain=domain,
                )
                try:
                    report, _ = StockReport.objects.get_or_create(**params)
                except StockReport.MultipleObjectsReturned:
                    # legacy
                    report = StockReport.objects.filter(**params)[0]

                sql_product = SQLProduct.objects.get(code=stocktransaction.product, domain=domain)
                if stocktransaction.quantity != 0:
                    transactions_to_add.append(StockTransaction(
                        case_id=case._id,
                        product_id=sql_product.product_id,
                        sql_product=sql_product,
                        section_id=section_id,
                        type='receipts' if stocktransaction.quantity > 0 else 'consumption',
                        stock_on_hand=Decimal(stocktransaction.ending_balance),
                        quantity=Decimal(stocktransaction.quantity),
                        report=report
                    ))
                transactions_to_add.append(StockTransaction(
                    case_id=case._id,
                    product_id=sql_product.product_id,
                    sql_product=sql_product,
                    section_id=section_id,
                    type='stockonhand',
                    stock_on_hand=Decimal(stocktransaction.ending_balance),
                    report=report
                ))
                products_saved.add(sql_product.product_id)

        if transactions_to_add:
            # Doesn't send signal
            StockTransaction.objects.bulk_create(transactions_to_add)

        if not meta.get('next', False):
            has_next = False
        else:
            next_url = meta['next'].split('?')[1]

    for product in products_saved:
        # if we saved anything rebuild the stock state object by firing the signal
        # on the last transaction for each product
        last_st = StockTransaction.latest(case._id, section_id, product)
        update_stock_state_for_transaction(last_st)
Example #14
def stock_data_task(domain, endpoint, apis, test_facilities=None):
    start_date = datetime.today()
    try:
        checkpoint = StockDataCheckpoint.objects.get(domain=domain)
        api = checkpoint.api
        date = checkpoint.date
        limit = checkpoint.limit
        offset = checkpoint.offset
        location = checkpoint.location
        if not checkpoint.start_date:
            checkpoint.start_date = start_date
            checkpoint.save()
        else:
            start_date = checkpoint.start_date
    except StockDataCheckpoint.DoesNotExist:
        checkpoint = StockDataCheckpoint()
        checkpoint.domain = domain
        checkpoint.start_date = start_date
        api = 'product_stock'
        date = None
        limit = 100
        offset = 0
        location = None

    if TEST:
        facilities = test_facilities
    else:
        facilities = SQLLocation.objects.filter(
            domain=domain,
            location_type__iexact='FACILITY'
        ).order_by('created_at').values_list('external_id', flat=True)
    apis_from_checkpoint = itertools.dropwhile(lambda x: x[0] != api, apis)
    facilities_copy = list(facilities)
    if location:
        supply_point = SupplyPointCase.view(
            'commtrack/supply_point_by_loc',
            key=[location.domain, location.location_id],
            include_docs=True,
            classes={'CommCareCase': SupplyPointCase},
        ).one()
        external_id = supply_point.external_id if supply_point else None
        if external_id:
            facilities = itertools.dropwhile(lambda x: int(x) != int(external_id), facilities)

    for idx, api in enumerate(apis_from_checkpoint):
        api[1](
            domain=domain,
            checkpoint=checkpoint,
            date=date,
            limit=limit,
            offset=offset,
            endpoint=endpoint,
            facilities=facilities
        )
        limit = 100
        offset = 0
        if idx == 0:
            facilities = facilities_copy
    save_stock_data_checkpoint(checkpoint, 'product_stock', 100, 0, start_date, None, False)
    checkpoint.start_date = None
    checkpoint.save()