def _handle_generate_lifetime_campaign_report(campaign_id):
    """Queue and process a lifetime (start-to-date) report for one campaign.

    NOTE(review): a byte-for-byte identical function with this same name is
    defined again later in this module; this first definition is shadowed by
    the later one and is effectively dead code — confirm and remove one copy.

    On a report failure the campaign is re-queued for another attempt.
    """
    now = datetime.utcnow()
    campaign = PromoCampaign._byID(campaign_id, data=True)
    # Stamp everything as UTC-aware before comparing.  replace() only tags
    # the tzinfo, it does no conversion — presumably the stored campaign
    # dates are already naive UTC (TODO confirm).
    start = campaign.start_date.replace(tzinfo=pytz.utc)
    end = campaign.end_date.replace(tzinfo=pytz.utc)
    now = now.replace(tzinfo=pytz.utc)

    # never request data past the present moment
    end = min([now, end])

    g.log.info("generating report for campaign %s" % campaign._fullname)

    report_id = report.queue_report(
        start=start,
        end=end,
        parameters=[{
            "flightId": campaign.external_flight_id,
        }],
    )

    try:
        _process_lifetime_campaign_report(
            campaign=campaign,
            report_id=report_id,
            queued_date=now,
        )

        g.log.info("successfully processed report for campaign (%s/%s)" %
            (campaign._fullname, report_id))
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_promo_report(campaign)
def _handle_generate_lifetime_campaign_report(campaign_id):
    """Run a lifetime (start-to-date) report for a single campaign.

    The reporting window is the campaign's own start/end dates, capped at
    the current time.  On a report failure the campaign is re-queued for
    another attempt.
    """
    campaign = PromoCampaign._byID(campaign_id, data=True)

    # make every datetime UTC-aware so the comparisons below are valid
    utc = pytz.utc
    now = datetime.utcnow().replace(tzinfo=utc)
    start = campaign.start_date.replace(tzinfo=utc)
    end = campaign.end_date.replace(tzinfo=utc)

    # don't request data beyond the present moment
    if end > now:
        end = now

    g.log.info("generating report for campaign %s" % campaign._fullname)

    report_id = report.queue_report(
        start=start,
        end=end,
        parameters=[{
            "flightId": campaign.external_flight_id,
        }],
    )

    try:
        _process_lifetime_campaign_report(
            campaign=campaign,
            report_id=report_id,
            queued_date=now,
        )
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_promo_report(campaign)
    else:
        g.log.info("successfully processed report for campaign (%s/%s)" %
                   (campaign._fullname, report_id))
def _handle_generate_daily_link_reports(link_ids, campaign_ids):
    """Queue and process a day-by-day report covering several links.

    link_ids: ids of the Links to report on.
    campaign_ids: ids of the PromoCampaigns that determine the overall
        date range of the report.

    On a report failure the links are re-queued for another attempt.
    """
    now = datetime.utcnow()
    links = Link._byID(link_ids, data=True, return_dict=False)
    campaigns = PromoCampaign._byID(campaign_ids, data=True, return_dict=False)

    # Nothing to report on.  The links check also guards the min() over
    # links below, which raises ValueError on an empty sequence.
    if not links or not campaigns:
        return

    links_start, links_end = _get_campaigns_date_range(campaigns)
    now = now.replace(tzinfo=pytz.utc)
    links_start = links_start.replace(tzinfo=pytz.utc)
    links_end = links_end.replace(tzinfo=pytz.utc)

    # if data has already been processed then there's no need
    # to redo it.  use the last time the report was run as a
    # starting point, but subtract 24hrs since initial numbers
    # are preliminary.
    # NOTE(review): assumes last_daily_report_run is stored tz-aware so it
    # compares cleanly with the aware links_start — confirm.
    last_run = min(getattr(l, "last_daily_report_run", links_start) for l in links)
    start = max(
        last_run - timedelta(hours=24),
        links_start,
    )

    # in cases where we may be running a report well after a link
    # has completed ensure we always use the actual start.
    if start > links_end:
        start = links_start

    # never request data past the present moment
    end = min([now, links_end])

    link_fullnames = ",".join([l._fullname for l in links])
    g.log.info("generating report for link %s (%s-%s)" % (
        link_fullnames, start.strftime('%Y-%m-%d'), end.strftime('%Y-%m-%d')))

    report_id = report.queue_report(
        start=start,
        end=end,
        groups=["optionId", "day"],
        parameters=[{
            "campaignId": l.external_campaign_id,
        } for l in links],
    )

    g.log.info("processing report for link (%s/%s)" %
        (link_fullnames, report_id))

    try:
        _process_daily_link_reports(
            links=links,
            report_id=report_id,
            queued_date=now,
        )

        g.log.info("successfully processed report for link (%s/%s)" %
            (link_fullnames, report_id))
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_link_reports(links)
def _handle_generate_lifetime_campaign_reports(campaign_ids):
    """Queue and process one lifetime report spanning several campaigns.

    The report window runs from the earliest start through the latest end
    across all given campaigns, capped at the current time.  On a report
    failure the campaigns are re-queued for another attempt.
    """
    now = datetime.utcnow()
    campaigns = PromoCampaign._byID(campaign_ids, data=True, return_dict=False)

    # Nothing to report on; also guards the min()/max() calls below, which
    # raise ValueError on an empty sequence.  (Matches the guard in
    # _handle_generate_daily_link_reports.)
    if not campaigns:
        return

    start = min(c.start_date for c in campaigns).replace(tzinfo=pytz.utc)
    end = max(c.end_date for c in campaigns).replace(tzinfo=pytz.utc)
    now = now.replace(tzinfo=pytz.utc)

    # never request data past the present moment
    end = min([now, end])

    campaign_fullnames = ",".join(c._fullname for c in campaigns)

    g.log.info("generating report for campaigns %s (%s-%s)" % (
        campaign_fullnames, start.strftime('%Y-%m-%d'), end.strftime('%Y-%m-%d')))

    report_id = report.queue_report(
        start=start,
        end=end,
        groups=["optionId"],
        parameters=[{
            "flightId": c.external_flight_id,
        } for c in campaigns],
    )

    try:
        _process_lifetime_campaign_reports(
            campaigns=campaigns,
            report_id=report_id,
            queued_date=now,
        )

        g.log.info("successfully processed report for campaigns (%s/%s)" %
            (campaign_fullnames, report_id))
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_promo_reports(campaigns)
def _handle_generate_daily_link_report(link_id):
    """Queue and process a day-by-day report for a single link.

    NOTE(review): a function with this same name (identical except for
    whitespace) is defined again later in this module; this first
    definition is shadowed by the later one and is effectively dead
    code — confirm and remove one copy.

    Links with no campaigns are skipped.  On a report failure the link is
    re-queued for another attempt.
    """
    now = datetime.utcnow()
    link = Link._byID(link_id, data=True)
    campaigns = list(PromoCampaign._by_link(link._id))

    # a link with no campaigns has nothing to report on
    if not campaigns:
        return

    # the link's reporting window spans all of its campaigns
    link_start = min([promo.start_date for promo in campaigns])
    link_end = max([promo.end_date for promo in campaigns])

    # stamp everything as UTC-aware before comparing
    now = now.replace(tzinfo=pytz.utc)
    link_start = link_start.replace(tzinfo=pytz.utc)
    link_end = link_end.replace(tzinfo=pytz.utc)

    # if data has already been processed then there's no need
    # to redo it.  use the last time the report was run as a
    # starting point, but subtract 24hrs since initial numbers
    # are preliminary.
    if hasattr(link, "last_daily_report_run"):
        start = max([
            link.last_daily_report_run - timedelta(hours=24),
            link_start,
        ])

        # in cases where we may be running a report well after a link
        # has completed ensure we always use the actual start.
        if start > link_end:
            start = link_start

    else:
        start = link_start

    # never request data past the present moment
    end = min([now, link_end])

    g.log.info("generating report for link %s" % link._fullname)

    report_id = report.queue_report(
        start=start,
        end=end,
        groups=["optionId", "day"],
        parameters=[{
            "campaignId": link.external_campaign_id,
        }],
    )

    g.log.info("processing report for link (%s/%s)" %
        (link._fullname, report_id))

    try:
        _process_daily_link_report(
            link=link,
            report_id=report_id,
            queued_date=now,
        )

        g.log.info("successfully processed report for link (%s/%s)" %
            (link._fullname, report_id))
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_link_report(link)
def _handle_generate_daily_link_report(link_id):
    """Run a day-by-day report for one link and process the results.

    Links with no campaigns are skipped.  On a report failure the link is
    re-queued for another attempt.
    """
    link = Link._byID(link_id, data=True)
    campaigns = list(PromoCampaign._by_link(link._id))

    # a link with no campaigns has nothing to report on
    if not campaigns:
        return

    # the link's reporting window spans all of its campaigns; make every
    # datetime UTC-aware before comparing
    utc = pytz.utc
    now = datetime.utcnow().replace(tzinfo=utc)
    link_start = min(promo.start_date for promo in campaigns).replace(tzinfo=utc)
    link_end = max(promo.end_date for promo in campaigns).replace(tzinfo=utc)

    # Previously-processed data doesn't need to be redone: resume from the
    # last report run, backed up 24hrs because the freshest numbers are
    # preliminary.
    if hasattr(link, "last_daily_report_run"):
        start = max(link.last_daily_report_run - timedelta(hours=24), link_start)

        # a report run well after the link has completed should still use
        # the actual start date
        if start > link_end:
            start = link_start
    else:
        start = link_start

    # never request data past the present moment
    end = now if now < link_end else link_end

    g.log.info("generating report for link %s" % link._fullname)

    report_id = report.queue_report(
        start=start,
        end=end,
        groups=["optionId", "day"],
        parameters=[{
            "campaignId": link.external_campaign_id,
        }],
    )

    g.log.info("processing report for link (%s/%s)" %
               (link._fullname, report_id))

    try:
        _process_daily_link_report(
            link=link,
            report_id=report_id,
            queued_date=now,
        )
    except report.ReportFailedException as e:
        g.log.error(e)
        # retry if report failed
        _generate_link_report(link)
    else:
        g.log.info("successfully processed report for link (%s/%s)" %
                   (link._fullname, report_id))