def get_us_daily():
    """Return US-level daily totals: each numeric field summed across states,
    grouped by date, newest date first."""
    flask.current_app.logger.info('Retrieving US Daily')
    include_preview = request.args.get('preview',
                                       default=False,
                                       type=inputs.boolean)
    states_daily = states_daily_query(
        preview=include_preview).subquery('states_daily')

    # Sum every numeric CoreData column over the states_daily subquery.
    aggregates = [
        label(name, func.sum(getattr(states_daily.c, name)))
        for name in CoreData.numeric_fields()
    ]
    # 'states' counts rows contributing to each date; assuming states_daily
    # yields one row per state, this is the number of reporting states.
    aggregates.append(label('states', func.count()))

    rows = (db.session.query(states_daily.c.date, *aggregates)
            .group_by(states_daily.c.date)
            .order_by(states_daily.c.date.desc())
            .all())

    us_data_by_date = []
    for row in rows:
        day_dict = row._asdict()
        # Overwrite date fields with the string formats the API exposes.
        day_dict['dateChecked'] = row.date.isoformat()
        day_dict['date'] = row.date.strftime('%Y-%m-%d')
        us_data_by_date.append(day_dict)

    return flask.jsonify(us_data_by_date)
def get_states_daily():
    """Return every States Daily row (optionally including preview data)
    as a JSON array of dicts."""
    flask.current_app.logger.info('Retrieving States Daily')
    include_preview = request.args.get('preview',
                                       default=False,
                                       type=inputs.boolean)
    rows = states_daily_query(preview=include_preview).all()
    return flask.jsonify([row.to_dict() for row in rows])
def get_states_daily_for_state(state):
    """Return States Daily rows for one state as JSON, or a 404 response
    when no rows exist (e.g. unknown state abbreviation)."""
    flask.current_app.logger.info('Retrieving States Daily for state %s' %
                                  state)
    include_preview = request.args.get('preview',
                                       default=False,
                                       type=inputs.boolean)
    rows = states_daily_query(state=state.upper(),
                              preview=include_preview).all()
    if not rows:
        # likely state not found
        return flask.Response("States Daily data unavailable for state %s" %
                              state,
                              status=404)

    return flask.jsonify([row.to_dict() for row in rows])
def get_states_daily_v2_internal(state=None,
                                 include_preview=False,
                                 simple=False):
    """Build the v2 States Daily response.

    Args:
        state: two-letter state code, or None for all states.
        include_preview: if True, include unpublished (preview) batches.
        simple: if True, emit the reduced field set and skip the
            calculated-values precomputation.

    Returns:
        A Flask response containing pretty-printed JSON, or a plain
        "data unavailable" response when the query returns nothing.
    """
    latest_daily_data = states_daily_query(
        state=state.upper() if state else None, preview=include_preview).all()
    if len(latest_daily_data) == 0:
        # likely state not found.
        # BUG FIX: the conditional expression binds tighter than '%', so the
        # original ('...%s' % state if state else 'all') returned the literal
        # body "all" whenever state was None/empty. Parenthesize the operand.
        # NOTE(review): no explicit status is set here (defaults to 200);
        # sibling endpoints return 404 for the same condition — confirm intent.
        return flask.Response('States Daily data unavailable for state %s' %
                              (state if state else 'all'))

    # only do the caching/precomputation of calculated data if we need to
    calculator = None if simple else ValuesCalculator(latest_daily_data)
    out_data = []
    for core_data in latest_daily_data:
        # this and the "meta" definition are only relevant for states, not US
        last_update_time = get_value(core_data, 'lastUpdateTime')
        if last_update_time is not None:
            last_update_time = CoreData.stringify(last_update_time)
        meta = {
            'data_quality_grade': get_value(core_data, 'dataQualityGrade'),
            'updated':
            last_update_time,  # TODO: does this need to be local TZ?
            'tests': {
                'total_source': core_data.totalTestResultsSource
            }
        }
        core_data_nested_dict = {
            'date': get_value(core_data, 'date').strftime('%Y-%m-%d'),
            'state': get_value(core_data, 'state'),
            'meta': meta,
        }

        # simple output: raw fields only; full output: adds calculated values
        core_actual_data_dict = convert_state_core_data_to_simple_output(core_data) if simple \
            else convert_state_core_data_to_full_output(core_data, calculator)
        core_data_nested_dict.update(core_actual_data_dict)
        out_data.append(core_data_nested_dict)

    base_link = 'https://api.covidtracking.com/states'
    link = '%s/%s/daily' % (base_link,
                            state) if state else '%s/daily' % (base_link)
    if simple:
        link += '/simple'
    out = output_with_metadata(out_data, link)

    # Hand-built response to control key order and indentation in the JSON.
    response = flask.current_app.response_class(
        json.dumps(out, sort_keys=False, indent=2),
        mimetype=flask.current_app.config['JSONIFY_MIMETYPE'])
    return response
def get_states_daily_data(preview, limit):
    """Return States Daily rows as dicts, with date fields rewritten to the
    legacy public-sheet formats ("%Y%m%d" dates, Eastern-time dateChecked)."""
    rows = states_daily_query(preview=preview, limit=limit).all()

    eastern_time = tz.gettz('EST')
    reformatted_data = []
    for row in rows:
        entry = row.to_dict()
        entry['date'] = row.date.strftime("%Y%m%d")
        # due to DST issues, this time needs to be advanced forward one hour
        # to match the old output
        if row.dateChecked:
            shifted = row.dateChecked.astimezone(eastern_time) + timedelta(
                hours=1)
            entry['dateChecked'] = shifted.strftime("%-m/%d/%Y %H:%M")
        else:
            entry['dateChecked'] = ""

        # add the row to the output
        reformatted_data.append(entry)
    return reformatted_data
# Esempio n. 6 (Example 6), score 0 — sample separator from the scraped source
def get_states_daily_csv():
    """CSV output for the States Daily / States Current endpoints, using the
    legacy public-sheet date formats. For the /current endpoint only the
    newest row per state is emitted."""
    flask.current_app.logger.info('Retrieving States Daily')
    include_preview = request.args.get('preview',
                                       default=False,
                                       type=inputs.boolean)
    rows = states_daily_query(preview=include_preview).all()

    eastern_time = tz.gettz('EST')
    state_latest_dates = {}  # state -> newest date emitted so far
    reformatted_data = []
    for row in rows:
        entry = row.to_dict()
        entry['date'] = row.date.strftime("%Y%m%d")
        # due to DST issues, this time needs to be advanced forward one hour
        # to match the old output
        if row.dateChecked:
            checked = row.dateChecked.astimezone(eastern_time) + timedelta(
                hours=1)
            entry['dateChecked'] = checked.strftime("%-m/%d/%Y %H:%M")
        else:
            entry['dateChecked'] = ""

        # for the /current endpoint, only add the row if it's the latest
        # data for the state
        if request.endpoint == 'api.states_current':
            # skip this row if we've already seen a newer one for the state
            if state_latest_dates.get(row.state, row.date) > row.date:
                continue
            state_latest_dates[row.state] = row.date

        # add the row to the output
        reformatted_data.append(entry)

    columns = STATES_DAILY if request.endpoint == 'api.states_daily' \
        else STATES_CURRENT
    return make_csv_response(select(columns), reformatted_data)
# Esempio n. 7 (Example 7), score 0 — sample separator from the scraped source
def edit_core_data_from_states_daily():
    """Apply a States Daily edit request.

    Expects a JSON payload containing 'context' (batch metadata, including
    the state being edited) and 'coreData' (a list of row dicts). Creates a
    new published revision Batch, and for each input row that differs from
    the latest data for that state/date, stores a new CoreData row tied to
    the batch. Notifies Slack with the diffs and returns a JSON summary
    with HTTP 201, or an error string with HTTP 400.
    """
    payload = flask.request.json
    flask.current_app.logger.info(
        'Received a CoreData States Daily edit request: %s' % payload)

    # validate input data
    try:
        validate_edit_data_payload(payload)
    except ValueError as e:
        flask.current_app.logger.error("Edit data failed validation: %s" %
                                       str(e))
        notify_slack_error(str(e), 'edit_core_data_from_states_daily')
        return str(e), 400

    context = payload['context']
    flask.current_app.logger.info('Creating new batch from context: %s' %
                                  context)
    batch = Batch(**context)
    batch.user = get_jwt_identity()
    batch.isRevision = True
    batch.isPublished = True  # edit batches are published immediately
    batch.publishedAt = datetime.utcnow()
    db.session.add(batch)
    db.session.flush(
    )  # this sets the batch ID, which we need for corresponding coreData objects

    state_to_edit = payload['context']['state']
    # NOTE(review): the semantics of the `research` flag live in
    # states_daily_query and are not visible here — confirm.
    latest_daily_data_for_state = states_daily_query(state=state_to_edit,
                                                     research=True).all()

    # split up by date for easier lookup/comparison with input edit rows
    key_to_date = defaultdict(dict)  # state -> date -> data
    for state_daily_data in latest_daily_data_for_state:
        key_to_date[state_daily_data.state][
            state_daily_data.date] = state_daily_data

    # keep track of all our changes as we go
    core_data_objects = []
    changed_rows = []
    new_rows = []

    # check each core data row that the corresponding date/state already exists in published form
    for core_data_dict in payload['coreData']:
        state = core_data_dict['state']
        valid, unknown = CoreData.valid_fields_checker(core_data_dict)
        if not valid:
            # there are no fields to add/update
            flask.current_app.logger.info(
                'Got row without updates, skipping. %r' % core_data_dict)
            continue

        # is there a date for this?
        # check that there exists at least one published row for this date/state
        date = CoreData.parse_str_to_date(core_data_dict['date'])
        data_for_date = key_to_date.get(state, {}).get(date)
        core_data_dict['batchId'] = batch.batchId
        edited_core_data = None

        if not data_for_date:
            # this is a new row: we treat this as a changed date

            # TODO: uncomment these 3 lines if we want to enforce editing only existing date rows
            # error = 'Attempting to edit a nonexistent date: %s' % core_data_dict['date']
            # flask.current_app.logger.error(error)
            # return flask.jsonify(error), 400

            flask.current_app.logger.info(
                'Row for date not found, making new edit row: %s' % date)
            edited_core_data = CoreData(**core_data_dict)
            new_rows.append(edited_core_data)
        else:
            # this row already exists, check each property to see if anything changed.
            changed_for_date = data_for_date.field_diffs(core_data_dict)
            if changed_for_date:
                changed_rows.append(changed_for_date)
                edited_core_data = data_for_date.copy_with_updates(
                    **core_data_dict)

        # if any value in the row is different, make an edit batch
        if edited_core_data:
            # store the changes
            db.session.add(edited_core_data)
            core_data_objects.append(edited_core_data)
            db.session.flush()
            flask.current_app.logger.info('Adding new edit row: %s' %
                                          edited_core_data.to_dict())
        else:
            # there were no changes
            flask.current_app.logger.info(
                'All values are the same for date %s, ignoring' % date)

    db.session.flush()

    diffs = EditDiff(changed_rows, new_rows)
    if diffs.is_empty():
        # there are no changes, nothing to do
        notify_slack_error(
            f"*Received edit batch #{batch.batchId}*. state: {state_to_edit}. (user: {batch.shiftLead})\n"
            f"{batch.batchNote} but no differences detected, data is unchanged",
            "edit_states_daily")

        return 'Data is unchanged: no edits detected', 400

    batch.changedFields = diffs.changed_fields
    batch.numRowsEdited = diffs.size()
    db.session.flush()

    # TODO: change consumer of this response to use the changedFields, changedDates, numRowsEdited
    # from the "batch" object, then remove those keys from the JSON response
    json_to_return = {
        'batch': batch.to_dict(),
        'changedFields': batch.changedFields,
        'changedDates': diffs.changed_dates_str,
        'numRowsEdited': batch.numRowsEdited,
        'user': get_jwt_identity(),
        'coreData': [core_data.to_dict() for core_data in core_data_objects],
    }

    db.session.commit()

    # collect all the diffs for the edits we've made and format them for a slack notification
    diffs_for_slack = diffs.plain_text_format()

    notify_slack(
        f"*Pushed and published {batch.dataEntryType} batch #{batch.batchId}*. state: {state_to_edit}. (user: {batch.shiftLead})\n"
        f"{batch.batchNote}", diffs_for_slack)

    return flask.jsonify(json_to_return), 201
# Esempio n. 8 (Example 8), score 0 — sample separator from the scraped source
def edit_core_data_from_states_daily():
    """Apply a States Daily edit request (inline field-diff variant).

    Expects a JSON payload containing 'context' (batch metadata, which must
    include 'state') and 'coreData' (a list of row dicts). Creates a new
    published revision Batch, and stores a new CoreData row for every input
    row whose values differ from the currently-known row for the same date.
    Notifies Slack and returns a JSON summary with HTTP 201, or a JSON
    error with HTTP 400.
    """
    payload = flask.request.json
    flask.current_app.logger.info(
        'Received a CoreData States Daily edit request: %s' % payload)

    # test input data
    try:
        validate_edit_data_payload(payload)
    except ValueError as e:
        flask.current_app.logger.error("Edit data failed validation: %s" %
                                       str(e))
        notify_slack_error(str(e), 'edit_core_data_from_states_daily')
        return flask.jsonify(str(e)), 400

    # we construct the batch from the push context
    context = payload['context']

    # check that the state is set
    state_to_edit = context.get('state')
    if not state_to_edit:
        # BUG FIX: this branch previously interpolated `str(e)`, but `e` is
        # only bound inside the except clause above, so reaching this line
        # raised NameError. Log the offending context instead.
        flask.current_app.logger.error(
            "No state specified in batch edit context: %s" % context)
        notify_slack_error('No state specified in batch edit context',
                           'edit_core_data_from_states_daily')
        return flask.jsonify('No state specified in batch edit context'), 400

    flask.current_app.logger.info('Creating new batch from context: %s' %
                                  context)

    batch = Batch(**context)
    batch.user = get_jwt_identity()
    batch.isRevision = True
    batch.isPublished = True  # edit batches are published by default
    batch.publishedAt = datetime.utcnow()
    db.session.add(batch)
    db.session.flush(
    )  # this sets the batch ID, which we need for corresponding coreData objects

    latest_daily_data_for_state = states_daily_query(state=state_to_edit).all()

    # split up by date for easier lookup/comparison with input edit rows
    date_to_data = {}
    for state_daily_data in latest_daily_data_for_state:
        date_to_data[state_daily_data.date] = state_daily_data

    # check each core data row that the corresponding date/state already exists in published form
    core_data_objects = []
    changed_fields = set()
    changed_dates = set()
    for core_data_dict in payload['coreData']:
        # this state has to be identical to the state from the context
        state = core_data_dict['state']
        if state != state_to_edit:
            error = 'Context state %s does not match JSON data state %s' % (
                state_to_edit, state)
            flask.current_app.logger.error(error)
            notify_slack_error(error, 'edit_core_data_from_states_daily')
            return flask.jsonify(error), 400

        # is there a date for this?
        # check that there exists at least one published row for this date/state
        date = CoreData.parse_str_to_date(core_data_dict['date'])
        data_for_date = date_to_data.get(date)

        changed_fields_for_date = set()
        is_edited = False

        # make a new CoreData object, which we will add if we determine it represents an edited row
        core_data_dict['batchId'] = batch.batchId
        edited_core_data = CoreData(**core_data_dict)

        if not data_for_date:
            # this is a new row: we treat this as a changed date

            # TODO: uncomment these 3 lines if we want to enforce editing only existing date rows
            # error = 'Attempting to edit a nonexistent date: %s' % core_data_dict['date']
            # flask.current_app.logger.error(error)
            # return flask.jsonify(error), 400

            flask.current_app.logger.info(
                'Row for date not found, making new edit row: %s' % date)
            is_edited = True

        else:
            # this row already exists, but check each value to see if anything changed. Easiest way
            # to do this is to make a new CoreData and compare it to the existing one

            for field in CoreData.__table__.columns.keys():
                # we expect batch IDs to be different, skip comparing those
                if field == 'batchId':
                    continue
                # for any other field, compare away
                if getattr(data_for_date, field) != getattr(
                        edited_core_data, field):
                    changed_fields_for_date.add(field)
                    is_edited = True

        # if any value in the row is different, make an edit batch
        if is_edited:
            db.session.add(edited_core_data)
            core_data_objects.append(edited_core_data)
            flask.current_app.logger.info('Adding new edit row: %s' %
                                          edited_core_data.to_dict())
            changed_fields.update(changed_fields_for_date)
            changed_dates.add(date)
        else:
            flask.current_app.logger.info(
                'All values are the same for date %s, ignoring' % date)

    db.session.flush()

    # BUG FIX: with no edited rows, sorted(changed_dates)[0] below raised
    # IndexError (an HTTP 500); report the no-op explicitly instead, matching
    # the behavior of the other edit handlers in this file.
    if not changed_dates:
        notify_slack_error('Data is unchanged: no edits detected',
                           'edit_core_data_from_states_daily')
        return flask.jsonify('Data is unchanged: no edits detected'), 400

    # which dates got changed? (sort once instead of twice)
    ordered_dates = sorted(changed_dates)
    start = ordered_dates[0].strftime('%-m/%-d/%y')
    end = ordered_dates[-1].strftime('%-m/%-d/%y')
    changed_dates_str = start if start == end else '%s - %s' % (start, end)

    json_to_return = {
        'batch': batch.to_dict(),
        'changedFields': list(changed_fields),
        'changedDates': changed_dates_str,
        'numRowsEdited': len(changed_dates),
        'user': get_jwt_identity(),
        'coreData': [core_data.to_dict() for core_data in core_data_objects],
    }

    db.session.commit()
    notify_slack(
        f"*Pushed and published edit batch #{batch.batchId}*. state: {state_to_edit}. (user: {batch.shiftLead})\n"
        f"{batch.batchNote}")

    return flask.jsonify(json_to_return), 201
# Esempio n. 9 (Example 9), score 0 — sample separator from the scraped source
def edit_states_daily_internal(user,
                               context,
                               core_data,
                               state_to_edit=None,
                               publish=False):
    """Core implementation of a States Daily edit.

    Args:
        user: identity string recorded on the batch.
        context: dict of batch metadata used to construct the Batch.
        core_data: list of row dicts to apply as edits.
        state_to_edit: optional state code; when set, every row must match it.
        publish: if True, the batch is marked published with a timestamp.

    Returns:
        (JSON summary, 201) on success, or (error string, 400) on a state
        mismatch or when no row differs from the existing data.
    """
    flask.current_app.logger.info('Creating new batch from context: %s' %
                                  context)

    batch = Batch(**context)
    batch.user = user
    batch.isRevision = True
    batch.isPublished = publish
    if publish:
        batch.publishedAt = datetime.utcnow()
    db.session.add(batch)
    db.session.flush(
    )  # this sets the batch ID, which we need for corresponding coreData objects

    latest_daily_data_for_state = states_daily_query(state=state_to_edit).all()

    # split up by date for easier lookup/comparison with input edit rows
    date_to_data = {}
    for state_daily_data in latest_daily_data_for_state:
        date_to_data[state_daily_data.date] = state_daily_data

    # keep track of all our changes as we go
    core_data_objects = []
    changed_rows = []
    new_rows = []

    # check each core data row that the corresponding date/state already exists in published form
    for core_data_dict in core_data:
        # this state has to be identical to the state from the context
        state = core_data_dict['state']
        if state_to_edit and state != state_to_edit:
            error = 'Context state %s does not match JSON data state %s' % (
                state_to_edit, state)
            flask.current_app.logger.error(error)
            notify_slack_error(error, 'edit_core_data_from_states_daily')
            return error, 400

        valid, unknown = CoreData.valid_fields_checker(core_data_dict)
        if not valid:
            # there are no fields to add/update
            flask.current_app.logger.info(
                'Got row without updates, skipping. %r' % core_data_dict)
            continue

        if unknown:
            # report unknown fields, we won't fail the request, but should at least log
            flask.current_app.logger.warning(
                'Got row with unknown field updates: %s. %r' %
                (unknown, core_data_dict))

        # is there a date for this?
        # check that there exists at least one published row for this date/state
        date = CoreData.parse_str_to_date(core_data_dict['date'])
        data_for_date = date_to_data.get(date)
        core_data_dict['batchId'] = batch.batchId
        edited_core_data = None

        if not data_for_date:
            # this is a new row: we treat this as a changed date

            # TODO: uncomment these 3 lines if we want to enforce editing only existing date rows
            # error = 'Attempting to edit a nonexistent date: %s' % core_data_dict['date']
            # flask.current_app.logger.error(error)
            # return flask.jsonify(error), 400

            flask.current_app.logger.info(
                'Row for date not found, making new edit row: %s' % date)
            edited_core_data = CoreData(**core_data_dict)
            new_rows.append(edited_core_data)
        else:
            # this row already exists, but check each value to see if anything changed. Easiest way
            changed_for_date = data_for_date.field_diffs(core_data_dict)
            if changed_for_date:
                changed_rows.append(changed_for_date)
                edited_core_data = data_for_date.copy_with_updates(
                    **core_data_dict)

        # if any value in the row is different, make an edit batch
        if edited_core_data:
            # store the changes
            db.session.add(edited_core_data)
            core_data_objects.append(edited_core_data)
            db.session.flush()
            flask.current_app.logger.info('Adding new edit row: %s' %
                                          edited_core_data.to_dict())
        else:
            # there were no changes
            flask.current_app.logger.info(
                'All values are the same for date %s, ignoring' % date)

    db.session.flush()

    diffs = EditDiff(changed_rows, new_rows)
    if diffs.is_empty():
        # there are no changes, nothing to do
        notify_slack_error(
            f"*Received edit batch #{batch.batchId}*. state: {state_to_edit}. (user: {batch.shiftLead})\n"
            f"{batch.batchNote} but no differences detected, data is unchanged",
            "edit_states_daily")

        return 'Data is unchanged: no edits detected', 400

    batch.changedFields = diffs.changed_fields
    batch.numRowsEdited = diffs.size()
    db.session.flush()

    # TODO: change consumer of this response to use the changedFields, changedDates, numRowsEdited
    # from the "batch" object, then remove those keys from the JSON response
    json_to_return = {
        'batch': batch.to_dict(),
        'changedFields': batch.changedFields,
        'changedDates': diffs.changed_dates_str,
        'numRowsEdited': batch.numRowsEdited,
        'user': get_jwt_identity(),
        'coreData': [core_data.to_dict() for core_data in core_data_objects],
    }

    db.session.commit()

    # collect all the diffs for the edits we've made and format them for a slack notification
    diffs_for_slack = diffs.plain_text_format()

    notify_slack(
        f"*Pushed and published edit batch #{batch.batchId}*. state: {state_to_edit}. (user: {batch.shiftLead})\n"
        f"{batch.batchNote}", diffs_for_slack)

    return flask.jsonify(json_to_return), 201