def request_share(self) -> bool:
    """Ask this client to share Hazebot with a friend.

    Sends the share prompt only when every eligibility gate passes, and
    records a SHARE_REQUEST event on success. Returns True iff the
    message was actually sent.
    """
    if not self.is_in_send_window:
        return False
    # Client must have signed up more than 7 days ago.
    if self.created_at >= now() - datetime.timedelta(days=7):
        return False
    # Double check that we're all good to go: the last alert must fall
    # strictly inside the share window.
    window_start, window_end = self.get_share_window()
    last_alert = self.last_alert_sent_at
    if not last_alert:
        return False
    if last_alert <= window_start or last_alert >= window_end:
        return False
    # Check the last share request we sent was a long time ago.
    prior_request = self.get_last_share_request()
    if prior_request and prior_request.timestamp >= self.get_share_request_cutoff():
        return False
    message = gettext(
        "Has Hazebot been helpful? We’re looking for ways to grow and improve, and we’d love your help. Save our contact and share Hazebot with a friend, or text “feedback” to send feedback."
    )
    if not self.send_message(message):
        return False
    Event.query.create(self.id, EventType.SHARE_REQUEST)
    return True
def is_in_send_window(self) -> bool:
    """Whether the local time in this client's zipcode is inside the send window."""
    if self.zipcode_id is None:
        return False
    # Timezone can be null since our data is incomplete; fall back to Pacific.
    tz_name = self.zipcode.timezone or "America/Los_Angeles"
    local_now = now(timezone=tz_name)
    start_hour, end_hour = self.SEND_WINDOW_HOURS
    return start_hour <= local_now.hour < end_hour
def __init__(self, *args, timezone="America/Los_Angeles", render_kw: dict = None, **kwargs):
    """Datetime form field that caps the HTML widget's `max` at the field default.

    The `max` render attribute and the parse format share the same
    "%Y-%m-%dT%H:%M" layout so the browser widget and server agree.
    """
    render_kw = render_kw or {}
    # Upper bound for the picker: the provided default, or "now" if absent.
    # NOTE(review): now() appears to be a project clock helper — presumably
    # timezone-aware; confirm against airq.lib.clock.
    render_kw["max"] = kwargs.get("default", now()).strftime("%Y-%m-%dT%H:%M")
    kwargs["format"] = "%Y-%m-%dT%H:%M"
    super().__init__(*args, render_kw=render_kw, **kwargs)
    self._timezone = timezone
def admin_bulk_sms():
    """Admin view: send a bulk SMS to clients active since a chosen time."""
    if not current_user.can_send_sms:
        return redirect(url_for("admin_summary"))
    form = BulkSMSForm(last_active_at=now())
    if not form.validate_on_submit():
        # Initial GET (or validation failure): render the compose page.
        return render_template(
            "bulk_sms.html",
            form=form,
            num_inactive=Client.query.filter_inactive_since(timestamp()).count(),
        )
    bulk_send.delay(form.data["message"], form.data["last_active_at"].timestamp())
    flash("Sent!")
    return redirect(url_for("admin_summary"))
def filter_eligible_for_share_requests(self) -> "ClientQuery":
    """Narrow this query to clients eligible to receive a share request."""
    # Subquery of clients who already got a SHARE_REQUEST event after the
    # cutoff; used below as an anti-join to exclude them.
    subq = (Event.query.filter(
        Event.type_code == EventType.SHARE_REQUEST).filter(
            Event.timestamp > Client.get_share_request_cutoff()).with_entities(
                Event.client_id, Event.timestamp).subquery())
    share_window_start, share_window_end = Client.get_share_window()
    return (
        self.filter_phones().outerjoin(
            subq, and_(subq.c.client_id == Client.id)).filter(
                # Anti-join: keep only clients with NO recent share request.
                # SQLAlchemy requires `== None` here (it compiles to IS NULL);
                # `is None` would not build a SQL expression.
                subq.c.timestamp == None)
        # Client must have signed up more than 7 days ago
        .filter(
            Client.created_at < now() - datetime.timedelta(days=7)).filter(
                # Last alert must fall strictly inside the share window.
                Client.last_alert_sent_at > share_window_start).filter(
                    Client.last_alert_sent_at < share_window_end))
def should_accept_feedback(self) -> bool:
    """Whether an inbound message from this client should be treated as feedback."""
    # First check if the most recent event is a feedback begin or unsub event.
    if self.has_recent_last_events_of_type({
            EventType.FEEDBACK_BEGIN,
            EventType.UNSUBSCRIBE,
    }):
        return True
    # Then check if we have an outstanding feedback request from the last
    # four days.
    cutoff = now() - datetime.timedelta(days=4)
    request_event = self.get_event_of_type_after(EventType.FEEDBACK_REQUEST, cutoff)
    if not request_event:
        return False
    # Accept feedback only if nothing was received since that request.
    response = self.get_event_of_type_after(
        EventType.FEEDBACK_RECEIVED, request_event.timestamp)
    return not response
def get_stats(self) -> typing.Dict[str, typing.Dict[str, int]]:
    """Per-day (PST) event counts for the last 30 days, plus new-user counts.

    Returns a mapping of date string -> {metric name -> count}, with an
    extra "TOTAL" row summing the event metrics across all days.
    """
    cutoff = clock.now() - datetime.timedelta(days=30)
    metric_names = sorted([m.name for m in EventType] + ["NEW_USERS"])
    stats: typing.Dict[str, typing.Dict[str, int]] = collections.defaultdict(
        lambda: {name: 0 for name in metric_names}
    )
    totals = {name: 0 for name in metric_names}

    event_rows = (
        self.filter(Event.timestamp > cutoff)
        .with_entities(
            func.DATE(func.timezone("PST", Event.timestamp)).label("date"),
            Event.type_code,
            func.count(Event.id),
        )
        .group_by("date", Event.type_code)
        .order_by(desc("date"))
        .all()
    )
    for day, type_code, count in event_rows:
        date_key = day.strftime("%Y-%m-%d")
        metric = EventType(type_code).name
        stats[date_key][metric] = count
        totals[metric] += count

    # Ew. (Local import to dodge a circular dependency.)
    from airq.models.clients import Client

    signup_rows = (
        Client.query.filter_phones()
        .filter(Client.created_at > cutoff)
        .with_entities(
            func.DATE(func.timezone("PST", Client.created_at)).label("date"),
            func.count(Client.id),
        )
        .group_by("date")
        .order_by(desc("date"))
        .all()
    )
    for day, count in signup_rows:
        stats[day.strftime("%Y-%m-%d")]["NEW_USERS"] = count

    stats["TOTAL"] = totals
    return dict(stats)
def admin_bulk_sms():
    """Admin view: compose and dispatch a bulk SMS with locale/unsubscribe options."""
    if not current_user.can_send_sms:
        return redirect(url_for("admin_summary"))
    form = BulkSMSForm(last_active_at=now(), include_unsubscribed=False)
    if form.validate_on_submit():
        bulk_send.delay(
            message=form.data["message"],
            last_active_at=form.data["last_active_at"].timestamp(),
            locale=form.data["locale"],
            include_unsubscribed=form.data["include_unsubscribed"],
            is_feedback_request=form.data["is_feedback_request"],
        )
        flash("Sent!")
        return redirect(url_for("admin_summary"))
    # GET (or failed validation): show the form plus an inactive-client count.
    inactive_count = Client.query.filter_inactive_since(
        timestamp(), form.data["include_unsubscribed"]
    ).count()
    return render_template("bulk_sms.html", form=form, num_inactive=inactive_count)
def get_stats(self) -> typing.Dict[str, typing.Dict[str, int]]:
    """Per-day (PST) event counts over the last 30 days, plus a TOTAL row."""
    names = sorted(m.name for m in EventType)
    stats: typing.Dict[str, typing.Dict[str, int]] = collections.defaultdict(
        lambda: {name: 0 for name in names}
    )
    totals = {name: 0 for name in names}
    cutoff = clock.now() - datetime.timedelta(days=30)
    rows = (
        self.filter(Event.timestamp > cutoff)
        .with_entities(
            func.DATE(func.timezone("PST", Event.timestamp)).label("date"),
            Event.type_code,
            func.count(Event.id),
        )
        .group_by("date", Event.type_code)
        .order_by(desc("date"))
        .all()
    )
    for day, type_code, count in rows:
        metric = EventType(type_code).name
        stats[day.strftime("%Y-%m-%d")][metric] = count
        totals[metric] += count
    stats["TOTAL"] = totals
    return dict(stats)
def has_recent_last_event_of_type(self, event_type: EventType) -> bool:
    """True if the client's most recent event is `event_type` and still fresh."""
    last_event = self.get_last_client_event()
    if not last_event:
        return False
    if last_event.type_code != event_type:
        return False
    # "Fresh" means within the standard response window.
    return now() - last_event.timestamp < Client.EVENT_RESPONSE_TIME
def get_share_request_cutoff(self) -> datetime.datetime:
    """Earliest timestamp at which a prior share request no longer blocks a new one."""
    blackout = datetime.timedelta(days=60)
    return now() - blackout
def get_total_new(self) -> int:
    """Number of new clients in the last day"""
    count = (
        self.filter_phones()
        # Compare the PST-localized creation time against today's date.
        .filter(func.timezone("PST", Client.created_at) > now().date())
        .with_entities(func.count(Client.id))
        .scalar()
    )
    return count or 0
def should_accept_feedback(self) -> bool:
    """Whether the client's latest event leaves a feedback conversation open."""
    last_event = self.get_last_client_event()
    if not last_event:
        return False
    if last_event.type_code not in (EventType.FEEDBACK_BEGIN, EventType.UNSUBSCRIBE):
        return False
    # Only accept feedback while the response window is still open.
    return now() - last_event.timestamp < Client.FEEDBACK_RESPONSE_TIME
def _metrics_sync():
    """Recompute per-zipcode air-quality metrics from recent sensor readings.

    Groups readings from sensors that reported within the last 30 minutes
    by zipcode, averages the closest sensors for each zipcode, and
    bulk-updates the Zipcode rows in batches.
    """
    logger = get_celery_logger()
    updates = []
    ts = now()
    zipcodes_to_sensors = collections.defaultdict(list)
    # Collect (reading, humidity, pm_cf_1, sensor id, distance) tuples for
    # sensors updated in the last 30 minutes, keyed by zipcode.
    for zipcode_id, latest_reading, humidity, pm_cf_1, sensor_id, distance in (
            Sensor.query.join(SensorZipcodeRelation).filter(
                Sensor.updated_at > ts.timestamp() - (30 * 60)).with_entities(
                    SensorZipcodeRelation.zipcode_id,
                    Sensor.latest_reading,
                    Sensor.humidity,
                    Sensor.pm_cf_1,
                    Sensor.id,
                    SensorZipcodeRelation.distance,
                ).all()):
        zipcodes_to_sensors[zipcode_id].append(
            (latest_reading, humidity, pm_cf_1, sensor_id, distance))
    for zipcode_id, sensor_tuples in zipcodes_to_sensors.items():
        pm_25_readings: typing.List[float] = []
        pm_cf_1_readings: typing.List[float] = []
        humidities: typing.List[float] = []
        closest_reading = float("inf")
        farthest_reading = 0.0
        sensor_ids: typing.List[int] = []
        # Walk sensors nearest-first (sorted by distance, the tuple's last
        # field); stop as soon as we have enough readings AND the next
        # sensor is beyond the desired distance.
        for pm_25, humidity, pm_cf_1, sensor_id, distance in sorted(
                sensor_tuples, key=lambda s: s[-1]):
            if (len(pm_25_readings) < DESIRED_NUM_READINGS
                    or distance < DESIRED_READING_DISTANCE_KM):
                pm_25_readings.append(pm_25)
                humidities.append(humidity)
                pm_cf_1_readings.append(pm_cf_1)
                sensor_ids.append(sensor_id)
                closest_reading = min(distance, closest_reading)
                farthest_reading = max(distance, farthest_reading)
            else:
                break
        if pm_25_readings:
            num_sensors = len(pm_25_readings)
            # Arithmetic means over the selected sensors, rounded for storage.
            pm25 = round(sum(pm_25_readings) / num_sensors, ndigits=3)
            humidity = round(sum(humidities) / num_sensors, ndigits=3)
            pm_cf_1 = round(sum(pm_cf_1_readings) / num_sensors, ndigits=3)
            min_sensor_distance = round(closest_reading, ndigits=3)
            max_sensor_distance = round(farthest_reading, ndigits=3)
            details = {
                "num_sensors": num_sensors,
                "min_sensor_distance": min_sensor_distance,
                "max_sensor_distance": max_sensor_distance,
                "sensor_ids": sensor_ids,
            }
            updates.append({
                "id": zipcode_id,
                "pm25": pm25,
                "humidity": humidity,
                "pm_cf_1": pm_cf_1,
                "pm25_updated_at": ts.timestamp(),
                "metrics_data": details,
            })
    logger.info("Updating %s zipcodes", len(updates))
    # Chunked writes keep each bulk UPDATE statement bounded.
    for mappings in chunk_list(updates, batch_size=5000):
        db.session.bulk_update_mappings(Zipcode, mappings)
        db.session.commit()