def get_color_class(tender, class_value=''):
    """Return class_value plus a Bootstrap background class for the tender's state."""
    if tender.winner:
        return class_value + 'bg-danger'
    if tender.favourite:
        return class_value + 'bg-warning'
    if tender.published > days_ago(7):
        return class_value + 'bg-info'
    return class_value
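Every example on this page calls a repo-local `days_ago` helper that is not shown. A minimal sketch of the usual shape, assuming it returns the datetime a given number of days before now (the exact return type varies per repo; Example #3 below expects a date string instead):

from datetime import datetime, timedelta

def days_ago(days):
    # Hypothetical helper: the datetime `days` days before now.
    # Individual projects may return a date or a formatted string instead.
    return datetime.now() - timedelta(days=days)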
Example #2
def server_app(db):
    """
	Runner for server.
	"""

    # db.add_all_nodes()
    while True:
        # Each day, add date as key to dataset and drop the oldest day
        for _ in range(7):
            time.sleep(86400)  # 86400 seconds = one day

            date_to_drop = utils.days_ago(utils.DAYS_TO_KEEP + 1)
            nodes = db.get_nodes()
            for node in nodes:
                node["rain_data"].pop(date_to_drop)
                node["avg_levels"].pop(date_to_drop)
                node["is_flooded"].pop(date_to_drop)

                node["rain_data"][utils.cur_date()] = []
                node["avg_levels"][utils.cur_date()] = 0
                node["is_flooded"][utils.cur_date()] = 0

        # Each week, retrain the models using new data.
        # In production, use weather API data. For now, use generated values to test.

        # Dump updated nodewise average water level data to depths_train.txt
        depths_dict = node_api.get_reported_water_levels()
        # Rows are dates, columns are node ids, matching the indexing below.
        depths = np.zeros((len(depths_dict[0]), len(depths_dict)))
        for date_idx, date in enumerate(sorted(depths_dict[0].keys())):
            for node_id in sorted(depths_dict.keys()):
                readings = depths_dict[node_id][date]
                # np.mean([]) is nan (not falsy), so test the list itself.
                depths[date_idx, node_id] = np.mean(readings) if readings else 0.0
        np.savetxt("depths_train.txt", depths)

        # Retrain models
        db.linear_model = models.LinearRainModel()
        for model in db.XGB_models.values():
            model.train()
            model.test()
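A quick toy check of the matrix layout built above (hypothetical data; assumes `depths_dict` maps integer node ids to `{date: [readings]}` with the same date keys for every node):

import numpy as np

depths_dict = {
    0: {"2024-01-01": [1.0, 2.0], "2024-01-02": []},
    1: {"2024-01-01": [3.0], "2024-01-02": [4.0, 6.0]},
}
depths = np.zeros((len(depths_dict[0]), len(depths_dict)))
for date_idx, date in enumerate(sorted(depths_dict[0])):
    for node_id in sorted(depths_dict):
        readings = depths_dict[node_id][date]
        depths[date_idx, node_id] = np.mean(readings) if readings else 0.0
print(depths)  # [[1.5 3. ]
               #  [0.  5. ]]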
Example #3
def harvest_updates(days=2, test=False):
    """
    Fetch updated pics and write to file.
    Default to days as 2 so that we get yesterday's date.
    """
    updated_date = days_ago(days)
    logger.info("Harvesting updated pictures since {}.".format(updated_date))
    query = QUERY.replace("2000-01-01", updated_date)
    g = Graph()
    done = 0
    for pict in client.filter_query(query):
        g += client.to_graph(pict, PersonPicture)
        done += 1
        if test and done > 10:
            break
    if len(g) > 0:
        backend.post_updates(NG, g)
        logger.info(
            "Updated picture harvest complete. Updated: {}".format(done))
    else:
        logger.info("No updated pictures found.")
Example #4
def dashboard(request):
  # Aggregate data.
  # Recent 90 days trend
  start_date = days_ago(90)
  base_ = Checkee.objects.filter(checked_at__gt=start_date)
  raw_v_data = {}  # Raw cleared data
  raw_s_data = {}  # Raw serializable data
  aggr_plot_data = {}
  aggr_data = {}
  for visa_type_ in const.VISA_TYPES:
    visa_type = visa_type_[0]
    raw_v_data[visa_type] = base_.filter(
        application_status='Clear', visa_type=visa_type)
    raw_s_data[visa_type] = DateTimeJSONEncoder().encode(
        list(base_.filter(visa_type=visa_type).values_list(
            'checked_at', 'cleared_at', 'application_status')))
    # Calculate aggregated data
    v_dat = {}
    c_dates = raw_v_data[visa_type].values_list(
        'cleared_at', 'checked_at')
    all_waits = [(d[0] - d[1]).days for d in c_dates]
    try:
      v_dat['avg_wait'] = sum(all_waits) / len(all_waits)
    except ZeroDivisionError:
      v_dat['avg_wait'] = None
    v_dat['applicants'] = base_.filter(visa_type=visa_type).count()
    v_dat['cleared'] = len(all_waits)
    try:
      v_dat['cleared_ratio'] = '%.2f%%' % (
          100.0 * v_dat['cleared'] / v_dat['applicants'])
    except ZeroDivisionError:
      v_dat['cleared_ratio'] = None
    try:
      v_dat['last_clearance'] = sorted(
          [t[0] for t in c_dates])[-1].strftime(const.CHECKEE_TIMEFMT)
    except IndexError:
      v_dat['last_clearance'] = None
    try:
      v_dat['last_application'] = sorted(
          [t[1] for t in c_dates])[-1].strftime(const.CHECKEE_TIMEFMT)
    except IndexError:
      v_dat['last_application'] = None
    aggr_data[visa_type] = v_dat

    checked_days = [(d.checked_at, d.checked_days.days) for d in
        raw_v_data[visa_type].extra(select={
            'checked_days': 'cleared_at - checked_at'})]
    aggr_plot_data[visa_type] = dataprocessing.split_aggregate(checked_days,
                                                               date_interval=3,
                                                               use_none=True)

  # Calculate weekday distribution
  cleared_dates = [
      d.strftime("%A") for d in base_.values_list('cleared_at', flat=True) if d]
  weekday = collections.Counter(cleared_dates).most_common()

  # Calculate overall distribution
  # Take the max before formatting: sorting formatted strings is only
  # chronological for ISO-style formats.
  last_clear_at = max(
      d.cleared_at for d in base_ if d.cleared_at).strftime(const.CHECKEE_TIMEFMT)
  last_application_cleared = max(
      d.checked_at for d in base_ if d.cleared_at).strftime(const.CHECKEE_TIMEFMT)
  valid_wait_time = [
      (d.cleared_at - d.checked_at).days for d in base_ if d.cleared_at]
  avg_wait_time = int(round(1.0 * sum(valid_wait_time) / len(valid_wait_time)))

  # Split and show distribution
  total_checked_ = [
      (d['checked_at'], d['total'])
      for d in base_.values('checked_at').annotate(total=Count('checked_at'))
      if d['checked_at']]
  total_cleared_ = [
      (d['cleared_at'], d['total'])
      for d in base_.values('cleared_at').annotate(total=Count('cleared_at'))
      if d['cleared_at']]

  total_checked = dataprocessing.split_aggregate(total_checked_,
                                                 date_interval=1,
                                                 start_date=start_date,
                                                 as_dict=False)
  total_cleared = dataprocessing.split_aggregate(total_cleared_,
                                                 date_interval=1,
                                                 start_date=start_date,
                                                 as_dict=False)
  return render_to_response('dashboard.html', {
      'last_clear_at': last_clear_at,
      'last_application_cleared': last_application_cleared,
      'avg_wait_time': avg_wait_time,
      'raw_data': raw_s_data,
      'aggr_data': aggr_data,
      'aggr_plot_data': DateTimeJSONEncoder().encode(aggr_plot_data),
      'weekday': DateTimeJSONEncoder().encode(weekday),
      'total_checked': DateTimeJSONEncoder().encode(total_checked),
      'total_cleared': DateTimeJSONEncoder().encode(total_cleared),
  })
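`DateTimeJSONEncoder` is imported from elsewhere in this project. A minimal stand-in with the behaviour the view relies on (serializing the datetimes inside the encoded lists), assuming the real class is a JSONEncoder subclass:

import datetime
import json

class DateTimeJSONEncoder(json.JSONEncoder):
    # Hypothetical stand-in: emit datetimes/dates as ISO 8601 strings and
    # defer everything else to the default encoder.
    def default(self, o):
        if isinstance(o, (datetime.datetime, datetime.date)):
            return o.isoformat()
        return super(DateTimeJSONEncoder, self).default(o)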
Example #5
def visa_type_details(request, visa_type):
  try:
    days = int(request.GET.get('days'))
  except (TypeError, ValueError):
    # Missing or non-numeric ?days= parameter falls back to the default.
    days = 90

  # Clamp the requested window to [10, 120] days.
  days = max(10, min(days, 120))

  base_ = Checkee.objects.filter(
      checked_at__gt=(days_ago(days)),
      visa_type=visa_type
  )
  start_date_ = base_.order_by('checked_at')[0].checked_at
  pending = Q(application_status='Pending')

  raw_data_dist_ = base_.filter(~pending).values_list('checked_at', 'cleared_at')
  raw_data_dist = sorted([
      ((d1 - days_ago(days)).days, (d2 - d1).days)
      for (d1, d2) in raw_data_dist_])

  c = {'visa_type': visa_type}
  total_ = [
      (d['checked_at'], d['total'])
      for d in base_.values('checked_at').annotate(total=Count('checked_at'))]
  cleared_ = [
      (d['checked_at'], d['total'])
      for d in base_.filter(~pending).values('checked_at').annotate(
      total=Count('checked_at'))]

  checked_days_ = [(d.checked_at, d.checked_days.days)
      for d in base_.filter(~pending).extra(select={
          'checked_days': 'cleared_at - checked_at'})]

  total = dataprocessing.split_aggregate(total_, start_date=start_date_,
                                         as_dict=True, include_zero=False)
  cleared = dataprocessing.split_aggregate(cleared_, start_date=start_date_,
                                           as_dict=True, include_zero=False)
  checked_days = dataprocessing.split_aggregate(checked_days_,
                                                start_date=start_date_,
                                                as_dict=True,
                                                include_zero=False)
  # Union of bucket dates (dict.keys() concatenation was Python 2 only).
  keys = set(total) | set(cleared)
  raw_data = []
  norm_data = []
  for k in sorted(keys):
    t1 = total.get(k, 0)
    t2 = cleared.get(k, 0)

    raw_data.append([k, (t1 - t2), t2])  # Date, Pending, Cleared
    # Cleared-to-total ratio; guard buckets where nothing was checked.
    t4 = 100 * t2 / t1 if t1 else 0
    norm_data.append([k, t4])

  # Pending cases data table
  pending_cases = list(base_.filter(pending).order_by(
      'checked_at').values_list('checked_at', 'consulate', 'major'))

  cleared_cases_ = base_.filter(~pending).order_by(
      'checked_at', 'cleared_at').values_list('checked_at', 'cleared_at',
                                              'consulate', 'major')

  cleared_cases = [(ii, jj, (jj-ii).days, kk, ll)
                   for (ii, jj, kk, ll) in cleared_cases_]

  c['raw_data_dist'] = DateTimeJSONEncoder().encode(raw_data_dist)
  c['norm_data'] = DateTimeJSONEncoder().encode(norm_data)
  c['checked_days'] = DateTimeJSONEncoder().encode(checked_days)
  c['pending_cases'] = DateTimeJSONEncoder().encode(pending_cases)
  c['cleared_cases'] = DateTimeJSONEncoder().encode(cleared_cases)
  c['raw_data'] = DateTimeJSONEncoder().encode(raw_data)
  c['days'] = days
  return render_to_response('visa_details.html', c)
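Both dashboard views lean on `dataprocessing.split_aggregate`, which is not shown on this page. Judging only from the call sites (`date_interval`, `start_date`, `as_dict`, `include_zero`, `use_none`), it plausibly buckets (date, value) pairs into fixed-width date bins and averages each bin; a rough reconstruction under that assumption:

import collections
import datetime

def split_aggregate(pairs, date_interval=1, start_date=None,
                    as_dict=False, include_zero=True, use_none=False):
    # Hypothetical reconstruction, not the project's actual implementation.
    # use_none (emitting None for empty bins) is left out for brevity.
    if not pairs:
        return {} if as_dict else []
    if start_date is None:
        start_date = min(d for d, _ in pairs)
    bins = collections.defaultdict(list)
    for d, v in pairs:
        bins[(d - start_date).days // date_interval].append(v)
    result = {}
    for idx, values in sorted(bins.items()):
        key = start_date + datetime.timedelta(days=idx * date_interval)
        avg = 1.0 * sum(values) / len(values)
        if avg or include_zero:
            result[key] = avg
    return result if as_dict else sorted(result.items())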