Example #1
0
def update_year_zone(year, day_range):
    """ Generate daily tally count by area/year """
    _, tally_zone, _ = data.fetch_data()

    # Keep only rows whose day-of-year falls inside the selected window.
    in_range = tally_zone.loc[(tally_zone.doy >= day_range[0])
                              & (tally_zone.doy <= day_range[1])]

    # Restrict to the requested fire season.
    season_rows = in_range.loc[in_range.FireSeason == year]

    traces = []
    for zone_id, zone_rows in season_rows.groupby("ProtectionUnit"):
        zone_rows = zone_rows.sort_values(["date_stacked"])
        zone_rows["TotalAcres"] = zone_rows["TotalAcres"].round(2)
        # One line trace per protection unit, labelled with its display name.
        traces.append({
            "x": zone_rows.date_stacked,
            "y": zone_rows.TotalAcres,
            "mode": "lines",
            "name": luts.zones[zone_id],
            "line": {
                "shape": get_line_mode(day_range),
                "width": 2
            },
            "hovertemplate": hover_conf,
        })

    layout = go.Layout(
        title=("<b>Alaska Daily Tally Records by Year, " + str(year)
               + "</b><br>" + get_title_date_span(day_range)),
        xaxis=xaxis_conf,
        yaxis=yaxis_conf,
    )
    return {"data": traces, "layout": layout}
Example #2
0
def update_tally(day_range):
    """ Generate daily tally count """

    tally, _, _ = data.fetch_data()
    traces = []

    # Keep only rows inside the selected day-of-year window.
    windowed = tally.loc[(tally.doy >= day_range[0])
                         & (tally.doy <= day_range[1])]

    for season, rows in windowed.groupby("FireSeason"):
        rows = rows.sort_values(["date_stacked"])

        # Highlighted ("important") years get hover text and a legend
        # entry; all other years are drawn but stay quiet.
        highlight = season in luts.important_years

        traces.append({
            "x": rows.date_stacked,
            "y": rows.TotalAcres,
            "mode": "lines",
            "name": str(season),
            "line": {
                "color": luts.years_lines_styles[str(season)]["color"],
                "shape": get_line_mode(day_range),
                "width": luts.years_lines_styles[str(season)]["width"],
            },
            "showlegend": highlight,
            "hoverinfo": "" if highlight else "skip",
            "hovertemplate": hover_conf if highlight else None,
        })

    # Add dummy trace with legend entry for non-big years
    traces.append(
        go.Scatter(
            x=[None],
            y=[None],
            mode="lines",
            name="Other years",
            line={
                "color": luts.default_style["color"],
                "width": luts.default_style["width"],
            },
        )
    )

    layout = go.Layout(
        title="<b>Alaska Statewide Daily Tally Records, 2004-Present,</b><br>"
        + get_title_date_span(day_range),
        xaxis=xaxis_conf,
        yaxis=yaxis_conf,
    )
    return {"data": traces, "layout": layout}
Example #3
0
    def test_sat3cell_tree(self):
        """Smoke-test Sat3Cell wired through TreeNN on one fetched batch."""
        worlds = tf.random_normal([self.n_worlds, self.num_units])

        cell = csat.Sat3Cell(self.n_ops, self.num_units, self.batch_size,
                             self.n_worlds)
        tree = treenn.TreeNN(cell, self.parser, self.batch_size)

        A, B, E = next(data.fetch_data(self.batch_size))

        parsed = [tree.parser(a) for a in A]
        out = tree(worlds, parsed)
        self.assertEqual(out.shape,
                         [self.batch_size, self.num_units, self.n_worlds])
Example #4
0
 def __init__(self):
     """Fetch epidemic data, prepare per-region parameters, and build simulators."""
     # Full dataset fetched once up front; presumably consumed by the
     # helper calls below — confirm against their definitions.
     self.all_data = data.fetch_data()
     self.find_countries()
     # Fitted parameters keyed by country / by state (filled elsewhere).
     self.country_params = {}
     self.state_params = {}
     self.get_real_data()
     # NOTE(review): the two simulators use different hard-coded epidemic
     # constants — looks like country- vs state-level tuning; confirm.
     self.simulator = simulate.simulate_epidemic(5, 1.9, 2)
     self.state_simulator = simulate.simulate_epidemic(6.5, 0.9, 1.6)
     # Display colors for each case category.
     self.colors = {
         "active": "#FFA500",
         "deaths": "#B22222",
         "recovered": "#008000",
     }
     self.get_india_data()
Example #5
0
def update_tally_zone(area, day_range):
    """
    Generate daily tally count for specified protection area.

    Args:
        area: protection-unit code; used to filter ``tally_zone`` and to
            look up the display name in ``luts.zones``.
        day_range: (start_doy, end_doy) inclusive day-of-year window.

    Returns:
        dict with "data" (one line trace per fire season plus a dummy
        "Other years" legend entry) and "layout" for a Plotly figure.
    """
    (tally, tally_zone, tally_zone_date_ranges) = data.fetch_data()

    #  Slice by day range.
    sliced = tally_zone.loc[(tally_zone.doy >= day_range[0])
                            & (tally_zone.doy <= day_range[1])]

    # Spatial clip
    de = sliced.loc[(sliced["ProtectionUnit"] == area)]

    data_traces = []
    grouped = de.groupby("FireSeason")
    for name, group in grouped:
        group = group.sort_values(["date_stacked"])
        group["TotalAcres"] = group["TotalAcres"].round(2)
        data_traces.extend([{
            "x": group.date_stacked,
            "y": group.TotalAcres,
            "mode": "lines",
            # Cast to str for consistency: Plotly trace names are strings,
            # and the style lookups below already key by str(name).
            "name": str(name),
            "line": {
                "color": luts.years_lines_styles[str(name)]["color"],
                "shape": get_line_mode(day_range),
                "width": luts.years_lines_styles[str(name)]["width"],
            },
            "hovertemplate": hover_conf,
        }])

    # Add dummy trace with legend entry for non-big years
    data_traces.extend([
        go.Scatter(
            x=[None],
            y=[None],
            mode="lines",
            name="Other years",
            line={
                "color": luts.default_style["color"],
                "width": luts.default_style["width"],
            },
        )
    ])

    graph_layout = go.Layout(
        title="<b>Alaska Daily Tally Records, " + luts.zones[area] +
        ", 2004-Present</b><br>" + get_title_date_span(day_range),
        xaxis=xaxis_conf,
        yaxis=yaxis_conf,
    )
    return {"data": data_traces, "layout": graph_layout}
Example #6
0
    def test_sat3_output_shape(self):
        """integration test with sat3 and treenn"""
        # Fixed hyper-parameters for the smoke test.
        d_world = 10
        n_worlds = 64
        n_ops = 32
        d_embed = 8
        batch_size = 50

        parser = data.Parser(led_parser.propositional_language())
        cell = csat.Sat3Cell(n_ops, d_world, batch_size, n_worlds)
        tree = treenn.TreeNN(cell, parser, batch_size)
        model = pwn.PossibleWorlds(tree, n_worlds, d_world)

        # One batch of (premise, hypothesis, entailment) from the loader.
        A, B, E = next(data.fetch_data(batch_size))
        scores = model(A, B)
        self.assertEqual(scores.get_shape().as_list(), [batch_size])
Example #7
0
def update_tally(community):
    """
    Generate precipitation scatter chart.

    Args:
        community: identifier forwarded to fetch_data() to select the data.

    Returns:
        go.Figure scatter of daily precipitation ("pcpt") vs day of year.
    """
    std = fetch_data(community)
    return go.Figure(
        data=[
            go.Scatter(
                name="pcpt",
                x=std["doy"],
                y=std["pcpt"],
                mode="markers",
                marker=dict(line_width=1),
            )
        ],
        layout=go.Layout(
            # Fixed typo in the chart title: "Precipitiation" -> "Precipitation".
            title="<b>Daily Precipitation, [date range] (Anchorage)</b>"),
    )
Example #8
0
def update_tally_zone(community):
    """
    Generate large bubble chart of precip info.

    Args:
        community: identifier forwarded to fetch_data() to select the data.

    Returns:
        go.Figure bubble chart (day of year vs year) with bubble size
        scaled linearly from precipitation into the 0-50 range; zero-size
        bubbles are dropped.
    """
    std = fetch_data(community)
    # Map precip values linearly onto a 0-50 bubble-size range.
    std["bubble_size"] = np.interp(std["pcpt"],
                                   (std["pcpt"].min(), std["pcpt"].max()),
                                   (0, 50))
    # Drop zero-size bubbles so they are not drawn at all.
    # (Removed a leftover debug print(std) here.)
    std = std.loc[(std["bubble_size"] > 0)]
    return go.Figure(
        data=[
            go.Scatter(x=std["doy"],
                       y=std["year"],
                       mode="markers+text",
                       marker=dict(size=std["bubble_size"])),
        ],
        layout=go.Layout(
            # Fixed typo in the chart title: "Precipitiation" -> "Precipitation".
            title="<b>Daily Precipitation, [date range] (Anchorage)</b>"),
    )
Example #9
0
# coding: utf-8
from flask import Flask, send_file
import statistics
import data
import simpleplot

# Flask application object; the route handlers below attach to it.
app = Flask(__name__)
# Fetched once at import time; shared, read-only, by the handlers.
datapoints = data.fetch_data()

@app.route("/")
def index():
    """Serve the static landing page (static/index.html)."""
    page = app.send_static_file('index.html')
    return page

@app.route("/aapl-in-gold")
def aapl_in_gold():
    """
    Should render a plot of the price of aapl stock in gold,
    with time as the x axis and value as the y axis
    """
    # TODO: not implemented yet — build the plot from `datapoints` and
    # return it. NOTE(review): returning None makes Flask error at
    # request time; this route is a stub.
    pass

@app.route("/all-as-usd")
def all_as_usd():
    """
    Should render a plot of the value of aapl stock, bitcoin and gold in usd,
    with time as the x axis and value as the y axis
    """
    # TODO: not implemented yet — build the plot from `datapoints` and
    # return it. NOTE(review): returning None makes Flask error at
    # request time; this route is a stub.
    pass
from fit_any_country import fit_country
from data import fetch_data, get_data

if __name__ == "__main__":
    data = fetch_data()
    # Fit every country that has a long enough time series.
    for country in data:
        time, time_number_days, cases_ref, deaths_ref = get_data(country)
        if len(time) <= 15:
            # Too few data points to attempt a fit.
            continue
        print(country)
        (time_sim, cases_sim, healthy_sim,
         recovered_sim, deaths_sim) = fit_country(country, save_to_json=True)
Example #11
0
            saver.restore(sess, tf.train.latest_checkpoint(LOG_PATH))

            endings = []
            for story, true_ending in tqdm(
                    zip(stories, true_endings),
                    desc='Conditional Ending Generation'):
                ending = self._story_continuation(sess, story, true_ending)
                endings.append(ending)

        return endings


if __name__ == '__main__':

    # Load data
    dataloader = data.fetch_data()

    # Training split is stories only; validation carries labels too.
    train_stories = dataloader['train']
    valid_stories, valid_labels = dataloader['valid']

    # Construct the vocabulary
    vocab, inverse_vocab, max_len = data.construct_vocab(train_stories)

    # Encode the training stories as token ids, padded/limited to max_len.
    encoded_train_context_, _ = data.encode_text(train_stories, max_len, vocab)

    # Append max_len tokens to the training context (for consistency during training)
    train_pads = np.full(shape=(encoded_train_context_.shape[0], max_len),
                         fill_value=vocab['<pad>'],
                         dtype=int)

    # Final training context: original encoding followed by max_len pads.
    encoded_train_context = np.hstack((encoded_train_context_, train_pads))
Example #12
0
def test_fetches_datapoints_in_correct_order():
    """Fetched datapoints must be an OrderedDict whose keys are sorted."""
    fetched = data.fetch_data()
    assert isinstance(fetched, OrderedDict)
    keys = list(fetched)
    assert keys == sorted(keys)
Example #13
0
def test_fetches_correct_amount_of_data():
    """The dataset is expected to contain exactly 848 datapoints."""
    points = data.fetch_data()
    assert len(points) == 848
Example #14
0
# pylint: disable=C0103,C0301
"""
GUI for app
"""

import os
from datetime import datetime
import dash_core_components as dcc
import dash_html_components as html
import dash_dangerously_set_inner_html as ddsih
import luts
import data

# Loaded once at module import; presumably shared by the chart
# callbacks defined elsewhere in the app — confirm against callers.
(tally, tally_zone, tally_zone_date_ranges) = data.fetch_data()

# For hosting
path_prefix = os.getenv("REQUESTS_PATHNAME_PREFIX") or "/"

# Used to make the chart exports nice
fig_download_configs = dict(filename="Daily_Tally_Count",
                            width="1000",
                            height="650",
                            scale=2)
fig_configs = dict(
    displayModeBar=True,
    showSendToCloud=False,
    toImageButtonOptions=fig_download_configs,
    modeBarButtonsToRemove=[
        "zoom2d",
        "pan2d",
        "select2d",
Example #15
0
import plotly.express as px
import pandas

from data import fetch_data

if __name__ == "__main__":
    cases_list = fetch_data()
    # Annotate each daily record with its 1-based day index and the
    # active-case count (confirmed minus recovered minus deaths).
    for i, data in enumerate(cases_list):
        data["day"] = i + 1
        data["active"] = data["confirmed"] - data["recovered"] - data["deaths"]

    # Wide -> tidy (long) format so plotly can draw one colored line
    # per case type.
    wide_df = pandas.DataFrame(cases_list)
    tidy_df = wide_df.melt(id_vars="day",
                           value_vars=("confirmed", "deaths", "recovered",
                                       "active"),
                           var_name="type",
                           value_name="cases")

    fig = px.line(tidy_df, x="day", y="cases", color="type")

    fig.update_layout(title="Covid-19 cases in Poland",
                      xaxis_title="Days since first case",
                      yaxis_title="Number of cases",
                      font=dict(family="Arial, monospace",
                                size=18,
                                color="#7f7f7f"))

    fig.show()
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 28 13:01:32 2017

@author: Jeetu
"""

import numpy as np
import pandas as pd
from imblearn.over_sampling import SMOTE
import os 
from data import fetch_data,feature_engineering
from sklearn import preprocessing

# Current working directory doubles as the dataset root for fetch_data().
root=os.getcwd()
x_train,y_train,x_test,y_test = fetch_data(root,remove_duplicates=True,binary=False)
x_train = feature_engineering(x_train,do_normalization=False)
# Presumably the feature/column names after engineering — confirm type.
l=list(x_train)
x_test = feature_engineering(x_test,do_normalization=False)
print('Shape of training data after feature engineering is {}'.format(x_train.shape))
print ('Shape of test data after feature engineering is {}'.format(x_test.shape))
# Visualize the class balance before resampling.
(y_train).value_counts().plot.barh()
# Largest / smallest class sizes; presumably used to drive the SMOTE
# oversampling further down — confirm.
maxcount=y_train.value_counts().max()
mincount=y_train.value_counts().min()

le=preprocessing.LabelEncoder()

# Encode labels to integers, shifted by +1 so classes start at 1.
y_train=le.fit_transform(y_train)+1

# Accumulators, presumably filled with resampled data later in the file.
datax=pd.DataFrame()
y=pd.Series()