Example #1
def test_max_rows_pipe():
    alt.data_transformers.enable('gpd_to_values', max_rows=1)
    data = geojson2gdp(_create_geojson())
    with pytest.raises(alt.MaxRowsError):
        data = alt.pipe(data, alt.data_transformers.get())
    alt.data_transformers.enable('gpd_to_json', max_rows=1)
    with pytest.raises(alt.MaxRowsError):
        data = alt.pipe(data, alt.data_transformers.get())
    alt.data_transformers.enable('gpd_to_values')
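
# Background sketch (not part of the test above): alt.pipe is toolz-style
# function composition, so alt.pipe(data, f, g) is equivalent to g(f(data)).
# The test relies on this to run the registered transformer chain by hand.
import pandas as pd

df = pd.DataFrame({'x': [1, 2, 3]})
spec = alt.pipe(df, alt.limit_rows(max_rows=10), alt.to_values)
# spec == {'values': [{'x': 1}, {'x': 2}, {'x': 3}]}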
Example #2
File: geodata.py  Project: afonit/gpdvega
def gpd_to_json(data):
    """Write the data model to a .json file and return a url based data model.
    For a ``geopandas.GeoDataFrame``, column values are stored as foreign
    members of the GeoJSON feature objects. For all other data types the
    function :py:func:`altair.to_json` is used."""
    if isinstance(data, gpd.GeoDataFrame):
        data = alt.utils.sanitize_dataframe(data)
        values = geopandas_to_dict(data)
        return alt.to_json({'values': values})
    else:
        return alt.to_json(data)


alt.data_transformers.register(
    'gpd_to_values',
    lambda data: alt.pipe(data, alt.limit_rows, gpd_to_values))
alt.data_transformers.register(
    'gpd_to_json', lambda data: alt.pipe(data, alt.limit_rows, gpd_to_json))

alt.data_transformers.enable('gpd_to_values')
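
# Usage sketch (an assumption, not part of the original module): with the
# 'gpd_to_values' transformer enabled, a GeoDataFrame (a pandas.DataFrame
# subclass) can be handed straight to alt.Chart and drawn with mark_geoshape().
from shapely.geometry import Point

toy_gdf = gpd.GeoDataFrame({'value': [1, 2]},
                           geometry=[Point(0, 0), Point(1, 1)])
toy_chart = alt.Chart(toy_gdf).mark_geoshape().encode(color='value:Q')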


def geojson_feature(data, feature='features', **kwargs):
    """A convenience function for extracting features from a geojson object or url

    Parameters
    ----------

    data : anyOf(string, geojson.GeoJSON)
        A string is interpreted as a URL from which to load the data set;
        a geojson.GeoJSON object is interpreted as the data set itself.
Example #3
prior.head()

# Creating final data set
data = pd.merge(orders, order_products, on='order_id')
data.head()

data = pd.merge(data, products, on='product_id')
data.head()

data = pd.merge(data, aisles, on='aisle_id')
data = pd.merge(data, departments, on='department_id')
data.head()

# Exploratory data analysis
from altair import pipe, limit_rows, to_values
t = lambda data: pipe(data, limit_rows(max_rows=1000000000), to_values)
alt.data_transformers.register('custom', t)
alt.data_transformers.enable('custom')
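
# Sanity check (not in the original notebook): the custom transformer is now
# the one Altair uses when serializing chart data.
assert alt.data_transformers.active == 'custom'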

# How many orders, items, and users are in each eval set?
count = pd.DataFrame(orders.groupby('eval_set')['order_number'].count()) 
count['i'] = count.index
count

n = alt.Chart(count, height=350, width=450, title='Orders as per Evaluation Set').mark_bar().encode(
    x='i',
    y='order_number',
    color=alt.Color('i', legend=None)
).configure_axis(
    labelFontSize=16,
    titleFontSize=16,
)
Example #4
import altair


def filter_nulltime_json(data):
    # Drop rows with a non-positive 'time' value before serializing to JSON.
    if 'time' in data:
        data = data[data.time > 0]
    return altair.pipe(data, altair.to_json)
Example #5
from pathlib import Path
import altair as alt


def json_dir(data, data_dir="altairdata"):
    # Write the dataset to a JSON file inside ``data_dir`` and return a
    # URL-based data model pointing at that file.
    data_dir = Path(data_dir)
    data_dir.mkdir(exist_ok=True)
    return alt.pipe(
        data,
        alt.to_json(filename=str(data_dir / "{prefix}-{hash}.{extension}")))
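
# One plausible way to use json_dir (not part of the original snippet): register
# it as a data transformer and enable it, forwarding data_dir through enable().
# The 'mydata' directory name here is only an illustration.
alt.data_transformers.register('json_dir', json_dir)
alt.data_transformers.enable('json_dir', data_dir='mydata')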
Example #6
df2.head()


# In[18]:


import pandas as pd
from fbprophet import Prophet
import matplotlib
import altair as alt
alt.renderers.enable('notebook')
from vega_datasets import data

from altair import pipe, limit_rows, to_values
t = lambda data: pipe(data, limit_rows(max_rows=110000), to_values)  # default is 5000 rows; raised to 110000 so the full dataset can be embedded
alt.data_transformers.register('custom', t)
alt.data_transformers.enable('custom')

#df1 = pd.read_excel("C:\\Users\\yapingxu\\Documents\\TimeSeriesPython\\Oahu\\DataCSV - Copy.xlsx")
def fit_predict_model(dataframe, interval_width=0.999999999, changepoint_range=0.99):
    m = Prophet(daily_seasonality=False, yearly_seasonality=False, weekly_seasonality=False,
                seasonality_mode='multiplicative',
                interval_width=interval_width,
                changepoint_range=changepoint_range)
    m = m.fit(dataframe)
    forecast = m.predict(dataframe)
    forecast['fact'] = dataframe['y'].reset_index(drop=True)
    forecast['Id'] = dataframe['Id']
    return forecast
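
# Minimal usage sketch (not in the original notebook); it assumes only that the
# input frame carries Prophet's required 'ds'/'y' columns plus the 'Id' column
# referenced above. The toy data here is purely illustrative.
_toy = pd.DataFrame({
    'ds': pd.date_range('2020-01-01', periods=30, freq='D'),
    'y': range(1, 31),
    'Id': range(1, 31),
})
forecast = fit_predict_model(_toy)
forecast[['ds', 'yhat', 'yhat_lower', 'yhat_upper', 'fact', 'Id']].head()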
    
Example #7
def gpd_to_json_data_transformer(data, max_rows=5000):
    return alt.pipe(data, alt.limit_rows(max_rows=max_rows), gpd_to_json)
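
# One plausible way to hook this up (not shown in the original snippet):
# register the transformer and enable it, forwarding max_rows through enable().
alt.data_transformers.register('gpd_to_json', gpd_to_json_data_transformer)
alt.data_transformers.enable('gpd_to_json', max_rows=10000)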