def test_aggregate_dframe_nan_path(self):
     path = Path([Path([[(0.2, 0.3), (0.4, 0.7)], [(0.4, 0.7), (0.8, 0.99)]]).dframe()])
     expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 1]]),
                      vdims=['Count'])
     img = aggregate(path, dynamic=False,  x_range=(0, 1), y_range=(0, 1),
                     width=2, height=2)
     self.assertEqual(img, expected)
 def test_aggregate_curve(self):
     curve = Curve([(0.2, 0.3), (0.4, 0.7), (0.8, 0.99)])
     expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [1, 1]]),
                      vdims=['Count'])
     img = aggregate(curve, dynamic=False,  x_range=(0, 1), y_range=(0, 1),
                     width=2, height=2)
     self.assertEqual(img, expected)
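The test snippets on this page appear to be methods excerpted from a HoloViews datashader test suite; run on their own they need roughly the following imports (a minimal sketch inferred from the names used above, not the original test module header):

import datashader as ds                       # ds.count_cat, ds.mean, ds.max aggregators
import numpy as np
import pandas as pd
import dask.dataframe as dd                   # only for the dask-backed Curve test
import cudf, cupy                             # optional, only for the cuDF/GPU test
from holoviews import Curve, Dataset, Image, NdOverlay, Path, Points
from holoviews.operation.datashader import aggregate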
Example #4
 def test_aggregate_ndoverlay_count_cat_datetimes_microsecond_timebase(
         self):
     dates = pd.date_range(start="2016-01-01", end="2016-01-03", freq='1D')
     xstart = np.datetime64('2015-12-31T23:59:59.723518000', 'us')
     xend = np.datetime64('2016-01-03T00:00:00.276482000', 'us')
     curve = Curve((dates, [1, 2, 3]))
     curve2 = Curve((dates, [3, 2, 1]))
     ndoverlay = NdOverlay({0: curve, 1: curve2}, 'Cat')
     imgs = aggregate(ndoverlay,
                      aggregator=ds.count_cat('Cat'),
                      width=2,
                      height=2,
                      x_range=(xstart, xend),
                      dynamic=False)
     bounds = (np.datetime64('2015-12-31T23:59:59.723518'), 1.0,
               np.datetime64('2016-01-03T00:00:00.276482'), 3.0)
     dates = [
         np.datetime64('2016-01-01T11:59:59.861759000', ),
         np.datetime64('2016-01-02T12:00:00.138241000')
     ]
     expected = Image((dates, [1.5, 2.5], [[1, 0], [0, 2]]),
                      datatype=['xarray'],
                      bounds=bounds,
                      vdims='Count')
     expected2 = Image((dates, [1.5, 2.5], [[0, 1], [1, 1]]),
                       datatype=['xarray'],
                       bounds=bounds,
                       vdims='Count')
     self.assertEqual(imgs[0], expected)
     self.assertEqual(imgs[1], expected2)
Example #5
 def test_aggregate_points_sampling(self):
     points = Points([(0.2, 0.3), (0.4, 0.7), (0, 0.99)])
     expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]),
                      vdims=['Count'])
     img = aggregate(points, dynamic=False,  x_range=(0, 1), y_range=(0, 1),
                     x_sampling=0.5, y_sampling=0.5)
     self.assertEqual(img, expected)
Example #7
    def water_hover_gen(self,
                        x_range,
                        FFT_Size='256',
                        Percent_Overlap='50',
                        Num_Avgs='1',
                        Window='blackmanharris'):
        width = 128
        height = 128
        fft_size = int(FFT_Size)
        if fft_size < 128:
            width = fft_size
            height = fft_size

        if self.hover_count > self.update_count:
            self.gen_spec_points(x_range, FFT_Size, Percent_Overlap, Num_Avgs,
                                 Window)
        self.hover_count += 1

        opts_hover = dict(tools=['hover'],
                          alpha=0,
                          hover_alpha=0.2,
                          fill_alpha=0)
        agg = aggregate(self.points,
                        width=width,
                        height=height,
                        dynamic=False,
                        aggregator=ds.max('PowerdB'),
                        x_sampling=self.step_size,
                        y_sampling=1)
        return hv.QuadMesh(agg).options(**opts_hover)
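A hedged sketch of how a hover generator like water_hover_gen is typically wired up: a RangeX stream feeds the current x-axis range into the callback, and the transparent QuadMesh is overlaid on the datashaded spectrogram so Bokeh's hover tool can report the aggregated PowerdB values (viewer and spectrogram_view are assumed names, not from the original source):

import holoviews as hv

# RangeX re-invokes the callback with the new x_range on every zoom/pan.
hover_dmap = hv.DynamicMap(
    lambda x_range: viewer.water_hover_gen(x_range),
    streams=[hv.streams.RangeX()])

display = viewer.spectrogram_view * hover_dmap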
Example #9
 def test_aggregate_ndoverlay(self):
     ds = Dataset([(0.2, 0.3, 0), (0.4, 0.7, 1), (0, 0.99, 2)], kdims=['x', 'y', 'z'])
     ndoverlay = ds.to(Points, ['x', 'y'], [], 'z').overlay()
     expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]),
                      vdims=['Count'])
     img = aggregate(ndoverlay, dynamic=False,  x_range=(0, 1), y_range=(0, 1),
                     width=2, height=2)
     self.assertEqual(img, expected)
Example #11
 def test_aggregate_points_cudf(self):
     points = Points([(0.2, 0.3), (0.4, 0.7), (0, 0.99)], datatype=['cuDF'])
     self.assertIsInstance(points.data, cudf.DataFrame)
     img = aggregate(points, dynamic=False,  x_range=(0, 1), y_range=(0, 1),
                     width=2, height=2)
     expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]),
                      vdims=['Count'])
     self.assertIsInstance(img.data.Count.data, cupy.ndarray)
     self.assertEqual(img, expected)
Example #12
 def test_aggregate_points_categorical(self):
     points = Points([(0.2, 0.3, 'A'), (0.4, 0.7, 'B'), (0, 0.99, 'C')], vdims='z')
     img = aggregate(points, dynamic=False,  x_range=(0, 1), y_range=(0, 1),
                     width=2, height=2, aggregator=ds.count_cat('z'))
     xs, ys = [0.25, 0.75], [0.25, 0.75]
     expected = NdOverlay({'A': Image((xs, ys, [[1, 0], [0, 0]]), vdims='z Count'),
                           'B': Image((xs, ys, [[0, 0], [1, 0]]), vdims='z Count'),
                           'C': Image((xs, ys, [[0, 0], [1, 0]]), vdims='z Count')},
                          kdims=['z'])
     self.assertEqual(img, expected)
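For display, a categorical aggregate like the one above is usually passed to datashade with a per-category color key instead of being inspected as an NdOverlay; a minimal sketch (the colors are arbitrary, not taken from the tests):

from holoviews.operation.datashader import datashade

# Produces a single RGB image whose pixels mix the per-category counts of 'z'.
color_key = {'A': 'red', 'B': 'green', 'C': 'blue'}
shaded = datashade(points, aggregator=ds.count_cat('z'), color_key=color_key,
                   dynamic=False, x_range=(0, 1), y_range=(0, 1),
                   width=2, height=2)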
 def test_aggregate_curve_datetimes(self):
     dates = pd.date_range(start="2016-01-01", end="2016-01-03", freq='1D')
     curve = Curve((dates, [1, 2, 3]))
     img = aggregate(curve, width=2, height=2, dynamic=False)
     bounds = (np.datetime64('2016-01-01T00:00:00.000000'), 1.0,
               np.datetime64('2016-01-03T00:00:00.000000'), 3.0)
     dates = [np.datetime64('2016-01-01T12:00:00.000000000'),
              np.datetime64('2016-01-02T12:00:00.000000000')]
     expected = Image((dates, [1.5, 2.5], [[1, 0], [0, 2]]),
                      datatype=['xarray'], bounds=bounds, vdims='Count')
     self.assertEqual(img, expected)
 def test_aggregate_points_categorical_zero_range(self):
     points = Points([(0.2, 0.3, 'A'), (0.4, 0.7, 'B'), (0, 0.99, 'C')], vdims='z')
     img = aggregate(points, dynamic=False,  x_range=(0, 0), y_range=(0, 1),
                     aggregator=ds.count_cat('z'))
     xs, ys = [], [0.25, 0.75]
     params = dict(bounds=(0, 0, 0, 1), xdensity=1)
     expected = NdOverlay({'A': Image((xs, ys, np.zeros((2, 0))), vdims='z Count', **params),
                           'B': Image((xs, ys, np.zeros((2, 0))), vdims='z Count', **params),
                           'C': Image((xs, ys, np.zeros((2, 0))), vdims='z Count', **params)},
                          kdims=['z'])
     self.assertEqual(img, expected)
 def test_aggregate_curve_datetimes_microsecond_timebase(self):
     dates = pd.date_range(start="2016-01-01", end="2016-01-03", freq='1D')
     xstart = np.datetime64('2015-12-31T23:59:59.723518000', 'us')
     xend = np.datetime64('2016-01-03T00:00:00.276482000', 'us')
     curve = Curve((dates, [1, 2, 3]))
     img = aggregate(curve, width=2, height=2, x_range=(xstart, xend), dynamic=False)
     bounds = (np.datetime64('2015-12-31T23:59:59.723518'), 1.0,
               np.datetime64('2016-01-03T00:00:00.276482'), 3.0)
     dates = [np.datetime64('2016-01-01T11:59:59.861759000',),
              np.datetime64('2016-01-02T12:00:00.138241000')]
     expected = Image((dates, [1.5, 2.5], [[1, 0], [0, 2]]),
                      datatype=['xarray'], bounds=bounds, vdims='Count')
     self.assertEqual(img, expected)
 def test_aggregate_curve_datetimes_dask(self):
     df = pd.DataFrame(
         data=np.arange(1000), columns=['a'],
         index=pd.date_range('2019-01-01', freq='1T', periods=1000),
     )
     ddf = dd.from_pandas(df, npartitions=4)
     curve = Curve(ddf, kdims=['index'], vdims=['a'])
     img = aggregate(curve, width=2, height=3, dynamic=False)
     bounds = (np.datetime64('2019-01-01T00:00:00.000000'), 0.0,
               np.datetime64('2019-01-01T16:39:00.000000'), 999.0)
     dates = [np.datetime64('2019-01-01T04:09:45.000000000'),
              np.datetime64('2019-01-01T12:29:15.000000000')]
     expected = Image((dates, [166.5, 499.5, 832.5], [[333, 0], [167, 166], [0, 334]]),
                      ['index', 'a'], 'Count', datatype=['xarray'], bounds=bounds)
     self.assertEqual(img, expected)
Example #21
 def test_aggregate_curve_datetimes_dask(self):
     df = pd.DataFrame(
         data=np.arange(1000), columns=['a'],
         index=pd.date_range('2019-01-01', freq='1T', periods=1000),
     )
     ddf = dd.from_pandas(df, npartitions=4)
     curve = Curve(ddf, kdims=['index'], vdims=['a'])
     img = aggregate(curve, width=2, height=3, dynamic=False)
     bounds = (np.datetime64('2019-01-01T00:00:00.000000'), 0.0,
               np.datetime64('2019-01-01T16:39:00.000000'), 999.0)
     dates = [np.datetime64('2019-01-01T04:09:45.000000000'),
              np.datetime64('2019-01-01T12:29:15.000000000')]
     expected = Image((dates, [166.5, 499.5, 832.5], [[332, 0], [167, 166], [0, 334]]),
                      ['index', 'a'], 'Count', datatype=['xarray'], bounds=bounds)
     self.assertEqual(img, expected)
Example #22
def holoviews_radial_profiles(weight_by=None):
    dens = np.log10(rb['H_nuclei_density'])
    temp = np.log10(rb['Temperature'])
    Zgas = np.log10(rb['metallicity'])
    cell_mass = rb['cell_mass'].in_units('Msun')
    cell_volume = rb['cell_volume'].in_units('kpc**3')
    x = rb['x']
    y = rb['y']
    z = rb['z']

    halo_center = ds.arr(rb_center,'code_length')
    dist = np.sqrt((halo_center[0]-rb['x'])**2.+(halo_center[1]-rb['y'])**2.+(halo_center[2]-rb['z'])**2.).in_units('kpc')

    df = pd.DataFrame({'temp':temp, 'dens':dens, 'Zgas':Zgas,'cell_volume':cell_volume,
                        'x':x,'y':y,'z':z,'dist':dist,'cell_mass':cell_mass})

    temp_dist = hv.Scatter(df,kdims=['dist'],vdims=['temp'],label="Temperature ")
    dens_dist = hv.Scatter(df,kdims=['dist'],vdims=['dens'],label='Hydrogen Number Density')
    metal_dist = hv.Scatter(df,kdims=['dist'],vdims=['Zgas'],label='Metallicity')

    if weight_by is None:
        dist_plots = (datashade(temp_dist,cmap=cm.Reds, dynamic=False,x_range=(0,60),y_range=(2,8.4)).opts(plot=dict(aspect='square'))
                    + datashade(dens_dist,cmap=cm.Blues, dynamic=False,x_range=(0,60),y_range=(-6.5,2)).opts(plot=dict(aspect='square'))
                    + datashade(metal_dist,cmap=cm.BuGn, dynamic=False,x_range=(0,60),y_range=(-8.5,1.4)).opts(plot=dict(aspect='square')))
        fileout= 'basic_profile_'+args[-3]+'_'+args[-1]

    if weight_by == 'cell_mass':
        temp_shade = aggregate(hv.Scatter(df,['dist','temp']),y_range=(2,8.4),aggregator=dshade.sum('cell_mass'))
        temp_shade = temp_shade.opts(plot=dict(colorbar=True,aspect='square',logz=True),style=dict(cmap=cm.Reds))
        dens_shade = aggregate(hv.Scatter(df,['dist','dens']),y_range=(-7,2.5),aggregator=dshade.sum('cell_mass'))
        dens_shade = dens_shade.opts(plot=dict(colorbar=True,aspect='square',logz=True),style=dict(cmap=cm.Blues))
        metal_shade = aggregate(hv.Scatter(df,['dist','Zgas']),y_range=(-7,2.5),aggregator=dshade.sum('cell_mass'))
        metal_shade = metal_shade.opts(plot=dict(colorbar=True,aspect='square',logz=True),style=dict(cmap=cm.BuGn))

        dist_plots = (temp_shade + dens_shade + metal_shade)
        fileout = 'basic_profile_cell_mass_'+args[-3]+'_'+args[-1]

    if weight_by == 'cell_volume':
        temp_shade = aggregate(hv.Scatter(df,['dist','temp']),y_range=(2,8.4),aggregator=dshade.sum('cell_volume'))
        temp_shade = temp_shade.opts(plot=dict(colorbar=True,aspect='square',logz=True),style=dict(cmap=cm.Reds))
        dens_shade = aggregate(hv.Scatter(df,['dist','dens']),y_range=(-7,2.5),aggregator=dshade.sum('cell_volume'))
        dens_shade = dens_shade.opts(plot=dict(colorbar=True,aspect='square',logz=True),style=dict(cmap=cm.Blues))
        metal_shade = aggregate(hv.Scatter(df,['dist','Zgas']),y_range=(-7,2.5),aggregator=dshade.sum('cell_volume'))
        metal_shade = metal_shade.opts(plot=dict(colorbar=True,aspect='square',logz=True),style=dict(cmap=cm.BuGn))

        dist_plots = (temp_shade + dens_shade + metal_shade)
        fileout = 'basic_profile_cell_vol_'+args[-3]+'_'+args[-1]

    renderer = Store.renderers['matplotlib'].instance(fig='pdf', holomap='gif')
    renderer.save(dist_plots, fileout)
    return
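holoviews_radial_profiles depends on module-level globals (rb, rb_center, ds, args, cm, dshade, Store) defined elsewhere in the original script; a hedged sketch of how they might be set up with yt (the dataset path, halo centre and sphere radius are placeholders):

import sys
import yt
import datashader as dshade                  # aggregator namespace used above (dshade.sum)
from matplotlib import cm                    # colormaps: cm.Reds, cm.Blues, cm.BuGn
from holoviews import Store

args = sys.argv                              # the output filenames above index into this
ds = yt.load('DD0040/DD0040')                # hypothetical dataset path
rb_center = [0.5, 0.5, 0.5]                  # placeholder halo centre, code_length units
rb = ds.sphere(rb_center, (60.0, 'kpc'))     # region ("rb") whose fields are queried above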
Example #24
    def make_sky(self,
                 object_type,
                 ra_range=None,
                 dec_range=None,
                 x_range=None,
                 y_range=None,
                 **kwargs):

        if object_type == 'all':
            dset = self.ds
        else:
            dset = self.ds.select(label=object_type)

        if x_range is not None and y_range is not None:
            dset = dset.select(x=x_range, y=y_range)

        self._selected = dset.data.id

        pts = dset.to(hv.Points, kdims=['ra', 'dec'], vdims=['y'], groupby=[])
        agg = aggregate(pts,
                        width=100,
                        height=100,
                        x_range=ra_range,
                        y_range=dec_range,
                        aggregator=ds.mean('y'),
                        dynamic=False)
        hover = hv.QuadMesh(agg).opts(
            '[tools=["hover"]] (alpha=0 hover_alpha=0.2)')
        shaded = dynspread(
            datashade(pts,
                      x_range=ra_range,
                      y_range=dec_range,
                      dynamic=False,
                      cmap=cc.palette['coolwarm'],
                      aggregator=ds.mean('y')))
        shaded = shaded.opts('RGB [width=400, height=400]')

        return (shaded * hover).relabel('{} ({})'.format(
            object_type, len(dset)))
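A hedged sketch of how make_sky might be driven interactively: a RangeXY stream supplies x_range/y_range while the object type is a selectable dimension (the instance name explorer and the object_type values are illustrative):

import holoviews as hv

sky_dmap = hv.DynamicMap(
    lambda object_type, x_range, y_range: explorer.make_sky(
        object_type, x_range=x_range, y_range=y_range),
    kdims=['object_type'],
    streams=[hv.streams.RangeXY()]
).redim.values(object_type=['all', 'star', 'galaxy'])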
Example #26
df1 = dd.from_pandas(df_test, npartitions=1) 

# Set plot and style options
hv.util.opts('Image [width=400 height=400 shared_axes=False logz=True] {+axiswise} ')
hv.util.opts("HLine VLine (color='white' line_width=1) Layout [shared_axes=False] ")
hv.util.opts("Curve [xaxis=None yaxis=None show_grid=False, show_frame=False] (color='orangered') {+framewise}")

# Read the short NYC CSV file over in the data directory 
#df2 = dd.read_csv('../data/nyc_taxi_short.csv',usecols=['b_band', 'r_band'])
df = df1.persist()

# Reproject points from Mercator to PlateCarree (latitude/longitude)
points = gv.Points(df, kdims=['color', 'r_band'], vdims=[], crs=ccrs.GOOGLE_MERCATOR)
projected = gv.operation.project_points(points, projection=ccrs.PlateCarree())
projected = projected.redim(color='lon', r_band='latR')

# Use datashader to rasterize and linked streams for interactivity
agg = aggregate(projected, link_inputs=True, x_sampling=0.0000001, y_sampling=0.0000001)
pointerx = hv.streams.PointerX(x=-74, source=projected)
pointery = hv.streams.PointerY(y=40.8,  source=projected)
vline = hv.DynamicMap(lambda x: hv.VLine(x), streams=[pointerx])
hline = hv.DynamicMap(lambda y: hv.HLine(y), streams=[pointery])

sampled = hv.util.Dynamic(agg, operation=lambda obj, x: obj.sample(lon=x),
                          streams=[pointerx], link_inputs=False)

hvobj = ((agg * hline * vline) << sampled.opts(plot={'Curve': dict(width=100)}))

doc = hv.renderer('bokeh').server_doc(hvobj)
doc.title = 'LUVOIR CMD Simulator'
    def holoview_plot(self):
        """Assemble the datashaded embedding view, hover grid, tap selection,
        zoomed view and histograms into a single linked layout."""
        import datashader as ds
        from holoviews.operation.datashader import aggregate, shade, datashade, dynspread
        from holoviews.streams import RangeXY

        self.ds_points = self.datashade(
            "Value" if len(self._data.data_dims) > 0 else None)
        self.ds_points = self.ds_points.opts(plot=dict(width=600, height=600))

        # Hover and zoom grid tool.
        self._hover_grid = hv.util.Dynamic(
            aggregate(self._points,
                      aggregator=ds.mean("Value"),
                      width=15,
                      height=15,
                      streams=[RangeXY(source=self.ds_points)]),
            operation=hv.QuadMesh).opts(plot=dict(tools=["hover"]),
                                        style=dict(alpha=0, hover_alpha=0.2))

        # Get the points in tapped rectangle
        self._posxy = DataShaderSelect(source=self._hover_grid,
                                       dataset=self._data.embedding)

        #self._posxy = hv.streams.Tap(source=self._hover_grid)

        def _dss_logger(**kwargs):
            import logging as log
            log.info("Handling event from datashader select: %s", str(kwargs))

        self._posxy.add_subscriber(_dss_logger)

        # Make layout
        self.tap_indicators = hv.DynamicMap(self.tap_points,
                                            kdims=[],
                                            streams=[self._posxy])
        self.selected_table = hv.DynamicMap(self.tap_table,
                                            streams=[self._posxy])

        self.tap_zoom = hv.DynamicMap(
            self.focus_plot, streams=[self._posxy],
            kdims=["Counts"]).opts(norm=dict(framewise=True)).redim.values(
                Counts=self._data.data_dims)

        def _h(Counts, index, fine_index, **kwargs):
            #print index, kwargs
            from holoviews.operation import histogram

            m = {
                Counts: "Value",
                "{}_frequency".format(Counts): "frequency",
            }
            if len(index) > 0:
                d = self._data.embedding.iloc[index]
                if len(fine_index) > 0:
                    d = d.iloc[fine_index]
                #print "Trying", Counts
                label = "{} {} points".format(Counts, len(d))
                r = histogram(
                    hv.Points(d),
                    #self.selected_table,
                    dimension=Counts,
                    dynamic=False).redim(**m)
            else:
                label = "{} {} points".format(Counts,
                                              len(self._data.embedding))
                #print "Alt", Counts
                r = histogram(self._points[Counts],
                              dimension="Value",
                              dynamic=False).redim(Value_frequency="frequency")

            #print(r)
            return r.relabel(label)

        from holoviews import streams
        self.zoom_selection = streams.Selection1D(source=self.tap_zoom)

        self.p = hv.DynamicMap(
            _h,
            kdims=["Counts"],
            streams=[
                self.zoom_selection.rename(index="fine_index"), self._posxy
            ]).redim.values(Counts=self._data.data_dims).opts(norm=dict(
                framewise=True))


        self._layout = self.ds_points * self._hover_grid * self.tap_indicators \
                       + self.selected_table + self.tap_zoom + self.p

        self._layout = self._layout.cols(2).opts(plot={"shared_axes": False})

        return self._layout
 def test_aggregate_points_target(self):
     points = Points([(0.2, 0.3), (0.4, 0.7), (0, 0.99)])
     expected = Image(([0.25, 0.75], [0.25, 0.75], [[1, 0], [2, 0]]),
                      vdims=['Count'])
     img = aggregate(points, dynamic=False,  target=expected)
     self.assertEqual(img, expected)
Example #31
opts.defaults(
    opts.Curve(xaxis=None, yaxis=None, show_grid=False, show_frame=False,
               color='orangered', framewise=True, width=100),
    opts.Image(width=800, height=400, shared_axes=False, logz=True,
               xaxis=None, yaxis=None, axiswise=True),
    opts.HLine(color='white', line_width=1),
    opts.Layout(shared_axes=False),
    opts.VLine(color='white', line_width=1))

# Read the parquet file
df = dd.read_parquet('./data/nyc_taxi_wide.parq').persist()

# Declare points
points = hv.Points(df, kdims=['pickup_x', 'pickup_y'], vdims=[])

# Use datashader to rasterize and linked streams for interactivity
agg = aggregate(points, link_inputs=True, x_sampling=0.0001, y_sampling=0.0001)
pointerx = hv.streams.PointerX(x=np.mean(points.range('pickup_x')), source=points)
pointery = hv.streams.PointerY(y=np.mean(points.range('pickup_y')), source=points)
vline = hv.DynamicMap(lambda x: hv.VLine(x), streams=[pointerx])
hline = hv.DynamicMap(lambda y: hv.HLine(y), streams=[pointery])

sampled = hv.util.Dynamic(agg, operation=lambda obj, x: obj.sample(pickup_x=x),
                          streams=[pointerx], link_inputs=False)

hvobj = ((agg * hline * vline) << sampled)

# Obtain Bokeh document and set the title
doc = renderer.server_doc(hvobj)
doc.title = 'NYC Taxi Crosshair'
Example #32

def filter_count(agg, min_count, **kwargs):
    if min_count:
        agg = deepcopy(agg)
        agg.data.Count.data[agg.data.Count.data < min_count] = 0
    return agg


def hline_fn(min_count, **kwargs):
    return hv.VLine(min_count)


def tiles_fn(alpha, **kwargs):
    return tiles.opts(style=dict(alpha=alpha))


explorer = OSMExplorer(name="OpenStreetMap GPS Explorer")

tile = hv.DynamicMap(tiles_fn, streams=[explorer])
agg = aggregate(hv.Points(df))
filtered = hv.util.Dynamic(agg, operation=filter_count, streams=[explorer])
shaded = shade(filtered, streams=[explorer])
hline = hv.DynamicMap(hline_fn, streams=[explorer])
explorer.output = (tile * shaded) << histogram(agg, log=True) * hline

doc = parambokeh.Widgets(explorer,
                         view_position='right',
                         callback=explorer.event,
                         mode='server')
usecols = ['tpep_pickup_datetime', 'dropoff_x', 'dropoff_y']
ddf = dd.read_csv('../data/nyc_taxi.csv', parse_dates=['tpep_pickup_datetime'], usecols=usecols)
ddf['hour'] = ddf.tpep_pickup_datetime.dt.hour
ddf = ddf.persist()

from bokeh.models import WMTSTileSource
url = 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{Z}/{Y}/{X}.jpg'
wmts = gv.WMTS(WMTSTileSource(url=url))

stream = hv.streams.Stream.define('HourSelect', hour=0)()
points = hv.Points(ddf, kdims=['dropoff_x', 'dropoff_y'])
dmap = hv.util.Dynamic(points, operation=lambda obj, hour: obj.select(hour=hour),
                       streams=[stream])

# Apply aggregation
aggregated = aggregate(dmap, link_inputs=True)

# Shade the data
shaded = shade(aggregated)

# Define PointerX stream, attach to points and declare DynamicMap for cross-section and VLine
pointer = hv.streams.PointerX(x=ddf.dropoff_x.loc[0].compute().iloc[0], source=points)
section = hv.util.Dynamic(aggregated, operation=lambda obj, x: obj.sample(dropoff_x=x),
                          streams=[pointer], link_inputs=False)
vline = hv.DynamicMap(lambda x: hv.VLine(x), streams=[pointer])

# Define options
hv.opts("RGB [width=800 height=600 xaxis=None yaxis=None] VLine (color='black' line_width=1)")
hv.opts("Curve [width=100 yaxis=None show_frame=False] (color='black') {+framewise} Layout [shared_axes=False]")

# Combine it all into a complex layout

def filter_count(agg, min_count, **kwargs):
    if min_count:
        agg = deepcopy(agg)
        agg.data.Count.data[agg.data.Count.data < min_count] = 0
    return agg


def hline_fn(min_count, **kwargs):
    return hv.VLine(min_count)


def tiles_fn(alpha, **kwargs):
    return tiles.opts(style=dict(alpha=alpha))


explorer = OSMExplorer(name="OpenStreetMap GPS Explorer")

tile = hv.DynamicMap(tiles_fn, streams=[explorer])
agg = aggregate(hv.Points(df), x_sampling=10, y_sampling=10)
filtered = hv.util.Dynamic(agg, operation=filter_count, streams=[explorer])
shaded = shade(filtered, streams=[explorer])
hline = hv.DynamicMap(hline_fn, streams=[explorer])
explorer.output = (tile * shaded) << histogram(agg, log=True) * hline

doc = parambokeh.Widgets(explorer,
                         view_position='right',
                         callback=explorer.event,
                         mode='server')
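Both filter_count definitions above call deepcopy without showing its import; run standalone they would presumably need:

from copy import deepcopy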
Example #35
                 'nyc_taxi_wide.parq',
                 engine='fastparquet')).persist()
tiles = EsriImagery()

stream = hv.streams.Stream.define('HourSelect', hour=0)()
points = hv.Points(ddf, kdims=['dropoff_x', 'dropoff_y'])
dmap = hv.util.Dynamic(
    points,
    operation=lambda obj, hour: obj.select(dropoff_hour=hour).relabel(
        'Hour of Day: %d' % hour),
    streams=[stream])

# Apply aggregation
aggregated = aggregate(dmap,
                       link_inputs=True,
                       streams=[hv.streams.RangeXY],
                       width=1200,
                       height=600)


# Shade the data
class ColormapPicker(hv.streams.Stream):
    colormap = param.ObjectSelector(default=cm_n["fire"],
                                    objects=cm_n.values())


cmap_picker = ColormapPicker(rename={'colormap': 'cmap'}, name='')
shaded = shade(aggregated, link_inputs=True, streams=[cmap_picker])

# Define PointerX stream, attach to points and declare DynamicMap for cross-section and VLine
pointer = hv.streams.PointerX(x=ddf.dropoff_x.loc[0].compute().iloc[0],
                              source=points)
Example #36
def plotter_2D_test_histogram(name):
    # IMPORTING AND FORMATTING DATA

    # ===| open root demo file with pedestal and noise values |===
    t = uproot.open("Data/CalibTree.root")["calibTree"]
    #padData = t.pandas.df("Pedestals", flatten = False)
    padData = t.array(name)

    # ===| pad plane plane meta data |===
    d = pd.read_csv("Data/pad_plane_data.txt", sep='\t')

    # ===| function to prepare the input root demo file for plotting |===
    [vcor, vtri] = configureData(padData, d)

    # PLOTTING

    hd.shade.cmap = [
        '#FBFCBF', '#FD9F6C', '#DD4968', '#8C2980', '#3B0F6F', '#000003'
    ]
    cvs = ds.Canvas(plot_height=400, plot_width=400)

    trim = hv.TriMesh((vtri, hv.Points(vcor, vdims='za'))).opts(show_grid=True)
    trim2 = hv.TriMesh((vtri, hv.Points(vcor,
                                        vdims='zc'))).opts(show_grid=True)
    trim.opts(colorbar=True)
    trim.opts(cmap='Blues')

    trimesh = hd.datashade(trim,
                           aggregator=ds.mean('za'),
                           precompute=True,
                           link_inputs=False)
    trimesh2 = hd.datashade(trim2,
                            aggregator=ds.mean('zc'),
                            precompute=True,
                            link_inputs=False)
    trimesh.opts(height=450,
                 width=450,
                 show_grid=False,
                 xaxis=None,
                 yaxis=None)
    trimesh2.opts(height=450,
                  width=450,
                  show_grid=False,
                  xaxis=None,
                  yaxis=None)

    # ADDING INTERACTIVITY

    # Small hover tool
    tooltips_small = [("X:", "$x"), ("Y:", "$y"), ("Value:", "NaN")]
    hover_small = HoverTool(tooltips=tooltips_small)
    dynamic = hv.util.Dynamic(hd.aggregate(trim, width=30, height=30, streams=[RangeXY]),
            operation=hv.QuadMesh) \
        .opts(tools=[hover_small], alpha=0, hover_alpha=0, hover_line_color='black',hover_line_alpha=0)

    # Sector select hover tool

    sector_edge_phi = np.linspace(0, np.pi * 2, 19)
    sector_edge_r = np.array([850, 2530])
    Phi, R = np.meshgrid(sector_edge_phi, sector_edge_r)
    Qx = np.cos(Phi) * np.abs(R)
    Qy = np.sin(Phi) * np.abs(R)
    Z = np.linspace(0, 17, 18).reshape(1, 18)
    #Z = Z*0

    hover_data = dict(x=Qx, y=Qy, z=Z)

    tooltips_a = [("Side", "A"), ("Sector", "@z")]
    tooltips_c = [("Side", "C"), ("Sector", "@z")]
    hover_a = HoverTool(tooltips=tooltips_a)
    hover_c = HoverTool(tooltips=tooltips_c)

    qmesh_a = hv.QuadMesh(hover_data)\
       .opts(tools=[hover_a,'tap'], alpha=0, hover_fill_alpha=0.1, hover_color='white',
          hover_line_color='black',hover_line_alpha=1)
    qmesh_c = hv.QuadMesh(hover_data)\
       .opts(tools=[hover_c], alpha=0, hover_fill_alpha=0.1, hover_color='white',
          hover_line_color='black',hover_line_alpha=1)

    # CREATING OUTPUT

    tpc_plot_a = trimesh * qmesh_a * hv.Text(0, 0, 'A', fontsize=40)
    tpc_plot_c = trimesh2 * qmesh_c * hv.Text(0, 0, 'C', fontsize=40)
    final_layout = (tpc_plot_a + tpc_plot_c).opts(merge_tools=False)

    return final_layout
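A hedged usage sketch for the function above; the branch name comes from the commented-out pandas call near the top and the output file name is a placeholder:

import holoviews as hv
hv.extension('bokeh')                        # the hover/tap tools used above are Bokeh-specific

layout = plotter_2D_test_histogram('Pedestals')
hv.save(layout, 'calib_pad_planes.html')     # write the two linked TPC views to a single HTML file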