Example No. 1
import subprocess

from flask import redirect, url_for

# `config`, `startup`, and `reset` are assumed to come from the app's own modules.
def toggle_active():
    """Start or stop the sensor process and flip the system state."""
    if not config.system_active:
        # bring the system up and launch the sensor script in the background
        startup()
        config.process = subprocess.Popen('./sensor.py')
        config.system_active = True
    else:
        # shut the system down, stop the sensor, and clear the alert counter
        reset()
        config.process.kill()
        config.system_active = False
        config.alerts_triggered = 0

    return redirect(url_for('index'))
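
A minimal sketch of how this view might be registered in a Flask app; the route path, the POST method, and the `config` module are assumptions for illustration, not part of the original example:

from flask import Flask

import config  # hypothetical module holding system_active, process, alerts_triggered

app = Flask(__name__)

# register the toggle handler; the endpoint name and method are illustrative only
app.add_url_rule('/toggle', 'toggle_active', toggle_active, methods=['POST'])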
Example No. 2
import numpy as np
import healpy as hp
from scipy.stats import chi2

import utils

# convert test statistic to a p-value for a given point; `dec` is unused here
# but kept for the expected (TS, dec) call signature, and `llh` is only looked
# up when the lambda is actually called inside __main__
pVal_func = lambda TS, dec: -np.log10(0.5 * (chi2(len(llh.params)).sf(TS)
                                             + chi2(len(llh.params)).cdf(-TS)))

label = dict(TS=r"$\mathcal{TS}$",
             nsources=r"$n_S$",
             gamma=r"$\gamma$",
             )


if __name__ == "__main__":

    plt = utils.plotting(backend="pdf")

    llh, mc = utils.startup(Nsrc=10)

    print(llh)

    # iterate the all-sky scan, with follow-up scans of the most interesting points
    for i, (scan, hotspot) in enumerate(llh.all_sky_scan(
                                nside=2**6, follow_up_factor=1,
                                pVal=pVal_func,
                                hemispheres=dict(Full=np.radians([-90., 90.])))):

        if i > 0:
            # break after first follow up
            break

    for k in scan.dtype.names:
        scan[k] = hp.sphtfunc.smoothing(scan[k], sigma=np.radians(0.5))
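
The chi-square mixture in pVal_func treats positive and negative test statistics symmetrically: for TS > 0 the cdf(-TS) term vanishes and the p-value is half the chi-square survival function. A standalone sketch of the same conversion, with the number of degrees of freedom assumed to be the two fit parameters (nsources, gamma):

import numpy as np
from scipy.stats import chi2

ndof = 2  # assumption: one degree of freedom per fit parameter
pval = lambda TS: -np.log10(0.5 * (chi2(ndof).sf(TS) + chi2(ndof).cdf(-TS)))

print(pval(np.array([0., 5., 25.])))  # larger TS maps to larger -log10(p)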
Example No. 3
import numpy as np
import healpy as hp
from scipy.stats import chi2

import utils

# convert test statistic to a p-value for a given point
pVal_func = lambda TS, dec: -np.log10(0.5 * (chi2(len(llh.params)).sf(TS)
                                             + chi2(len(llh.params)).cdf(-TS)))

label = dict(TS=r"$\mathcal{TS}$",
             nsources=r"$n_S$",
             gamma=r"$\gamma$",
             )

if __name__ == "__main__":

    plt = utils.plotting(backend="pdf")

    llh, mc = utils.startup(Nsrc=10)

    print(llh)

    # iterate the all-sky scan, with follow-up scans of the most interesting points
    for i, (scan, hotspot) in enumerate(llh.all_sky_scan(
                                nside=2**6, follow_up_factor=1,
                                pVal=pVal_func,
                                decRange=np.radians([-90., 90.]))):

        if i > 0:
            # break after first follow up
            break

    for k in scan.dtype.names:
        scan[k] = hp.sphtfunc.smoothing(scan[k], sigma=np.radians(0.5))
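
The closing loop smooths every column of the scan on the HEALPix grid. A self-contained sketch of the same call on a toy map, assuming healpy is installed:

import numpy as np
import healpy as hp

nside = 2**6
toy_map = np.random.randn(hp.nside2npix(nside))

# Gaussian smoothing with a 0.5 degree kernel, as in the loop above
smoothed = hp.smoothing(toy_map, sigma=np.radians(0.5))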
Example No. 4
import numpy as np

# skylab
from skylab.ps_injector import PointSourceInjector
from skylab.psLLH import MultiPointSourceLLH
from skylab.utils import poisson_weight

# local
import utils


if __name__ == "__main__":

    plt = utils.plotting(backend="pdf")

    # init likelihood class
    llh, mc = utils.startup()

    print(llh)

    # data plot

    gamma = np.array([2., 2.3, 2.7])

    # energy
    fig_E, ax_E = plt.subplots()
    h, b, p = ax_E.hist([llh.exp["logE"]] + [mc["logE"] for i in gamma],
                        weights=[np.ones(len(llh.exp))]
                                 + [mc["ow"] * mc["trueE"]**(-g) for g in gamma],
                        label=["Data"] + [r"$\gamma={0:.1f}$".format(g) for g in gamma],
                        color=["black"]
                        + [ax_E._get_lines.color_cycle.next() for i in range(len(gamma))],
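
To make the Monte Carlo weighting above concrete: mc["ow"] * mc["trueE"]**(-g) re-weights each simulated event to an E**-gamma power-law flux via its OneWeight. A toy sketch of that re-weighting with a made-up sample (field names mirror the example; the values are random):

import numpy as np

rng = np.random.default_rng(0)
# hypothetical stand-in for the skylab Monte Carlo record array
mc = dict(trueE=rng.uniform(1e3, 1e6, size=1000),
          ow=rng.uniform(0., 1., size=1000))

for g in (2.0, 2.3, 2.7):
    w = mc["ow"] * mc["trueE"] ** (-g)
    print(g, w.sum())  # total weight under each assumed spectral index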
Example No. 5
import os
from argparse import ArgumentParser

import torch

import Network  # local module defining RAKINetwork
import utils    # local helpers; utils.startup reads the json config

# ========================================================================= #
"""
Initialize base parameters
"""
parser = ArgumentParser()
parser.add_argument('-data', '--data_path', type=str, help='path to data folder', default=None, required=False)
args = parser.parse_args()
# path to the config file; the original file ships with the code
json_path = r'./config.json'

# pick the device: the GPU is auto-detected and used if present, otherwise the CPU
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
if device.type == "cuda":
    print('Your current Device is: ', torch.cuda.get_device_name(0))

# read json config file
config = utils.startup(json_path=json_path, copy_files=True)

# fall back to the command-line data path if the configured folder is missing
if not os.path.isdir(config['data']['data_folder']) and args.data_path is not None:
    config['data']['data_folder'] = args.data_path


# define the dataset parameters for the torch loader
params = {'batch_size': config['network']["batch_size"],
          'shuffle': True,
          'num_workers': 0}
# ========================================================================= #

# build the network object
net = Network.RAKINetwork(config, device)

# load the data
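
The example stops before the loading step. A minimal sketch of how the params dict could feed a torch DataLoader, with an in-memory stand-in dataset since the real dataset class is not shown:

import torch
from torch.utils.data import DataLoader, TensorDataset

# hypothetical in-memory dataset standing in for the real data folder
dataset = TensorDataset(torch.randn(256, 1, 28, 28),
                        torch.randint(0, 10, (256,)))
loader = DataLoader(dataset, **params)

for batch, labels in loader:
    pass  # a forward pass through net would go here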
Example No. 6
"""
    once enriched, store the data in a sqlite database
    once a full year is stored in the database, pull aggregated data (median, average) by year and location and append it to a csv file
    truncate the table and repeat for all years
"""

from datetime import datetime, timedelta
from dateutil import parser

import pandas
import geopandas as gp
from azureml.opendatasets import NycTlcYellow

from utils import startup_db, startup, append_to_csv
from config import table

startup()
db = startup_db()

# file for geospatial data - the original url link expires, so the data is saved with the source
nyc_df = gp.read_file('nyc.geojson.json')
nyc_df = nyc_df[['neighborhood', 'borough', 'geometry']]
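
# a hedged illustration (not part of the original script): the "enrichment"
# described in the docstring can be done as a geopandas spatial join of trip
# coordinates against these neighborhood polygons; the sample point below is
# made up for demonstration
from shapely.geometry import Point

_sample = gp.GeoDataFrame({'geometry': [Point(-73.99, 40.73)]}, crs=nyc_df.crs)
_enriched = gp.sjoin(_sample, nyc_df, how='left', predicate='within')
print(_enriched[['neighborhood', 'borough']])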

start = datetime.now()

boroughs = ['Bronx', 'Brooklyn', 'Manhattan', 'Queens', 'Staten Island']

# lookup csv mapping LocationID codes (used by the doLocationId field) to taxi zones
taxi_location_url = r"https://s3.amazonaws.com/nyc-tlc/misc/taxi+_zone_lookup.csv"
loc_id_df = pandas.read_csv(taxi_location_url, index_col='LocationID')

date_ranges = [