import logging

# Assumption: `globs` is a module-level cache shared by the `run` functions
# below; in the original module it is presumably defined once, roughly as:
globs = {'hyperstream': None, 'sphere_connector': None}


def run(delete_existing_workflows=True, loglevel=logging.INFO):
    from hyperstream import HyperStream, TimeInterval
    from workflows.deploy_summariser import create_workflow_coord_plate_creation, create_workflow_summariser
    from sphere_connector_package.sphere_connector import SphereConnector

    hyperstream = HyperStream(loglevel=loglevel, file_logger=None)

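    # Create the SphereConnector once and cache it in `globs` so repeated calls reuse it.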
    if not globs['sphere_connector']:
        globs['sphere_connector'] = SphereConnector(
            config_filename='config.json',
            include_mongo=True,
            include_redcap=False,
            sphere_logger=None)

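    # Workflow 1: create the 3-D coordinate plates.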
    workflow_id = "coord3d_plate_creation"
    if delete_existing_workflows:
        hyperstream.workflow_manager.delete_workflow(workflow_id)
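    # Reuse the workflow if it is already committed; otherwise create and commit it.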
    try:
        w = hyperstream.workflow_manager.workflows[workflow_id]
    except KeyError:
        w = create_workflow_coord_plate_creation(hyperstream, safe=False)
        hyperstream.workflow_manager.commit_workflow(workflow_id)

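    # Execute the workflow over the last minute of data.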
    time_interval = TimeInterval.now_minus(minutes=1)
    w.execute(time_interval)

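    # Workflow 2: the periodic summariser; window sizes are given in seconds.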
    workflow_id = "periodic_summaries"
    if delete_existing_workflows:
        hyperstream.workflow_manager.delete_workflow(workflow_id)
    try:
        w = hyperstream.workflow_manager.workflows[workflow_id]
    except KeyError:
        w = create_workflow_summariser(hyperstream,
                                       env_window_size=1 * 60 * 60.0,
                                       rss_window_size=4 * 60 * 60.0,
                                       acc_window_size=4 * 60 * 60.0,
                                       vid_window_size=4 * 60 * 60.0,
                                       pred_window_size=4 * 60 * 60.0,
                                       safe=False)
        hyperstream.workflow_manager.commit_workflow(workflow_id)

    time_interval = TimeInterval.now_minus(minutes=1)
    w.execute(time_interval)

    print('number of non_empty_streams: {}'.format(
        len(hyperstream.channel_manager.memory.non_empty_streams)))

# Example #2

def run(house, wearables, delete_existing_workflows=True, loglevel=logging.INFO):
    from hyperstream import HyperStream, TimeInterval, StreamNotFoundError
    from workflows.asset_splitter import split_sphere_assets
    from workflows.deploy_localisation_model import create_workflow_localisation_predict
    # from workflows.deploy_localisation_model_new_api import create_workflow_localisation_predict

    hyperstream = HyperStream(loglevel=loglevel, file_logger=None)
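    # Channel shorthands: D = mongo channel, A = assets channel.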
    D = hyperstream.channel_manager.mongo
    A = hyperstream.channel_manager.assets

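    # Latest list of selected experiment ids for this house, read from the assets channel.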
    experiment_ids = A.find_stream(name="experiments_selected", house=house).window(
        TimeInterval.up_to_now()).last().value

    experiment_ids_str = '_'.join(experiment_ids)
    workflow_id0 = "asset_splitter"
    workflow_id1 = "lda_localisation_model_predict_"+experiment_ids_str

    if delete_existing_workflows:
        hyperstream.workflow_manager.delete_workflow(workflow_id0)
        hyperstream.workflow_manager.delete_workflow(workflow_id1)

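    # Split the SPHERE assets for this house (the "asset_splitter" workflow).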
    split_sphere_assets(hyperstream, house)

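    # Reuse the committed prediction workflow if it exists; otherwise build and commit it.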
    try:
        w = hyperstream.workflow_manager.workflows[workflow_id1]
    except KeyError:
        w = create_workflow_localisation_predict(hyperstream, house=house, experiment_ids=experiment_ids, safe=False)
        hyperstream.workflow_manager.commit_workflow(workflow_id1)

    # def safe_purge(channel, stream_id):
    #     try:
    #         channel.purge_stream(stream_id)
    #     except StreamNotFoundError:
    #         pass

    # A.purge_node("wearables_by_house")
    # A.purge_node("access_points_by_house")
    # D.purge_node("predicted_locations_broadcasted")

    # for h in [1, 2, 1176, 1116]:
    #     safe_purge(A, StreamId(name="wearables_by_house", meta_data=(('house', h),)))
    #     safe_purge(A, StreamId(name="access_points_by_house", meta_data=(('house', h),)))
    #     for w in wearables:
    #         safe_purge(D, StreamId(name="predicted_locations_broadcasted", meta_data=(('house', h), ('wearable', w))))

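    # ti0 spans all history; ti1 (the last minute) is the interval actually executed.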
    ti0 = TimeInterval.up_to_now()
    ti1 = TimeInterval.now_minus(minutes=1)

    # ti0 = TimeInterval(MIN_DATE, parse("2016-12-02 17:14:25.075Z"))
    # ti1 = TimeInterval(start=ti0.end - timedelta(minutes=1), end=ti0.end)

    w.execute(ti1)

    print('number of non_empty_streams: {}'.format(
        len(hyperstream.channel_manager.memory.non_empty_streams)))

    from display_localisation_predictions import display_predictions
    display_predictions(hyperstream, ti1, house, wearables=wearables)


def run(house, wearables, loglevel=logging.CRITICAL):
    from hyperstream import HyperStream, TimeInterval
    from display_localisation_predictions import display_predictions

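    # Create the HyperStream instance once and cache it in `globs`.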
    if not globs['hyperstream']:
        globs['hyperstream'] = HyperStream(loglevel=loglevel, file_logger=None)

    display_predictions(globs['hyperstream'],
                        TimeInterval.now_minus(minutes=1), house, wearables)
    print()

    from display_access_points import display_access_points

    display_access_points(house=house)
    print()


def run(house, sync_approx_time, delete_existing_workflows=True, loglevel=logging.INFO):
    from hyperstream import HyperStream, TimeInterval
    from hyperstream.utils import duration2str
    from workflows.display_wearable_sync_events import create_workflow_list_wearable_sync_events
    from workflows.asset_splitter import split_sphere_assets
    from dateutil.parser import parse
    from datetime import timedelta

    hyperstream = HyperStream(loglevel=loglevel, file_logger=None)

    # Various channels (S = sphere, D = mongo, X = summary, M = memory, A = assets)
    S = hyperstream.channel_manager.sphere
    D = hyperstream.channel_manager.mongo
    X = hyperstream.channel_manager.summary
    M = hyperstream.channel_manager.memory
    A = hyperstream.channel_manager.assets

    # if delete_existing_workflows:
    #     hyperstream.workflow_manager.delete_workflow("asset_splitter")

    # split_sphere_assets(hyperstream, house)

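    # Re-create the "H" plate so that it covers all houses (complement of an empty value list).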
    hyperstream.plate_manager.delete_plate("H")
    hyperstream.plate_manager.create_plate(
        plate_id="H",
        description="All houses",
        meta_data_id="house",
        values=[],
        complement=True,
        parent_plate=None
    )

    workflow_id = "list_wearable_sync_events"

    if delete_existing_workflows:
        hyperstream.workflow_manager.delete_workflow(workflow_id)

    try:
        w = hyperstream.workflow_manager.workflows[workflow_id]
    except KeyError:
        w = create_workflow_list_wearable_sync_events(hyperstream, house, safe=False)
        hyperstream.workflow_manager.commit_workflow(workflow_id)
    # Superseded trial intervals, kept for reference (each was overwritten before use):
    # time_interval = TimeInterval.now_minus(minutes=1)
    # time_interval = TimeInterval(
    #     parse("2017-04-29 19:04:55.000Z"),
    #     parse("2017-04-29 19:05:10.000Z"))
    # time_interval = TimeInterval(  # DS350055.DS2 recording started 16:26:11, sync in 2s
    #     parse("2017-04-29 15:26:00.000Z"),
    #     parse("2017-04-29 15:27:00.000Z"))
    # A 2.1 2017-04-29 15:26:41.091000 --- +28s
    # A 2.2 2017-04-29 15:26:41.251000
    # A 2.5 2017-04-29 15:26:41.601000
    # A 2.7 2017-04-29 15:26:41.761000
    # A 2.0 2017-04-29 15:26:42.041000
    # A 2.8 2017-04-29 15:26:42.631000
    # A 2.0 2017-04-29 15:26:43.049001
    # A 3.8 2017-04-29 15:26:43.209000
    # A 2.9 2017-04-29 15:26:43.289000
    # time_interval = TimeInterval(  # DS350055.DS2 recording ended 16:34:09, sync in -5s
    #     parse("2017-04-29 15:34:25.000Z"),
    #     parse("2017-04-29 15:34:45.000Z"))
    # too gentle taps
    # time_interval = TimeInterval(  # DS350054.DS2 recording started 11:55:47, sync in 2s
    #     parse("2017-04-29 10:56:00.000Z"),
    #     parse("2017-04-29 10:56:30.000Z"))
    # A 2.1 2017-04-29 10:55:24.084000 --- -25s --- WRONG, should be ~ +25s
    # A 2.8 2017-04-29 10:55:24.244000
    # A 3.1 2017-04-29 10:55:24.514000
    # A 3.1 2017-04-29 10:55:24.654000
    # A 3.2 2017-04-29 10:55:25.044000
    # A 3.5 2017-04-29 10:55:25.174000
    # A 3.4 2017-04-29 10:55:25.524000
    # A 3.9 2017-04-29 10:55:25.604000
    # A 3.3 2017-04-29 10:55:25.684000
    # A 3.8 2017-04-29 10:55:25.964001
    # A 2.3 2017-04-29 10:55:26.124001
    # A 2.8 2017-04-29 10:55:26.294000
    # A 2.0 2017-04-29 10:55:26.374000
    # time_interval = TimeInterval(  # DS350054.DS2 recording ended 12:11:39, sync by wear D put hanging ~ -8s..-2s
    #     parse("2017-04-29 11:10:40.000Z"),
    #     parse("2017-04-29 11:12:00.000Z"))
    # C put hanging ~ 2017-04-29 11:11:41
    # D put hanging ~ 2017-04-29 11:11:52 --- +25s..+15s
    # time_interval = TimeInterval(
    #     parse("2017-04-30 09:38:00.000Z"),
    #     parse("2017-04-30 09:39:40.000Z"))

    # The interval actually executed: a 40-second window either side of the
    # user-supplied approximate sync time.
    time_centre = parse(sync_approx_time)
    time_interval = TimeInterval(
        time_centre - timedelta(seconds=40),
        time_centre + timedelta(seconds=40))

    w.execute(time_interval)

    return True

    # NOTE: everything below this return is unreachable; kept as disabled diagnostics.
    print('number of sphere non_empty_streams: {}'.format(len(S.non_empty_streams)))
    print('number of memory non_empty_streams: {}'.format(len(M.non_empty_streams)))

    # df = M.find_stream(name='experiments_dataframe', house=house).window().values()[0]

    # if len(df) > 0:
    if False:  # disabled; re-enabling also needs the `df = ...` line above, `import arrow` and `import pandas as pd`
        # arrow.get(x).humanize()
        # df['start'] = df['start'].map('{:%Y-%m-%d %H:%M:%S}'.format)
        df['duration'] = df['end'] - df['start']
        # List comprehensions rather than py2-style map(), which returns an iterator on Python 3.
        df['start'] = ['{:%Y-%m-%d %H:%M:%S}'.format(x) for x in df['start']]
        df['end'] = ['{:%Y-%m-%d %H:%M:%S}'.format(x) for x in df['end']]
        # df['duration'] = ['{:%Mmin %Ssec}'.format(x) for x in df['duration']]

        df['start_as_text'] = [arrow.get(x).humanize() for x in df['start']]
        df['duration_as_text'] = [duration2str(x) for x in df['duration']]

        pd.set_option('display.width', 1000)
        print(df[['id', 'start_as_text', 'duration_as_text', 'start', 'end', 'annotator']].to_string(index=False))
        return True
    else:
        print("DataFrame is empty")
        return False
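

# A minimal usage sketch (not from the original source): one hypothetical way to
# invoke the sync-event lister above; the house number and timestamp are made up.
# if __name__ == '__main__':
#     run(house=1, sync_approx_time='2017-04-30 09:38:50.000Z')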