# NOTE(review): this chunk appears to be the tail of a
# `run(delete_existing_workflows, loglevel)` function whose `def` line is
# outside this view -- the statements below read `delete_existing_workflows`,
# and the __main__ guard calls `run(...)`.  Indentation reconstructed from the
# flattened source; confirm against the full file.

# Name of the workflow this script (re)creates and executes.
workflow_id = "periodic_meta_summaries"

# Optionally drop any previously committed copy so the workflow is rebuilt
# from scratch on this execution.
if delete_existing_workflows:
    hyperstream.workflow_manager.delete_workflow(workflow_id)

# Reuse the already-committed workflow if present; otherwise build it with
# the meta-summariser factory and commit it (EAFP on the KeyError).
try:
    w = hyperstream.workflow_manager.workflows[workflow_id]
except KeyError:
    w = create_workflow_meta_summariser(hyperstream, safe=False)
    hyperstream.workflow_manager.commit_workflow(workflow_id)

# Execute the workflow over the last two days.
time_interval = TimeInterval.now_minus(days=2)
w.execute(time_interval)

M = hyperstream.channel_manager.memory

# CONSISTENCY FIX: use the local alias `M` assigned above instead of
# repeating the full `hyperstream.channel_manager.memory` attribute chain
# (it also stops `M` from being an unused local).
print('number of non_empty_streams: {}'.format(len(M.non_empty_streams)))
# BUG FIX: was the Python 2 print statement `print 12345` -- a syntax error
# under Python 3 and inconsistent with the parenthesized print() call above.
# The function form prints the same text on both Python 2 and 3.
print(12345)

if __name__ == '__main__':
    import sys
    from os import path

    # Make the package root importable when this file is run as a script.
    sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
    from sphere_plugins.sphere.utils import ArgumentParser

    args = ArgumentParser.logging_parser(default_loglevel=logging.INFO)
    run(delete_existing_workflows=True, loglevel=args.loglevel)
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.

import logging
import sys
from os import path


def run(house, loglevel=logging.INFO):
    """Split the SPHERE assets for a single house.

    Opens a HyperStream session at the requested log level and forwards it,
    together with *house*, to ``split_sphere_assets``.

    :param house: house identifier passed through to ``split_sphere_assets``
    :param loglevel: logging level for the HyperStream session
                     (default: ``logging.INFO``)
    """
    # NOTE(review): these imports are function-local in the original;
    # kept that way so importing this module stays side-effect free.
    from hyperstream import HyperStream
    from workflows.asset_splitter import split_sphere_assets

    hyperstream = HyperStream(loglevel=loglevel, file_logger=None)
    split_sphere_assets(hyperstream, house=house)


if __name__ == '__main__':
    # Make the package root importable when this file is run as a script.
    sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
    from sphere_plugins.sphere.utils import ArgumentParser

    args = ArgumentParser.wearable_list_parser(default_loglevel=logging.INFO)
    # BUG FIX: removed a dead `delete_existing_workflows = True` assignment --
    # nothing in this script read it (it is neither passed to run() nor used).
    run(args.house, args.loglevel)
# NOTE(review): this chunk begins mid-expression -- `format(model_name))`
# closes a print(...).format(...) call whose opening lines are outside this
# view -- and the statements below reference `experiment_ids` and `model`,
# which must be defined earlier in an enclosing function (the block ends with
# `return True`).  Indentation reconstructed; confirm against the full file.
        format(model_name))
    # Print the stored performance figures for each experiment id:
    # accuracy, macro/micro F1 and the confusion matrix, all read from
    # model['performance'][experiment_id].
    for experiment_id in list(experiment_ids):
        print("Experiment id: {}".format(experiment_id))
        print("Time interval: {}".format(
            reconstruct_interval(experiment_id)))
        print("Accuracy: {}".format(
            pformat(model['performance'][experiment_id]['accuracy'])))
        print("Macro F1: {}".format(
            pformat(
                model['performance'][experiment_id]['f1_score_macro'])))
        print("Micro F1: {}".format(
            pformat(
                model['performance'][experiment_id]['f1_score_micro'])))
        print("Confusion Matrix:")
        pprint(model['performance'][experiment_id]['confusion_matrix'])
        # Blank line between experiments for readability.
        print("")
    return True


if __name__ == '__main__':
    from os import path

    # Make the package root importable when this file is run as a script.
    sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
    from sphere_plugins.sphere.utils import ArgumentParser

    args = ArgumentParser.technician_selection_parser(
        default_loglevel=logging.INFO)
    # NOTE(review): `map(int, ...)` returns a list on Python 2 but a lazy
    # iterator on Python 3 -- this codebase appears to target Python 2
    # (see the `print` statement elsewhere in the file); verify before
    # porting.
    run(house=args.house,
        selection=map(int, args.technicians_selection),
        loglevel=args.loglevel)
# df = M.find_stream(name='experiments_dataframe', house=house).window().values()[0] # if len(df) > 0: if False: # arrow.get(x).humanize() # df['start'] = df['start'].map('{:%Y-%m-%d %H:%M:%S}'.format) df['duration'] = df['end'] - df['start'] df['start'] = map(lambda x: '{:%Y-%m-%d %H:%M:%S}'.format(x), df['start']) df['end'] = map(lambda x: '{:%Y-%m-%d %H:%M:%S}'.format(x), df['end']) # df['duration'] = map(lambda x:'{:%Mmin %Ssec}'.format(x),df['duration']) df['start_as_text'] = map(lambda x: arrow.get(x).humanize(), df['start']) df['duration_as_text'] = map(lambda x: duration2str(x), df['duration']) pd.set_option('display.width', 1000) print(df[['id', 'start_as_text', 'duration_as_text', 'start', 'end', 'annotator']].to_string(index=False)) return True else: print("DataFrame is empty") return False if __name__ == '__main__': import sys from os import path sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__)))) from sphere_plugins.sphere.utils import ArgumentParser args = ArgumentParser.wearable_tap_sync_parser(default_loglevel=logging.INFO) run(args.house, args.time, delete_existing_workflows=True, loglevel=args.loglevel)