def create_context(self):
    """Build the template context for the current setup-wizard step.

    Ensures the server has a setup step assigned (defaults to the first
    step), computes a progress percentage, and fills in the context
    entries needed by the current step's template.

    Returns
    -------
    dict
        Always contains 'step', 'progress' and 'url'; step-specific keys
        are added on top.
    """
    srv = get_server()
    # Initialise the wizard position on first visit.
    if srv.setup is None:
        srv.setup = SETUP_STEPS[0]
        srv.save()

    index = SETUP_STEPS.index(srv.setup)
    # NOTE(review): the denominator is len-2, so 100% is reached one step
    # before the last entry — presumably the final entry is a terminal
    # "done" marker; confirm against SETUP_STEPS.
    progress = round((index / (len(SETUP_STEPS) - 2)) * 100, 2)
    context = {'step': srv.setup, 'progress': progress, 'url': 'setup'}

    if srv.setup == SETUP_STEPS[0]:
        start_zero_conf_server()
    if srv.setup == SETUP_STEPS[1]:
        context['poll_int_list'] = settings.POLL_INTERVAL_LST
    elif srv.setup == SETUP_STEPS[2]:
        # Offer only Home Assistant devices that are not yet known locally.
        hass_devices = hass_rest.get_device_list(settings.HASS_API_URL,
                                                 srv.hass_api_token)
        dev_list = get_device_names()
        hass_devices = list(set(hass_devices).difference(set(dev_list)))
        context['hass_dev_list'] = hass_devices
        # Reuse the already-fetched list instead of querying a second time.
        context['aa_dev_list'] = dev_list
    elif srv.setup == SETUP_STEPS[3]:
        context['activity_list'] = Activity.objects.all()
    elif srv.setup == SETUP_STEPS[4]:
        hass_users = hass_rest.get_user_names(
            settings.HASS_API_URL,
            srv.hass_api_token,
        )
        hass_users = list(
            set(hass_users).difference(set(get_person_hass_names())))
        context['hass_user_list'] = hass_users
        context['aa_user_list'] = Person.objects.all()
    return context
def start(request):
    """Create a new dataset, attach it to the server and start logging.

    Also creates folders/files like
        /data/datasets/<datasetname>/activities_subject_<person>.csv
        /data/datasets/<datasetname>/devices.csv
        /data/datasets/<datasetname>/device_mapping.csv

    Parameters
    ----------
    request : HttpRequest
        POST request carrying the dataset ``name``.

    Returns
    -------
    bool
        True if the experiment was started, False if a dataset with
        that name already exists.
    """
    ds_name = request.POST.get("name", "")

    # Refuse duplicate names. Catch only the expected "not found" case;
    # a bare except here would hide real database errors.
    try:
        Dataset.objects.get(name=ds_name)
        return False
    except Dataset.DoesNotExist:
        pass

    # 1. create the dataset record
    dataset_folder = settings.DATASET_PATH + ds_name + '/'
    ds = Dataset(name=ds_name,
                 start_time=get_current_time(),
                 path_to_folder=dataset_folder)
    ds.save()

    srv = get_server()
    srv.dataset = ds
    srv.save()

    # 2. create folders and initial files
    Path(ds.path_to_folder).mkdir(mode=0o777, parents=True, exist_ok=False)
    create_data_file(ds.path_to_folder)
    create_device_mapping_file(ds.path_to_folder)
    create_activity_mapping_file(ds.path_to_folder)
    # TODO save prior information about persons
    # TODO save room assignments of sensors and activities

    # 3. mark all smartphones dirty and delete existing activity files
    for person in Person.objects.all():
        person.reset_activity_file()
        if hasattr(person, 'smartphone') and person.smartphone is not None:
            person.smartphone.synchronized = False
            person.smartphone.save()
        # create fresh per-person statistics for this dataset
        ps = PersonStatistic(name=person.name, dataset=ds)
        person.person_statistic = ps
        person.person_statistic.save()
        person.save()

    # 4. start the logging service that polls data from Home Assistant
    start_updater_service()
    return True
def create_context(self, request):
    """Build the context for the main experiment page.

    Collects persons, datasets, activities, device names and the state
    of the currently attached dataset (if any).
    """
    context = {}
    context['person_list'] = Person.objects.all()
    context['dataset_list'] = Dataset.objects.all()
    context['activity_list'] = Activity.objects.all()

    srv = get_server()
    context['service_plot_gen'] = (srv.plot_gen_service_pid is not None)

    # Best effort: the statistics are optional for rendering, but avoid a
    # bare except that would also swallow KeyboardInterrupt/SystemExit.
    try:
        context['datasets_perstats'] = get_datasets_personal_statistics()
    except Exception:
        pass

    if srv.dataset is not None:
        context['dataset'] = srv.dataset
        context['experiment_running'] = True
        context['polling'] = srv.is_polling
        context['num_persons'] = len(context['person_list'])
        context['num_activities'] = len(context['activity_list'])
    else:
        context['experiment_running'] = False

    dev_lst = get_device_names()
    context['device_lst'] = dev_lst
    context['num_devs'] = len(dev_lst)
    return context
def conf_server(request):
    """Update server settings: poll interval and external address.

    Parameters
    ----------
    request : HttpRequest
        POST request that may carry ``poll_interval`` and ``address``.

    Returns
    -------
    (bool, str or None)
        (True, None) on success; (False, LOCAL_URL_PROVIDED) or
        (False, INVALID_ADDRESS_PROVIDED) when the address is rejected.
    """
    srv = get_server()

    # The poll interval is optional; ignore a malformed/missing value.
    try:
        srv.poll_interval = request.POST.get("poll_interval", "")
    except Exception:
        pass
    srv.save()

    try:
        address = request.POST.get("address", "")
        # Guard clauses instead of nested conditionals.
        if not input_is_valid_address(address):
            return False, INVALID_ADDRESS_PROVIDED
        if input_is_local_address(address):
            return False, LOCAL_URL_PROVIDED
        srv.server_address = url_strip_appendix(address)
        srv.save()
        return (True, None)
    except Exception:
        # Preserve historical best-effort behavior: failures while
        # handling the address are treated as success.
        return (True, None)
def get_context(self):
    """Collect everything the configuration page template needs.

    Merges local persons, activities and devices with the Home
    Assistant users/devices that have not been imported yet.
    """
    server = get_server()
    persons = Person.objects.all()
    activities = Activity.objects.all()
    running = experiment.is_active()
    refresh_hass_token()

    # Devices known to Home Assistant but not yet registered here.
    remote_devices = hass_rest.get_device_list(settings.HASS_API_URL,
                                               server.hass_api_token)
    known_devices = get_device_names()
    new_devices = list(set(remote_devices) - set(known_devices))

    # Same for users.
    remote_users = hass_rest.get_user_names(settings.HASS_API_URL,
                                            server.hass_api_token,)
    new_users = list(set(remote_users) - set(get_person_names()))

    return {
        'server': server,
        'url': 'config',
        'person_list': persons,
        'hass_dev_list': new_devices,
        'aa_dev_list': known_devices,
        'activity_list': activities,
        'hass_user_list': new_users,
        'aa_user_list': persons,
        'poll_int_list': settings.POLL_INTERVAL_LST,
        'experiment_active': running,
    }
def get_datasets_personal_statistics():
    """Build per-dataset statistic dicts for template display.

    For every dataset (except the one currently being recorded) a dict
    with the dataset name, number of persons, number of activities and
    total number of recorded activities is created.

    Returns
    -------
    list of dict
    """
    srv = get_server()
    hackdct = []
    for ds in Dataset.objects.all():
        # Skip the dataset of the running experiment.
        if srv.dataset is not None and srv.dataset.name == ds.name:
            continue
        tmp = {}
        tmp['ds_name'] = ds.name
        person_stats = ds.person_statistics.all()
        tmp['num_persons'] = len(person_stats)
        # Single pass over the statistics (the original iterated twice).
        try:
            num_rec_acts = 0
            for ps in person_stats:
                tmp['num_activities'] = ps.num_activities
                num_rec_acts += ps.num_recorded_activities
            tmp['num_recorded_activities'] = num_rec_acts
        except TypeError:
            # The statistic has not been evaluated yet, so this
            # quantity must not be set to a number.
            tmp['num_recorded_activities'] = None
        hackdct.append(tmp)
    return hackdct
def finish():
    """Close out the running experiment and detach its dataset."""
    from frontend.util import collect_data_from_hass

    # One final poll so the dataset contains the latest events.
    collect_data_from_hass()

    # Detach the dataset from the server record.
    server = get_server()
    dataset = server.dataset
    server.dataset = None
    server.save()

    # Mark the dataset itself as finished.
    dataset.logging = False
    dataset.end_time = get_current_time()
    dataset.save()

    # Detach per-person statistics from their persons.
    for person in Person.objects.all():
        person.person_statistic = None
        person.save()

    # Move the per-person activity files into the dataset folder.
    copy_actfiles2dataset(dataset)
    stop_updater_service()
def pause():
    """Pause the experiment.

    Flags the active dataset as not logging and stops the service that
    polls Home Assistant, so no more events are recorded.
    """
    dataset = get_server().dataset
    dataset.logging = False
    dataset.save()
    stop_updater_service()
def post_step1(self, request):
    """Setup step 1: persist the selected poll interval and advance.

    Parameters
    ----------
    request : HttpRequest
        POST request carrying ``poll_interval``.

    Raises
    ------
    ValueError
        If the submitted interval is not one of the allowed values.
    """
    p_int = str(request.POST.get("poll_interval", ""))
    # Validate explicitly: an `assert` would be stripped under `python -O`.
    if p_int not in settings.POLL_INTERVAL_LST:
        raise ValueError('invalid poll interval: {!r}'.format(p_int))
    srv = get_server()
    srv.poll_interval = p_int
    srv.save()
    self._increment_one_step()
def generate_qr_code_data(self, person):
    """Return the JSON payload encoded into a person's QR code.

    Parameters
    ----------
    person : Person
        The person the QR code is generated for; ``name`` and ``id``
        are embedded in the payload.

    Returns
    -------
    str
        A JSON document with the person, credentials and API urls.
    """
    import json
    url = get_server().server_address
    # SECURITY(review): credentials are hard-coded placeholders — these
    # should come from configuration, not source code.
    payload = {
        "person": person.name,
        "username": 'admin',
        "password": 'asdf',
        "url_person": "persons/{}/".format(person.id),
        "url_api": "{}/{}/".format(url, settings.REST_API_URL),
    }
    # Serialize properly instead of hand-concatenating JSON fragments.
    return json.dumps(payload)
def get_status():
    """Return the experiment status of the server.

    Returns
    -------
    str
        'not_running' when no dataset is attached, otherwise 'running'
        or 'paused' depending on whether polling is active.
    """
    server = get_server()
    if server.dataset is None:
        return 'not_running'
    return 'running' if server.is_polling else 'paused'
def generate_analysis(self, request):
    """Compute statistics and kick off plot generation for a dataset."""
    ds = Dataset.objects.get(name=request.POST.get("dataset_name", ""))
    server = get_server()
    # If this dataset is the one currently recording, flush the most
    # recent activity files and Home Assistant data first.
    if server.dataset is not None and server.dataset.id == ds.id:
        copy_actfiles2dataset(ds)
        collect_data_from_hass()
    collect_dataset_statistics(ds)
    set_placeholder_images(ds)
    start_plot_gen_service(ds)
def generate_qr_code_data(self, person):
    """Assemble the JSON-like payload embedded in a person's QR code.

    SECURITY(review): the credentials below are hard-coded placeholders.
    """
    base_url = get_server().server_address
    person_path = 'persons/' + str(person.id) + '/'
    api_path = base_url + '/api/v1/'
    fragments = [
        "{",
        "\"person\" : \"{}\" , ".format(person.name),
        "\"username\" : \"{}\" , ".format('admin'),
        "\"password\" : \"{}\" , ".format('asdf'),
        "\"url_person\" : \"{}\" ,".format(person_path),
        "\"url_api\" : \"{}\"".format(api_path),
        "}",
    ]
    return "".join(fragments)
def kill_plot_get_service(dataset):
    """Terminate the plot-generation background process, if one is recorded.

    Sends SIGTERM to the stored pid and clears it from the server record.

    Parameters
    ----------
    dataset : model.Dataset
        Unused; kept for call-site compatibility.
    """
    import os
    import signal
    srv = get_server()
    pid = srv.plot_gen_service_pid
    if pid is not None:
        try:
            os.kill(pid, signal.SIGTERM)
        except ProcessLookupError:
            # The process exited on its own; only the stale pid remains.
            logger.error('process plot gen already deleted')
        srv.plot_gen_service_pid = None
        srv.save()
def export_data(self, request):
    """Return a file response containing the requested dataset's data.

    If the requested dataset is the one currently being recorded, its
    activity files and Home Assistant data are synchronised first.
    """
    ds = Dataset.objects.get(name=request.POST.get("dataset_name", ""))
    server = get_server()
    try:
        if ds.id == server.dataset.id:
            copy_actfiles2dataset(ds)
            collect_data_from_hass()
    except AttributeError:
        # No dataset attached to the server — nothing extra to sync.
        pass
    return ds.get_fileResponse()
def create_context(self, request):
    """Build the context for the dataset-detail page.

    The dataset is identified via the id extracted from the request.
    """
    server = get_server()
    ds = Dataset.objects.get(pk=int(self._getDatasetId(request)))
    return {
        'person_list': Person.objects.all(),
        'dataset': ds,
        'ds': ds,
        'person_statistics': ds.person_statistics.all(),
        'datasets_perstats': get_datasets_personal_statistics(),
        'service_plot_gen': server.plot_gen_service_pid is not None,
    }
def get(self, request):
    """Webhook endpoint hit by the Home Assistant component.

    Counts the call, self-registers the component on first contact and
    triggers a data pull while an experiment is running.
    """
    server = get_server()
    server.webhook_count += 1
    server.save()

    if not server.hass_comp_installed:
        # A webhook arriving proves the HASS component is present.
        self.enable_hass_comp()
        payload = {'state': 'success'}
    elif experiment.get_status() == "running":
        collect_data_from_hass()
        payload = {'state': ''}
    else:
        payload = {}
    return JsonResponse(payload)
def conf_server(request):
    """Update the server's poll interval and (optionally) its address.

    Empty address values are ignored; both updates are best effort.
    """
    srv = get_server()
    # Narrowed from a bare except: still best-effort, but no longer
    # masks SystemExit/KeyboardInterrupt.
    try:
        srv.poll_interval = request.POST.get("poll_interval", "")
    except Exception:
        pass
    try:
        address = request.POST.get("address", "")
        if address != '':
            srv.server_address = address
    except Exception:
        pass
    srv.save()
def start_plot_gen_service(dataset):
    """Spawn the plot-generation process for *dataset* and remember its pid.

    Parameters
    ----------
    dataset : model.Dataset
        The dataset whose id is handed to the service.
    """
    import subprocess

    server = get_server()
    cmd = [
        "python3", settings.PLOT_GEN_SERVICE_PATH,
        '--dataset-id', str(dataset.id),
    ]
    if settings.DEBUG:
        cmd.append('--debug')
    process = subprocess.Popen(cmd, close_fds=True)
    server.plot_gen_service_pid = process.pid
    server.save()
def create_context(self, request):
    """Build the overview-page context: datasets, persons and devices."""
    server = get_server()
    context = {
        'person_list': Person.objects.all(),
        'dataset_list': Dataset.objects.all(),
        'activity_list': Activity.objects.all(),
        'datasets_perstats': get_datasets_personal_statistics(),
    }
    if server.dataset is not None:
        context['dataset'] = server.dataset
        context['experiment_running'] = True
        context['polling'] = server.is_polling
    else:
        context['experiment_running'] = False
    devices = get_device_names()
    context['device_lst'] = devices
    context['num_devs'] = len(devices)
    return context
def get_datasets_personal_statistics():
    """Build per-dataset statistic dicts for template display.

    For every dataset (except the one currently being recorded) a dict
    with the dataset name, number of persons, number of activities and
    total number of recorded activities is created.

    Returns
    -------
    list of dict
    """
    srv = get_server()
    hackdct = []
    for ds in Dataset.objects.all():
        # Skip the dataset of the running experiment.
        if srv.dataset is not None and srv.dataset.name == ds.name:
            continue
        tmp = {}
        tmp['ds_name'] = ds.name
        tmp['num_persons'] = len(ds.person_statistics.all())
        # Guard against statistics that have not been evaluated yet
        # (num_recorded_activities is None → TypeError on +=); the
        # sibling implementation of this function already handles this.
        try:
            num_rec_acts = 0
            for ps in ds.person_statistics.all():
                tmp['num_activities'] = ps.num_activities
                num_rec_acts += ps.num_recorded_activities
            tmp['num_recorded_activities'] = num_rec_acts
        except TypeError:
            tmp['num_recorded_activities'] = None
        hackdct.append(tmp)
    return hackdct
def post_step0(self, request):
    """Setup step 0: obtain token, base url and time zone from HASS.

    Refreshes the REST API token, queries Home Assistant's discovery
    info for the externally reachable base url and the configured time
    zone, stops the zeroconf announcement and advances the wizard.
    """
    # Get the REST API key first so the calls below are authorised.
    refresh_hass_token()

    server = get_server()
    discovery = hass_rest.get(settings.HASS_API_URL + '/discovery_info',
                              server.hass_api_token)
    server.server_address = discovery['base_url']
    server.time_zone = hass_rest.get_time_zone(settings.HASS_API_URL,
                                               server.hass_api_token)
    server.save()

    # NOTE(review): advancing used to be guarded by
    # `srv.hass_comp_installed`; that guard is currently disabled.
    stop_zero_conf_server()
    self._increment_one_step()
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'act_assist.settings') else: logger.info('running in production mode') sys.path.append('/opt/activity_assistant/web') sys.path.append('/etc/opt/activity_assistant/') os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings') import django django.setup() from backend.models import Dataset import settings from frontend.util import get_person_names, get_server from pyadlml.dataset import load_act_assist try: logger.info('loading dataset...') dataset = Dataset.objects.get(id=args.dataset_id) data = load_act_assist(dataset.path_to_folder, get_person_names()) logger.info('generating plots for persons...') gen_plots_persons(dataset, data) logger.info('generating plots for devices...') gen_plots_devices(dataset, data) except KeyboardInterrupt: pass finally: logger.info('wrapping up service...') srv = get_server() srv.plot_gen_service_pid = None srv.save() logger.info('exited')
def resume():
    """Resume a paused experiment: re-enable logging and restart polling."""
    dataset = get_server().dataset
    dataset.logging = True
    dataset.save()
    start_updater_service()
def is_active():
    """Return True while an experiment is running (a dataset is attached)."""
    srv = get_server()
    # Idiomatic identity test instead of `not x is None`.
    return srv.dataset is not None
def enable_hass_comp(self):
    """Record that the Home Assistant companion component is installed."""
    server = get_server()
    server.hass_comp_installed = True
    server.save()
def _increment_one_step(self):
    """Advance the server's setup wizard to the next step.

    NOTE(review): raises IndexError when already on the final step —
    presumably callers never advance past it; confirm.
    """
    server = get_server()
    current = SETUP_STEPS.index(server.setup)
    server.setup = SETUP_STEPS[current + 1]
    server.save()