def simple_sensorHub(params):
    """Export Sensor Hub data to CSV, build its PDF graphic and email the links.

    Downloads the database named in params['filename'], converts it to CSV via
    ExportSensorHub, renders a PDF via SensorHubGraphic and mails links to both
    artifacts.  Returns utils.response_email(...)'s result, or the result of
    utils.get_error(...) on the first failing step.
    """
    params['subject'] = "SensorHub Graphic "
    # NOTE(review): unlike filter_simple_wifi, the body starts empty instead of
    # utils.show_info(params) — confirm whether that is intentional.
    email_response = ""
    path_real, database_name = utils.download_database(params['filename'], full_path = False)
    # Database name without its extension, used to derive artifact names.
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Exporting data (Sensor Hub) ..."
        export_csv ="SensorHub_%s" % (only_name+".csv")
        ExportSensorHub().run(path_real+database_name, path_real+export_csv)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion",params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(path_real),str(export_csv))+"\n\n\n"
    email_response += time_txt
    try:
        email_response += "Building Graphic (Sensor Hub) ..."
        path_graphic ="sensor_hub_%s" % (only_name+".pdf")
        SensorHubGraphic().run(path_real+export_csv,path_real+path_graphic)
    except Exception as e:
        return utils.get_error(e, params)
    # NOTE(review): params["a_func"] is not reset before this step, so the
    # "Graphic creation" timing includes the CSV step — compare exec_ML_1,
    # which resets a_func between phases.
    time_txt = utils.end_func("Graphic creation",params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(path_real),str(path_graphic))+"\n\n\n"
    email_response += time_txt
    email_response += utils.end_func("All process",params["all_time"])
    return utils.response_email(params['email'],params['subject'], email_response)
def setup(self):
    """Prepare fixtures: an address pool, a dummy interface and an IPDB."""
    # Template for generated interface names.
    self.iftmp = 'pr2x{0}'
    # Pool of addresses handed out during the test run.
    self.ap = AddrPool()
    # Indices of interfaces created by the tests.
    self.ifaces = []
    # Create the dummy link the tests will operate on.
    self.ifd = self.get_ifname()
    create_link(self.ifd, kind='dummy')
    self.ip = IPDB(mode=self.mode)
def setup(self):
    """Spin up two remote uplink processes (target and proxy) and connect an
    IPRoute client to the target through the proxy."""
    # Dummy interface the tests will manipulate.
    create_link('dummyX', 'dummy')
    # Abstract unix socket addresses (leading NUL) for the two uplinks.
    t_url = 'unix://\0%s' % (uuid.uuid4())
    p_url = 'unix://\0%s' % (uuid.uuid4())
    self.connect = Event()
    self.release = Event()
    target = Process(target=_run_remote_uplink, args=(t_url, self.connect, self.release))
    target.daemon = True
    target.start()
    # Wait until the target uplink signals readiness, then reuse the event
    # for the proxy below.
    self.connect.wait()
    self.connect.clear()
    proxy = Process(target=_run_remote_uplink, args=(p_url, self.connect, self.release))
    proxy.daemon = True
    proxy.start()
    self.connect.wait()
    self.connect.clear()
    self.ip = IPRoute(do_connect=False)
    # Connect to the proxy first, register it, then reach the target via it.
    link, proxy = self.ip.connect(p_url)
    self.ip.register('bala', proxy)
    link, host = self.ip.connect(t_url, addr=proxy)
    # Discover the netlink service on the target and route requests to it.
    service = self.ip.discover(self.ip.default_target, addr=host)
    self.ip.default_peer = host
    self.ip.default_dport = service
    self.dev = self.ip.link_lookup(ifname='dummyX')
def filter_simple_wifi(params):
    """Export WIFI data filtered by a MAC white/blacklist and build its graphic.

    Downloads the database named in params['filename'], exports the filtered
    WIFI records to CSV via ExportWifi, renders a PDF via WifiGraphic and
    emails links to both artifacts.  Returns utils.response_email(...)'s
    result, or utils.get_error(...) on the first failing step.
    """
    params['subject'] = "Filter WIFI Graphic "
    email_response = utils.show_info(params) + "\n"
    path_real, database_name = utils.download_database(params['filename'], full_path = False)
    # Database name without its extension, used to derive artifact names.
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Exporting data (WIFI) ..."
        export_csv ="filter_wifi%s" % (only_name+".csv")
        # parse_wifi_list splits the user list into MACs and display names.
        wifi_list_mac, wifi_list_name = utils.parse_wifi_list(params['wifi_list'])
        ExportWifi(wifi_list_mac,wifi_list_name,params['is_blacklist']).run(path_real+database_name,path_real+export_csv)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion",params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(path_real),str(export_csv))+"\n\n\n"
    email_response += time_txt
    try:
        # BUG FIX: the progress message previously said "(Bluetooth)" —
        # this step builds the WIFI graphic.
        email_response += "Building Graphic (WIFI) ..."
        path_graphic ="filter_wifi_%s" % (only_name+".pdf")
        WifiGraphic().run(path_real+export_csv,path_real+path_graphic)
    except Exception as e:
        return utils.get_error(e, params)
    # NOTE(review): a_func is not reset here, so "Graphic creation" timing
    # includes the CSV step — confirm against exec_ML_1, which resets it.
    time_txt = utils.end_func("Graphic creation",params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(path_real),str(path_graphic))+"\n\n\n"
    email_response += time_txt
    email_response += utils.end_func("All process",params["all_time"])
    return utils.response_email(params['email'],params['subject'], email_response)
def setup(self):
    """Serve a local IPRoute uplink on an abstract unix socket and connect."""
    create_link('dummyX', 'dummy')
    # Abstract (NUL-prefixed) unix socket with a unique name.
    sock_addr = 'unix://\0%s' % uuid.uuid4()
    self.uplink = IPRoute()
    self.uplink.serve(sock_addr)
    self.ip = IPRoute(host=sock_addr)
    self.dev = self.ip.link_lookup(ifname='dummyX')
def test_remove_link(self):
    """A removed link must no longer be resolvable by name."""
    require_user('root')
    create_link('bala', 'dummy')
    index = self.ip.link_lookup(ifname='bala')[0]
    try:
        self.ip.link_remove(index)
    except NetlinkError:
        # Tolerated here; the assertion below is the real check.
        pass
    assert len(self.ip.link_lookup(ifname='bala')) == 0
def main():
    """Entry point: prepare the dataset link, then train and/or test."""
    args = get_args()
    create_link(args.dataset_dir)
    if args.training:
        print("Training")
        gan = md.cycleGAN(args)
        gan.train(args)
    if args.testing:
        print("Testing")
        tst.test(args)
def test_route(self):
    """Adding a gateway route must show up in `ip route show`."""
    require_user('root')
    create_link('bala', 'dummy')
    index = self.ip.link_lookup(ifname='bala')[0]
    self.ip.link('set', index=index, state='up')
    self.ip.addr('add', index, address='172.16.0.2', mask=24)
    self.ip.route('add',
                  prefix='172.16.1.0',
                  mask=24,
                  gateway='172.16.0.1')
    assert grep('ip route show', pattern='172.16.1.0/24.*172.16.0.1')
    remove_link('bala')
def setup(self):
    """Start a remote uplink process and connect an IPRoute client to it."""
    create_link('dummyX', 'dummy')
    sock_addr = 'unix://\0%s' % uuid.uuid4()
    self.connect = Event()
    self.release = Event()
    uplink = Process(target=_run_remote_uplink,
                     args=(sock_addr, self.connect, self.release))
    uplink.daemon = True
    uplink.start()
    # Block until the uplink process reports it is serving.
    self.connect.wait()
    self.ip = IPRoute(host=sock_addr)
    self.dev = self.ip.link_lookup(ifname='dummyX')
def setup(self):
    """Serve an SSL-wrapped uplink and connect with client credentials."""
    create_link('dummyX', 'dummy')
    sock_addr = 'unix+%s://\0%s' % (self.ssl_proto, uuid.uuid4())
    self.uplink = IPRoute()
    self.uplink.serve(sock_addr,
                      key='server.key',
                      cert='server.crt',
                      ca='ca.crt')
    self.ip = IPRoute(host=sock_addr,
                      key='client.key',
                      cert='client.crt',
                      ca='ca.crt')
    self.dev = self.ip.link_lookup(ifname='dummyX')
def test_route_table_2048(self):
    """A route added to table 2048 must appear in that table's listing."""
    require_user('root')
    create_link('bala', 'dummy')
    index = self.ip.link_lookup(ifname='bala')[0]
    self.ip.link('set', index=index, state='up')
    self.ip.addr('add', index, address='172.16.0.2', mask=24)
    self.ip.route('add',
                  prefix='172.16.1.0',
                  mask=24,
                  gateway='172.16.0.1',
                  table=2048)
    assert grep('ip route show table 2048',
                pattern='172.16.1.0/24.*172.16.0.1')
    remove_link('bala')
def main():
    """Entry point: parse GPU ids from the CLI and run cycleGAN training."""
    args = get_args()
    create_link(args.dataset_dir)
    # Parse the comma-separated GPU list, keeping only non-negative ids.
    # (The previous loop variable `id` shadowed the builtin of that name.)
    args.gpu_ids = [gpu_id
                    for gpu_id in map(int, args.gpu_ids.split(','))
                    if gpu_id >= 0]
    print(not args.no_dropout)
    md = model.cycleGAN(args)
    md.train(args)
def main(args):
    """Entry point: use every visible CUDA device, then train / sample."""
    create_link(args.dataset_dir)
    # One entry per visible CUDA device.
    args.gpu_ids = list(range(torch.cuda.device_count()))
    if args.training:
        print('Training')
        gan = md.cycleGAN(args)
        gan.train(args)
    if args.gen_samples:
        print('Generating samples')
        gen_samples.gen_samples(args, 'last')
def main():
    """Entry point: parse GPU ids, then run training and/or testing."""
    args = get_args()
    create_link(args.dataset_dir)
    # Keep only non-negative GPU ids; avoids shadowing the builtin `id`
    # as the previous loop did.
    args.gpu_ids = [gpu_id
                    for gpu_id in map(int, args.gpu_ids.split(','))
                    if gpu_id >= 0]
    print(not args.no_dropout)
    if args.training:
        print("Training")
        model = tn.cycleGAN(args)
        model.train(args)
    if args.testing:
        print("Testing")
        tst.test(args)
def setup(self):
    """Chain two in-process IPRoute uplinks (target -> proxy) and connect a
    client through the proxy."""
    create_link('dummyX', 'dummy')
    # Abstract unix socket addresses for the target and the proxy.
    t_url = 'unix://\0%s' % (uuid.uuid4())
    p_url = 'unix://\0%s' % (uuid.uuid4())
    self.uplink = IPRoute()
    self.uplink.serve(t_url)
    # The proxy connects to the target and re-serves on its own socket.
    self.proxy = IPRoute(host=t_url)
    self.proxy.serve(p_url)
    self.ip = IPRoute(host=p_url)
    # Discover the target's service through the proxy and route to it.
    service = self.ip.discover(self.ip.default_target, addr=self.proxy.default_peer)
    self.ip.default_peer = self.proxy.default_peer
    self.ip.default_dport = service
    self.dev = self.ip.link_lookup(ifname='dummyX')
def setup(self):
    """Start an SSL-enabled remote uplink process and connect to it with
    client credentials."""
    create_link('dummyX', 'dummy')
    # Abstract unix socket wrapped in the SSL protocol under test.
    url = 'unix+%s://\0%s' % (self.ssl_proto, uuid.uuid4())
    self.connect = Event()
    self.release = Event()
    # The child process serves with the server key/cert/CA files.
    target = Process(target=_run_remote_uplink, args=(url, self.connect, self.release, 'server.key', 'server.crt', 'ca.crt'))
    target.daemon = True
    target.start()
    # Wait until the child signals it is ready to accept connections.
    self.connect.wait()
    self.ip = IPRoute(host=url, key='client.key', cert='client.crt', ca='ca.crt')
    self.dev = self.ip.link_lookup(ifname='dummyX')
def E_get_database(params):
    """Download the database referenced by params and record its link.

    Mutates params in place: appends progress text to
    params["email_response"], sets params["only_database_name"] (name without
    extension) and params["all_time"].  Re-raises any download error after
    logging it via utils.get_error.
    """
    try:
        params["email_response"] += "download database ..."
        # Presumably fills params['path_real'] and params['database_name'],
        # which are read below — confirm against utils.download_database_full.
        utils.download_database_full(params, full_path = False)
    except Exception as e:
        utils.get_error(e, params)
        raise
    params["email_response"] += "OK\n"
    params["email_response"] += "\n Database = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['database_name']))+"\n\n\n"
    # Database name with its extension stripped, reused by later steps.
    params["only_database_name"] = params['database_name'][:params['database_name'].rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
def E_graphic(params): params["a_func"] = utils.get_datetime() #TIME try: params["email_response"] += "Creating graphic..." params['pdfgraphic'] = (params["only_database_name"]+".pdf").replace("(","").replace(")","") print "Rscript machine_learning/pdf/pdf_lines.R \""+params['path_real']+params['csvcluster']+"\" \""+params['path_real']+params['pdfgraphic']+"\"" os.system("Rscript machine_learning/pdf/pdf_lines.R \""+params['path_real']+params['csvcluster']+"\" \""+params['path_real']+params['pdfgraphic']+"\"") except Exception as e: utils.get_error(e, params) raise time_txt = utils.end_func("Graphic creation",params["a_func"]) params["email_response"] += "OK\n" params["email_response"] += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['pdfgraphic']))+"\n\n" params["email_response"] += time_txt
def E_clustering(df,params): params["a_func"] = utils.get_datetime() #TIME try: params["email_response"] += "Clustering ..." labels, n_clusters = Clustering(df, mode="fixed_k", n_clusters=int(params['number'])).clusterize() timestamp = list(df.index) params['csvcluster'] = str(params['number'])+"clusters"+params["only_database_name"]+".csv" with open(params['path_real']+params['csvcluster'], 'w') as f: f.write("timestamp,clusters\n") for i in range(len(labels)): f.write("{},{}\n".format(timestamp[i],labels[i])) except Exception as e: utils.get_error(e, params) raise time_txt = utils.end_func("Clustering",params["a_func"]) params["email_response"] += "OK\n" params["email_response"] += "\n CSV MachineLearning = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['csvcluster']),str("3"),{})+"\n\n" params["email_response"] += time_txt
def E_pre_processing(params): params["a_func"] = utils.get_datetime() #TIME try: params["email_response"] += "PreProcessing ..." params['csvpreprocessing'] = "pre_processing"+params["csvfile"] df = pd.read_csv(open(params['path_real']+params["csvfile"],"r"), sep=',', header=0, index_col=0) pre_processing = pre.PreProcessing(df,norm=params['optimzation_sensor_hub']) df = pre_processing.build() df.to_csv(params['path_real']+params['csvpreprocessing'], sep=',', encoding='utf-8', header=True) except Exception as e: utils.get_error(e, params) raise time_txt = utils.end_func("PreProcessing",params["a_func"]) params["email_response"] += "OK\n" params["email_response"] += "\n CSV PreProcessing = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params['csvpreprocessing']),str("2"),{'optimzation_sensor_hub':params["optimzation_sensor_hub"]})+"\n\n" params["email_response"] += time_txt return df
def get(self):
    """HTTP GET handler: decrypt the file parameters and either render the
    download page (when a level key is present) or stream the file."""
    crypto_path_real = self.request.get("filename")
    crypto_path_graphic = self.request.get("config")
    # Fernet symmetric decryption with the application-wide key.
    cipher_suite = Fernet(KEY_IML)
    path_real = cipher_suite.decrypt(bytes(crypto_path_real))
    path_graphic = cipher_suite.decrypt(bytes(crypto_path_graphic))
    crypto_level = self.request.get("key")
    if crypto_level:
        # A "key" parameter means: render the HTML page with a regenerated
        # link instead of serving the file bytes directly.
        level = cipher_suite.decrypt(bytes(crypto_level))
        crypto_old_params = self.request.get("extra")
        old_params = cipher_suite.decrypt(bytes(crypto_old_params))
        params = {
            'level_html': int(level),
            'filename': crypto_path_real,
            'config': crypto_path_graphic,
            'url': utils.create_link(KEY_IML, path_real, path_graphic),
            'params': old_params,
        }
        self.render_template('download_file.html', params)
    else:
        utils.response_download_file(self, path_real, path_graphic)
def setup(self):
    """Create a dummy interface and open an IPDB on it."""
    # Indices of interfaces created during the test run.
    self.ifaces = []
    # The dummy link every test operates on.
    self.ifd = self.get_ifname()
    create_link(self.ifd, kind="dummy")
    self.ip = IPDB(mode=self.mode)
def setup(self):
    """Allocate a unique interface name and create a dummy link for it."""
    self.ifname = uifname()
    create_link(self.ifname, "dummy")
def setup(self):
    """Create the dummy test interface and open an IPDB in direct mode."""
    create_link('dummyX', 'dummy')
    self.ip = IPDB(mode='direct')
def download_database(params):
    """Download the requested database and email a download link to it.

    Consistency fix: like the other handlers in this module (e.g.
    simple_sensorHub), the result of utils.response_email is now returned to
    the caller instead of being discarded.
    """
    params['subject'] = "Database - "
    path_real, database_name = utils.download_database(params['filename'], full_path = False)
    link = utils.create_link(params['KEY_IML'], path_real, database_name)
    return utils.response_email(params['email'], params['subject'], link)
def create(self, kind='dummy'):
    """Create a link of the given kind and return its (name, index) pair."""
    ifname = self.get_ifname()
    create_link(ifname, kind=kind)
    index = self.ip.link_lookup(ifname=ifname)[0]
    return (ifname, index)
def setup(self):
    """Create the dummy interface used by the tests."""
    create_link('dummyX', 'dummy')
def setup(self):
    """Create a dummy link under a freshly generated unique name."""
    self.ifname = uifname()
    create_link(self.ifname, 'dummy')
def setup(self):
    """Create the dummy interface and open a forked IPRoute on it."""
    create_link('dummyX', 'dummy')
    self.ip = IPRoute(fork=True)
    self.dev = self.ip.link_lookup(ifname='dummyX')
def setup(self):
    """Create the dummy interface, a release event and an IPRoute client."""
    create_link('dummyX', 'dummy')
    self.release = Event()
    self.ip = IPRoute()
    self.dev = self.ip.link_lookup(ifname='dummyX')
def E_export_csv(params):
    """Export the downloaded database to CSV applying BLE/WIFI filters.

    Builds white/blacklists from params, honours an optional time window
    (params['set_period']), runs export_csv.run(...) and appends progress
    text plus the CSV link to params["email_response"].  Mutates params in
    place; re-raises any export error after logging it via utils.get_error.
    """
    init_time = None
    finish_time = None
    params["whitelist_ble"]=[]
    params["whitelist_wifi"]=[]
    params["blacklist_ble"]=[]
    params["blacklist_wifi"]=[]
    if params['bluetooth']:
        if 'is_only_hardcode' in params:
            if params['is_only_hardcode']: # Hardcode
                # NOTE(review): leftover Portuguese debug prints ("entered
                # wrong" / "entered right"); this branch otherwise does
                # nothing — confirm whether it can be removed.
                print "entro errado"
            else:
                print "entro certo"
        if 'ble_list' in params:
            if params['ble_list']:
                if 'is_blacklist_ble' in params:
                    if params['is_blacklist_ble']:
                        params["blacklist_ble"] = params['ble_list']
                    else:
                        params["whitelist_ble"] = params['ble_list']
    if params['wifi']:
        if 'wifi_list' in params:
            if params['wifi_list']:
                if 'is_blacklist_wifi' in params:
                    if params['is_blacklist_wifi']:
                        # parse_wifi_list returns (macs, names); only the MACs
                        # are used for filtering here.
                        params["blacklist_wifi"] = utils.parse_wifi_list(params['wifi_list'])[0]
                    else:
                        params["whitelist_wifi"] = utils.parse_wifi_list(params['wifi_list'])[0]
    if 'set_period' in params:
        if params['set_period']:
            init_time = params['init_time']
            finish_time = params['finish_time']
    params["a_func"] = utils.get_datetime() #TIME
    try:
        params["email_response"] += "Converting csv ..."
        params["csvfile"] = params["only_database_name"]+".csv"
        export_csv.run(
            inputfile=params['path_real']+params['database_name'],
            outputfile=params['path_real']+params["csvfile"],
            bluetooth=params['bluetooth'],
            wifi=params['wifi'],
            sensorhub=params['optimzation_sensor_hub'],
            battery= True if params['optimzation_sensor_hub'] else False,
            optimize=params['optimzation_sensor_hub'],
            whitelist_ble=params["whitelist_ble"],
            whitelist_wifi=params["whitelist_wifi"],
            blacklist_ble=params["blacklist_ble"],
            blacklist_wifi=params["blacklist_wifi"],
            init_time = init_time,
            finish_time = finish_time,
        )
    except Exception as e:
        utils.get_error(e, params)
        raise
    time_txt = utils.end_func("CSV conversion",params["a_func"])
    params["email_response"] += "OK\n"
    params["email_response"] += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(params['path_real']),str(params["csvfile"]),str("1"),{'optimzation_sensor_hub':params["optimzation_sensor_hub"]})+"\n\n"
    params["email_response"] += time_txt
def setup(self):
    """Create the dummy interface and open a forked IPDB on it."""
    create_link('dummyX', 'dummy')
    self.ip = IPDB(mode=self.mode, fork=True)
def setup(self):
    """Track created interfaces and open an IPDB over a fresh dummy link."""
    # Indices of interfaces created by the tests.
    self.ifaces = []
    self.ifd = self.get_ifname()
    create_link(self.ifd, kind='dummy')
    self.ip = IPDB(mode=self.mode)
def create(self, kind='dummy'):
    """Create a link, remember its index for cleanup and return (name, index)."""
    ifname = uifname()
    create_link(ifname, kind=kind)
    index = self.ip.link_lookup(ifname=ifname)[0]
    self.ifaces.append(index)
    return (ifname, index)
def exec_ML_1(params):
    """Full ML pipeline: download DB, export CSV, preprocess, cluster, render
    the PDF graphic and email links to every artifact.

    Returns utils.response_email(...)'s result, or utils.get_error(...) on
    the first failing step.
    """
    params['subject'] = "Machine Learning - Results"
    email_response = utils.show_info(params) + "\n"
    try:
        email_response += "download database ..."
        path_real, database_name = utils.download_database(params['filename'], full_path = False)
    except Exception as e:
        return utils.get_error(e, params)
    email_response += "OK\n"
    email_response += "\n Database = "+utils.create_link(params['KEY_IML'],str(path_real),str(database_name))+"\n\n\n"
    params['path_real'] = path_real
    params['database_name'] = database_name
    # Database name without extension, used to derive artifact names.
    params["only_database_name"] = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime() #TIME
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Converting csv ..."
        params["csvfile"] = params["only_database_name"]+".csv"
        export_csv.run(
            inputfile=path_real+database_name,
            outputfile=path_real+params["csvfile"],
            bluetooth=params['bluetooth'],
            wifi=params['wifi'],
            sensorhub=params['optimzation_sensor_hub'],
            battery=True if params['optimzation_sensor_hub'] else False ,
            optimize=params['optimzation_sensor_hub'],
        )
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion",params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = "+utils.create_link(params['KEY_IML'],str(path_real),str(params["csvfile"]))+"\n\n"
    email_response += time_txt
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "PreProcessing ..."
        # NOTE(review): the file handle from open() is never closed.
        df = pd.read_csv(open(path_real+params["csvfile"],"r"), sep=',', header=0, index_col=0)
        pre_processing = pre.PreProcessing(df,norm=params['optimzation_sensor_hub'])
        df = pre_processing.build()
        df.to_csv(path_real+"pre_processing"+params["csvfile"], sep=',', encoding='utf-8', header=True)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("PreProcessing",params["a_func"])
    email_response += "OK\n"
    # NOTE(review): this link passes the full path (path_real prefixed) as the
    # file-name argument, unlike every other create_link call here which
    # passes only the bare name — likely a broken link; compare
    # E_pre_processing, which uses the bare name.
    email_response += "\n CSV PreProcessing = "+utils.create_link(params['KEY_IML'],str(path_real),path_real+"pre_processing"+params["csvfile"])+"\n\n"
    email_response += time_txt
    params['csvpreprocessing'] = path_real+"pre_processing"+params["csvfile"]
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Clustering ..."
        labels, n_clusters = Clustering(df, mode="fixed_k", n_clusters=int(params['number'])).clusterize()
        # The dataframe index carries the per-sample timestamps.
        timestamp = list(df.index)
        params['csvcluster'] = str(params['number'])+"clusters"+params["only_database_name"]+".csv"
        with open(path_real+params['csvcluster'], 'w') as f:
            f.write("timestamp,clusters\n")
            for i in range(len(labels)):
                f.write("{},{}\n".format(timestamp[i],labels[i]))
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Clustering",params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV MachineLearning = "+utils.create_link(params['KEY_IML'],str(path_real),str(params['csvcluster']))+"\n\n"
    email_response += time_txt
    params["a_func"] = utils.get_datetime() #TIME
    try:
        email_response += "Creating graphic..."
        # Parentheses in the name would break the R invocation below.
        params['pdfgraphic'] = (params["only_database_name"]+".pdf").replace("(","").replace(")","")
        # The command string is duplicated for echo and execution — the two
        # copies must be kept in sync by hand (compare E_graphic).
        print "Rscript machine_learning/pdf/pdf_lines.R \""+path_real+params['csvcluster']+"\" \""+path_real+params['pdfgraphic']+"\""
        os.system("Rscript machine_learning/pdf/pdf_lines.R \""+path_real+params['csvcluster']+"\" \""+path_real+params['pdfgraphic']+"\"")
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Graphic creation",params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = "+utils.create_link(params['KEY_IML'],str(path_real),str(params['pdfgraphic']))+"\n\n"
    email_response += time_txt
    # NOTE(review): leftover debug print of the whole email body; the subject
    # literal below duplicates params['subject'] set at the top.
    print email_response
    email_response += utils.end_func("All process",params["all_time"])
    return utils.response_email(params['email'],"Machine Learning - Results", email_response)