def sync(self, name, addr, rpc_port, stream_port):
    if self.value is not None:
        self.close()
    try:
        value = krpc.connect(name=name, address=addr,
                             rpc_port=rpc_port, stream_port=stream_port)
        self.value = value
        self.name = name
        self.addr = addr
        self.rpc_port = rpc_port
        self.stream_port = stream_port
        self.connected = True
        self.synced.emit()
        show_info(
            "Connection established.",
            "Successfully connected to %s." % addr,
            "Connected to %s as %s with rpc and stream ports "
            "respectively %s and %s" % (addr, name, rpc_port, stream_port))
    except socket_error as serr:
        if serr.errno == errno.ECONNREFUSED:
            show_error("Connection refused.", None, None)
        else:
            raise serr
    except krpc.error.ConnectionError as cerr:
        show_error("Connection Error.",
                   "Connection to krpc server raised an error.",
                   str(cerr))
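# For reference, a minimal standalone sketch of the same connect-and-report flow
# against a kRPC server. The host, ports, and client name below are placeholders
# (50000/50001 are the usual kRPC defaults), and show_info/show_error are replaced
# with plain prints; this is illustrative only, not the class's actual method.
import errno
from socket import error as socket_error

import krpc


def try_connect(name, addr, rpc_port=50000, stream_port=50001):
    """Attempt a kRPC connection and report the outcome (illustrative sketch)."""
    try:
        conn = krpc.connect(name=name, address=addr,
                            rpc_port=rpc_port, stream_port=stream_port)
        print("Connected to %s as %s (rpc=%s, stream=%s)"
              % (addr, name, rpc_port, stream_port))
        return conn
    except socket_error as serr:
        if serr.errno == errno.ECONNREFUSED:
            print("Connection refused.")
            return None
        raise
    except krpc.error.ConnectionError as cerr:
        print("Connection error: %s" % cerr)
        return None

# conn = try_connect("my-client", "127.0.0.1")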
def extract_subtitles_batch(data):
    for settings in data:
        val_to_utf8_str(settings, ['input_dir', 'output_dir'])
        check_dirs(settings)

        input_lang = 'zh-TW'
        output_lang = 'zh-TW'
        extension = 'vtt'
        if 'input_lang' in settings:
            input_lang = to_utf8_str(settings['input_lang'])
        if 'output_lang' in settings:
            output_lang = to_utf8_str(settings['output_lang'])
        if 'extension' in settings:
            extension = to_utf8_str(settings['extension'])

        get_it_path, get_ot_path = gen_path_tools(settings,
                                                  ['input_dir', 'output_dir'])
        # Wrap in list() so len() and indexing below also work on Python 3.
        input_names = list(filter(filter_media, listdir(settings['input_dir'])))
        input_paths = list(map(get_it_path, input_names))
        output_paths = list(map(get_ot_path, input_names))
        output_paths = list(map(gen_extend_path('-', extension), output_paths))

        final_settings = []
        for i in range(len(input_names)):
            final_settings.append({
                'input_path': input_paths[i],
                'output_path': output_paths[i],
                'input_lang': input_lang,
                'output_lang': output_lang,
                'extension': extension,
            })

        show_info('Input', settings['input_dir'])
        start = time.time()
        extract_subtitles(final_settings)
        show_time(start)
        show_info('Input', settings['input_dir'], is_end=True)
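# For reference, a hypothetical `data` entry showing the keys this batch function
# reads. Directory names and languages are placeholders; 'input_lang',
# 'output_lang', and 'extension' are optional and default to 'zh-TW', 'zh-TW',
# and 'vtt' respectively.
example_data = [
    {
        'input_dir': 'input_videos_1',       # scanned with listdir() + filter_media
        'output_dir': 'output_subtitles_1',  # destination for the generated files
        'input_lang': 'zh-TW',
        'output_lang': 'en',
        'extension': 'vtt',
    },
]

# extract_subtitles_batch(example_data)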
def unsync(self):
    if self.value is not None:
        self.value.close()
    self.value = None
    self.name = None
    self.addr = None
    self.rpc_port = None
    self.stream_port = None
    self.connected = False
    self.unsynced.emit()
    show_info("Connection terminated.", None, None)
def gen_subclip(input_dir, output_dir, input_name, intervals):
    input_path = path.join(input_dir, input_name)
    for idx, x in enumerate(intervals):
        output_path = get_output_path(output_dir, input_name, idx, x)
        cmd = ['pipenv', 'run', 'python', 'cut.py']
        args = [
            input_path,
            str(x['start_time']),
            str(x['end_time']),
            output_path
        ]
        show_info('', output_path, level=2)
        st_time = time.time()
        subprocess.call(cmd + args)
        show_time(st_time)
        show_info('', output_path, is_end=True, level=2)
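# A hypothetical call showing the interval structure the loop above expects.
# File and directory names are placeholders taken from the YAML example in main();
# each interval becomes one `pipenv run python cut.py <input> <start> <end> <output>`
# subprocess call.
example_intervals = [
    {'start_time': 0.50, 'end_time': 1.56},
    {'start_time': 1.00, 'end_time': 2.00},
]

# gen_subclip('input_videos_1', 'output_videos_1', '1-final-a.mp4', example_intervals)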
def filter_simple_wifi(params):
    params['subject'] = "Filter WIFI Graphic "
    email_response = utils.show_info(params) + "\n"

    path_real, database_name = utils.download_database(params['filename'],
                                                       full_path=False)
    only_name = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime()  # TIME
    params["a_func"] = utils.get_datetime()  # TIME

    try:
        email_response += "Exporting data (WIFI) ..."
        export_csv = "filter_wifi%s" % (only_name + ".csv")
        wifi_list_mac, wifi_list_name = utils.parse_wifi_list(params['wifi_list'])
        ExportWifi(wifi_list_mac, wifi_list_name,
                   params['is_blacklist']).run(path_real + database_name,
                                               path_real + export_csv)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion", params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = " + utils.create_link(
        params['KEY_IML'], str(path_real), str(export_csv)) + "\n\n\n"
    email_response += time_txt

    try:
        email_response += "Building Graphic (WIFI) ..."
        path_graphic = "filter_wifi_%s" % (only_name + ".pdf")
        WifiGraphic().run(path_real + export_csv, path_real + path_graphic)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Graphic creation", params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = " + utils.create_link(
        params['KEY_IML'], str(path_real), str(path_graphic)) + "\n\n\n"
    email_response += time_txt

    email_response += utils.end_func("All process", params["all_time"])
    return utils.response_email(params['email'], params['subject'], email_response)
def continue_ML(params):
    params["email_response"] = utils.show_info(params) + "\n"
    params["only_database_name"] = params['filename'][:params['filename'].rfind(".")]
    params["subject"] = "Machine Learning - Results (Continue)"
    params["all_time"] = utils.get_datetime()  # TIME

    if params['level_html'] == 1:
        # Resume from a raw CSV export: pre-process, cluster, then plot.
        params['csvfile'] = params['filename']
        df = E_pre_processing(params)
        E_clustering(df, params)
        E_graphic(params)
    elif params['level_html'] == 2:
        # Resume from a pre-processed CSV: cluster, then plot.
        params['csvpreprocessing'] = params['filename']
        df = pd.read_csv(open(params['path_real'] + params["csvpreprocessing"], "r"),
                         sep=',', header=0, index_col=0)
        E_clustering(df, params)
        E_graphic(params)
    elif params['level_html'] == 3:
        # Resume from a clustering result: only rebuild the graphic.
        params['csvcluster'] = params['filename']
        E_graphic(params)

    params["email_response"] += utils.end_func("All process", params["all_time"])
    print(params["email_response"])
    return utils.response_email(params['email'], params["subject"],
                                params["email_response"])
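# A hypothetical `params` dict for resuming at level 2 (pre-processed CSV). The
# email, file name, and path are placeholders; only the keys read directly by
# continue_ML are shown, and the E_* helpers will need further keys such as
# 'number' and 'KEY_IML' used elsewhere in this module.
example_params = {
    'email': 'user@example.com',              # placeholder recipient
    'filename': 'pre_processing_run01.csv',   # placeholder file to resume from
    'path_real': '/data/ml_runs/',            # placeholder working directory
    'level_html': 2,                          # 1 = raw CSV, 2 = pre-processed CSV, 3 = cluster CSV
}

# continue_ML(example_params)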
def show_id(show_id):
    sectionTemplate = "./templates/show.tpl"
    sectionData = utils.show_info(show_id)
    try:
        return template("./pages/index.html",
                        version=utils.getVersion(),
                        sectionTemplate=sectionTemplate,
                        query=sectionData)
    except:
        return template("./pages/index.html",
                        sectionTemplate="./templates/404.tpl",
                        version=utils.getVersion(),
                        sectionData={})
def exec_ML(params):
    params["email_response"] = utils.show_info(params) + "\n"
    params["subject"] = "Machine Learning - Results (With Filter)"

    E_get_database(params)
    E_export_csv(params)
    df = E_pre_processing(params)
    E_clustering(df, params)
    E_graphic(params)

    params["email_response"] += utils.end_func("All process", params["all_time"])
    # print params["email_response"]
    return utils.response_email(params['email'], params["subject"],
                                params["email_response"])
async def login():
    user_id, login_token = utils.get_user_id(), utils.get_login_token()
    just_logged_in = False
    if not user_id or not login_token:
        # webbrowser.open(SERVER_HOST + '/local-products-login?port=' + str(LOCAL_SERVER_PORT), new=0, autoraise=True)
        await utils.show_info(
            "Sincronizador de archivos",
            "No hay ningún usuario guardado. Inicia sesión...")  # "No saved user. Please log in..."
        user_mail, password = await authenticate_box.ask_login()
        if user_mail is None or password is None:
            exit()
        # user_mail = (await aioconsole.ainput("Correo electrónico: ")).strip()
        # password = (await aioconsole.ainput("Contraseña: ")).strip()
        try:
            user_id, login_token = await server.login(mail=user_mail, password=password)
        except RemoteException as e:
            await utils.show_warning("Linarand sincronizador de archivos",
                                     "Hubo un problema. " + str(e))  # "There was a problem."
            return await login()
        utils.set_user_id(user_id)
        utils.set_login_token(login_token)
        utils.save_data()
        just_logged_in = True
    try:
        username = await server.authenticate(user_id=user_id, token=login_token)
    except RemoteException as e:
        await utils.show_warning(
            "Sincronizador de archivos",
            "Hubo un problema. " + str(e) + ". Eliminando usuario")  # "There was a problem. Removing user."
        utils.set_user_id(None)
        utils.set_login_token(None)
        utils.save_data()
        return await login()
    if just_logged_in:
        asyncio.ensure_future(
            utils.show_info(
                "Sincronizador de archivos",
                # "Logged in as %s. You can go to the Ingeniería Linarand page and
                #  sync whatever files you want from this machine."
                "Sesión iniciada como %s. Puedes ir a la página de Ingeniería Linarand "
                "y sincronizar los archivos que desees desde este equipo." % username))
def main():
    """
    # Examples to run this script

    ## Example 1: feed arguments from a yaml/yml file

    * Step 1. Edit your yaml file,

    ```
    $ vim my_settings.yaml
    ```

    The following is a yaml/yml file example:

    ```
    ---
    data:
      - input_dir: input_videos_1
        output_dir: output_videos_1
        combnations:
          - input_name: 1-final-a.mp4
            intervals:
              - start_time: 0.50
                end_time: 1.56
              - start_time: 1.00
                end_time: 2.00
          - input_name: 2-final-a.mp4
            intervals:
              - start_time: 0.50
                end_time: 1.56
              - start_time: 1.00
                end_time: 2.00
      - input_dir: input_videos_2
        output_dir: output_videos_2
        combnations:
          - input_name: 1-final-b.mp4
            intervals:
              - start_time: 0.50
                end_time: 1.56
              - start_time: 1.00
                end_time: 2.00
          - input_name: 2-final-b.mp4
            intervals:
              - start_time: 0.50
                end_time: 1.56
              - start_time: 1.00
                end_time: 2.00
    ...
    ```

    * Step 2. Run the command

    ```
    $ pipenv run python cut_batch.py my_settings.yaml
    ```

    If you don't provide a yaml file and run the command like this:

    ```
    $ pipenv run python cut_batch.py
    ```

    "cut_batch.py" automatically uses "cut_batch.yaml" as the default; if
    "cut_batch.yaml" does not exist, the program raises an error.
    """
    yaml_path = get_yaml_path('cut_batch.yaml')
    config = read_yaml(yaml_path)

    global_start = time.time()
    for settings in config['data']:
        val_to_utf8_str(settings, ['input_dir', 'output_dir'])
        check_dirs(settings)

        show_info('Input', settings['input_dir'])
        build_subclips(settings)
        show_info('Input', settings['input_dir'], is_end=True)
    show_time(global_start, prefix='Total')
def exec_ML_1(params):
    params['subject'] = "Machine Learning - Results"
    email_response = utils.show_info(params) + "\n"

    try:
        email_response += "download database ..."
        path_real, database_name = utils.download_database(params['filename'],
                                                           full_path=False)
    except Exception as e:
        return utils.get_error(e, params)
    email_response += "OK\n"
    email_response += "\n Database = " + utils.create_link(
        params['KEY_IML'], str(path_real), str(database_name)) + "\n\n\n"

    params['path_real'] = path_real
    params['database_name'] = database_name
    params["only_database_name"] = database_name[:database_name.rfind(".")]
    params["all_time"] = utils.get_datetime()  # TIME

    params["a_func"] = utils.get_datetime()  # TIME
    try:
        email_response += "Converting csv ..."
        params["csvfile"] = params["only_database_name"] + ".csv"
        export_csv.run(
            inputfile=path_real + database_name,
            outputfile=path_real + params["csvfile"],
            bluetooth=params['bluetooth'],
            wifi=params['wifi'],
            sensorhub=params['optimzation_sensor_hub'],
            battery=True if params['optimzation_sensor_hub'] else False,
            optimize=params['optimzation_sensor_hub'],
        )
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("CSV conversion", params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV file = " + utils.create_link(
        params['KEY_IML'], str(path_real), str(params["csvfile"])) + "\n\n"
    email_response += time_txt

    params["a_func"] = utils.get_datetime()  # TIME
    try:
        email_response += "PreProcessing ..."
        df = pd.read_csv(open(path_real + params["csvfile"], "r"),
                         sep=',', header=0, index_col=0)
        pre_processing = pre.PreProcessing(df, norm=params['optimzation_sensor_hub'])
        df = pre_processing.build()
        df.to_csv(path_real + "pre_processing" + params["csvfile"],
                  sep=',', encoding='utf-8', header=True)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("PreProcessing", params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV PreProcessing = " + utils.create_link(
        params['KEY_IML'], str(path_real),
        path_real + "pre_processing" + params["csvfile"]) + "\n\n"
    email_response += time_txt
    params['csvpreprocessing'] = path_real + "pre_processing" + params["csvfile"]

    params["a_func"] = utils.get_datetime()  # TIME
    try:
        email_response += "Clustering ..."
        labels, n_clusters = Clustering(df, mode="fixed_k",
                                        n_clusters=int(params['number'])).clusterize()
        timestamp = list(df.index)
        params['csvcluster'] = (str(params['number']) + "clusters"
                                + params["only_database_name"] + ".csv")
        with open(path_real + params['csvcluster'], 'w') as f:
            f.write("timestamp,clusters\n")
            for i in range(len(labels)):
                f.write("{},{}\n".format(timestamp[i], labels[i]))
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Clustering", params["a_func"])
    email_response += "OK\n"
    email_response += "\n CSV MachineLearning = " + utils.create_link(
        params['KEY_IML'], str(path_real), str(params['csvcluster'])) + "\n\n"
    email_response += time_txt

    params["a_func"] = utils.get_datetime()  # TIME
    try:
        email_response += "Creating graphic..."
        params['pdfgraphic'] = (params["only_database_name"] + ".pdf").replace("(", "").replace(")", "")
        r_cmd = ("Rscript machine_learning/pdf/pdf_lines.R \""
                 + path_real + params['csvcluster'] + "\" \""
                 + path_real + params['pdfgraphic'] + "\"")
        print(r_cmd)
        os.system(r_cmd)
    except Exception as e:
        return utils.get_error(e, params)
    time_txt = utils.end_func("Graphic creation", params["a_func"])
    email_response += "OK\n"
    email_response += "\n Graphic = " + utils.create_link(
        params['KEY_IML'], str(path_real), str(params['pdfgraphic'])) + "\n\n"
    email_response += time_txt

    print(email_response)
    email_response += utils.end_func("All process", params["all_time"])
    return utils.response_email(params['email'], "Machine Learning - Results",
                                email_response)
    SIMPSON,
)
from distributions import (UniformDistribution, SequenceMixin,
                           ExponentialDistribution, GammaDistribution,
                           TriangularDistribution, SimpsonDistribution,
                           GaussDistribution)
from lemer import LemerGenerator
from utils import (checks_on_circumstantial_evidence, show_info,
                   calculate_period)

if __name__ == '__main__':
    # Lemer Generator
    lemer = LemerGenerator(A, R0, M, LEMER_N)
    show_info(lemer, LEMER)
    checks_on_circumstantial_evidence(lemer.sequence)
    print()
    calculate_period(lemer.sequence)
    print()

    temp = SequenceMixin(lemer.sequence)

    # In each distribution below, the params are chosen for the best visualisation
    # of the distribution histograms; the lab was successfully passed with these values.
    # All params were set by the educator to demonstrate correct behaviour of the algorithms.
    # Empirical characteristics are computed with numpy functions from the actually
    # generated values; the theoretical characteristics do not satisfy the educator.

    # Uniform Distribution
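    # The repository's LemerGenerator is not shown in this section. As a rough
    # illustration of the underlying idea, a Lehmer (multiplicative linear
    # congruential) generator iterates x_{n+1} = (A * x_n) mod M and normalizes
    # each value to [0, 1). A minimal sketch, assuming a constructor signature
    # like the (A, R0, M, n) call above; the constants below are the classic
    # Park-Miller choices and are used only as an example.
    #
    # class LehmerSketch:
    #     """Illustrative Lehmer generator: x_{n+1} = (A * x_n) mod M."""
    #
    #     def __init__(self, a, r0, m, n):
    #         self.sequence = []
    #         x = r0
    #         for _ in range(n):
    #             x = (a * x) % m
    #             self.sequence.append(x / m)  # normalize to [0, 1)
    #
    # gen = LehmerSketch(a=16807, r0=1, m=2 ** 31 - 1, n=10)
    # print(gen.sequence)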
def redirect_to_show_id(show_id):
    sectionData = utils.show_info(show_id)
    return template("./templates/show.tpl",
                    version=utils.getVersion(),
                    result=sectionData)