# read frames.txt with open(frames_path + 'frames.txt', 'r') as f: time_frames = [int(line.rstrip()) for line in f] print('Frames: {}'.format(time_frames)) # number of motion steps num_motion_steps = len(time_frames) # define time intervals time_intervals = sum_list(time_frames) print('Time intervals: {}'.format(time_intervals)) #%% Create sinograms and randoms tprint('Start Sinograms') for i in range(len(time_intervals) - 1): print('Begin: Frame {}'.format(i)) print('Time interval: {} - {}'.format(time_intervals[i], time_intervals[i + 1])) # listmode-to-sinogram lm2sino.set_time_interval(time_intervals[i], time_intervals[i + 1]) lm2sino.set_up() lm2sino.process() acq_data = lm2sino.get_output() acq_data.write(path_sino + 'sino' + str(i)) # randoms estimate randoms = lm2sino.estimate_randoms()
import art import colorama import platform import os from pytube import YouTube art.tprint("YouTube Video") art.tprint("Py Downloader") url = input( str(colorama.Fore.BLUE + "Put the URL of the video that you want to download: ")) try: youtube = YouTube(url) video = youtube.streams.first() opesys = platform.system() user = os.getlogin() print("Current OS: " + opesys) print("Current User: "******"Linux"): video.download('/home/' + user + '/Downloads') print(colorama.Fore.GREEN + "Video downloaded in /home/" + user + "/Downloads/") exit() elif (opesys == "Windows"): video.download('C:\\users\\' + user + '\\Downloads\\') print(colorama.Fore.GREEN + "Video downloaded in C:/users/" + user + "/Downloads/") exit() except Exception as e:
def saveFile(self):
    """Persist the grade table to 'GPA.csv' (UTF-8, no index column) and print a goodbye banner."""
    target_csv = 'GPA.csv'
    self.db.to_csv(target_csv, encoding='utf-8', index=False)
    art.tprint("BYE !!!", font='roman')
def is_ascii(s): """ Check input string for ASCII compatibility. :param s: input string :type s: str :return: result as bool """ for i in s: if ord(i) > 127: return False return True if __name__ == "__main__": art.tprint("Font Wizard") print("Use this string as input for font resource : ") print(Letters) print("*" * 30) while (True): font_name = input("Please enter font name : ") if font_name in Font_List: print(Error6) else: break font_data = input("Please enter font data (string or list) : ") if not is_utf8(font_data): print(Error5) sys.exit() ascii_flag = "ASCII" if is_ascii(font_data) else "Non-ASCII" if len(font_data) == 0:
:return: None """ input_dict = get_input() file_name = input_dict["file_name"] number_of_files = input_dict["number_of_files"] for i in range(number_of_files): print("Generating {0} from {1}".format(i + 1, number_of_files)) file_name_temp = file_name if number_of_files > 1: file_name_temp = file_name + "_" + str(i + 1) gen_graph(input_dict, file_name_temp) line(40) if __name__ == "__main__": tprint("Pyrgg", "larry3d") tprint("v" + PYRGG_VERSION) description_print() args = sys.argv if len(args) > 1: if args[1].upper() == "TEST": error_flag = doctest.testfile("test.py", verbose=False)[0] sys.exit(error_flag) else: print("Bad Input!") print("Test (Run doctest)") print("Without arg --> Normal Run") else: EXIT_FLAG = False while not EXIT_FLAG: run()
# -*- coding: utf-8 -*- """nafas main.""" import sys from nafas.functions import description_print, get_input_standard, input_filter, get_program_dict, run from nafas.params import NAFAS_VERSION from art import tprint if __name__ == "__main__": args = sys.argv tprint("Nafas") tprint("v" + str(NAFAS_VERSION)) description_print() if len(args) < 2: input_data = get_input_standard() filtered_data = input_filter(input_data) program_data = get_program_dict(filtered_data) run(program_data)
MENU = { "(Static) Amphlett Analysis": "Amphlett_Analysis (Static)", "(Static) Larminiee Analysis": "Larminiee_Analysis (Static)", "(Static) Chamberline Kim Analysis": "Chamberline_Kim_Analysis (Static)", "(Dynamic) Padulles Analysis I ": "Padulles_Analysis I (Dynamic)", "(Dynamic) Padulles Analysis II": "Padulles_Analysis II (Dynamic)", "(Dynamic) Padulles Hauer Analysis": "Padulles_Hauer Analysis (Dynamic)", "(Dynamic) Padulles Amphlett Analysis": "Padulles_Amphlett Analysis (Dynamic)", "(Dynamic) Chakraborty Analysis": "Chakraborty_Analysis (Dynamic)" } MENUKEYS = sorted(MENU.keys()) EXITFLAG = False tprint("OPEM") tprint("v" + str(Version)) print(Description_Menu["Links"]) description_print("Overview", Description_Menu) if "TEST" in ARGSUP: sys.exit(0) check_update(Version) while not EXITFLAG: tprint("Models", font="larry3d") for i, item in enumerate(MENUKEYS): print(str(i + 1) + "-" + item) try: ANALYSISINDEX = int(input(("\nPlease select a model : "))) except Exception: ANALYSISINDEX = -1 if ANALYSISINDEX - 1 in range(len(MENUKEYS)):
# -*- coding: utf-8 -*-
"""PyCM main."""
import doctest
import sys
from .pycm_obj import *
from .pycm_output import *
from art import tprint

if __name__ == "__main__":
    args = sys.argv
    # TEST mode: run the doctest suite and exit with its failure count.
    if len(args) > 1 and args[1].upper() == "TEST":
        error_flag = doctest.testfile(
            "pycm_test.py",
            optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS |
            doctest.IGNORE_EXCEPTION_DETAIL,
            verbose=False)[0]
        sys.exit(error_flag)
    else:
        # Normal mode: banner, version, help.
        # (Previously this block was duplicated verbatim in two separate
        # else-branches; merged into one by folding the arg checks.)
        tprint("pycm")
        tprint("V:" + PYCM_VERSION)
        pycm_help()
def prCyan(skk): print("\033[96m {}\033[00m".format(skk)) def prLightGray(skk): print("\033[97m {}\033[00m".format(skk)) def prBlack(skk): print("\033[98m {}\033[00m".format(skk)) os.system('cls') os.system('color A') art.tprint("Mighty-Trip V2", font="random") prRed( 'This version of MightyTrip V2 is still in Beta, there are many bugs and security risks!' ) prPurple( 'MightyTrip V2 is a Client to Server socket program for remote use of Terminals.' ) prRed('This is an adaptation of MightyTrip by Psyonik and YendisFish') prPurple('Created by: YendisFish') parser = argparse.ArgumentParser(description="MightyTripV2") parser.add_argument('-i', '--ipaddress', help='Server IP Address', required=True) parser.add_argument('-p',
import json import os import random import string import requests from art import tprint print('-' * 80) tprint('Welcome to Fuzzbot!', font='random') print('-' * 80) url = input('Please enter target URL to post to: ') user_argument = input( 'Enter the name of the argument used to pass the user name: ') pass_argument = input( 'Enter the name of the argument used to pass the password: '******'Is the first name to be abbreviated? (Y or N): ') lastinitial_switch = input('Is the last name to be abbreviated? (Y or N): ') specialswitch = input( 'Does email formatting require special charachters appended to end? (Y or N): ' ) adddomain_switch = input( 'Does the username require a randomized eMail domain? (Y or N): ') email_domain = input( 'If all emails should come from a common domain please enter it now, or leave blank for none: ' ) send_limit = int( input( 'Please enter a number of fuzzing attempts to execute, enter 0 to run continuously: ' ))
def main():
    """Interactive console menu for 1-D forward modelling (Indonesian UI).

    Loops over a text menu: open a model file, save the computed response,
    run the forward computation, plot the results, and print model
    properties.  Relies on module-level helpers ``ambilmodel`` (load model),
    ``buatmodel`` (build layer model), ``plotforward`` (forward response),
    Tk file dialogs, numpy, matplotlib and tabulate.
    """
    con = True  # loop flag; cleared when the user picks option 0
    print('================================')
    art.tprint('FORWARD \n MODELLING')
    while con:
        print('================================')
        print('MENU FORWARD MODELLING')
        print('Opsi:')
        print('\t1. Open File')
        print('\t2. Save Data')
        print('\t3. Proses data')
        print('\t4. Lihat Plot')
        print('\t5. Properti Model')
        print('================================')
        opsi = int(input('Masukan pilihan yang diinginkan (keluar: 0): '))
        if opsi == 0:  # exit
            print('Keluar dari program...')
            con = False
        elif opsi == 1:  # open file
            Tk().withdraw(
            )  # we don't want a full GUI, so keep the root window from appearing
            filename = askopenfilename(
            )  # show an "Open" dialog box and return the path to the selected file
            try:
                print('Loading model...')
                # data = layer model table, fdata presumably holds frequencies
                data, fdata = ambilmodel(filename)
                # print(f'data = {data}')
                # print(f'fdata = {fdata}')
                print('STATUS: Model sudah dimasukkan.')
            except:
                # NOTE(review): load failures are silently swallowed
                pass
        elif opsi == 2:  # save file
            Tk().withdraw(
            )  # we don't want a full GUI, so keep the root window from appearing
            ftypes = [('Text document', '.txt')]
            savefilename = asksaveasfilename(defaultextension=".txt",
                                             filetypes=ftypes,
                                             title='Save Data Hasil')
            print(savefilename)
            try:
                # one "frequency  apparent-resistivity  phase" row per sample;
                # requires that option 3 was run first (rhoapp/fasa exist)
                writedata = [
                    f'{_[0]:.6f}\t{_[1]:.6f}\t{_[2]:.6f}\n'
                    for _ in np.transpose([fdata.frekuensi, rhoapp, fasa])
                ]
                print(writedata)
                with open(savefilename, 'w') as f:
                    f.write('frekuensi\t Apparent Resistivity\t Fasa\n')
                    for wd in writedata:
                        f.write(wd)
                    pass
                pass
            except:
                print('Error: Failed to save data')
                pass
        elif opsi == 3:  # process data
            try:
                # build resistivity/depth arrays from the loaded model table
                rhop, kedalaman = buatmodel(data)
                rhop = [float(_) for _ in rhop]
                kedalaman = [float(_) for _ in kedalaman]
            except:
                print('ERROR: DATA BELUM ADA!')
            try:
                # forward response: apparent resistivity, phase, saturation
                rhoapp, fasa, saturasi = plotforward(data, fdata, kedalaman,
                                                     rhop)
                #print(f'rhoapp = {rhoapp}')
                #print(f'fasa = {fasa}')
            except:
                print('ERROR: PLOTFORWARD')
        elif opsi == 4:  # plot data
            # 2x2 grid: rho_a (top-left), phase (bottom-left), model (right)
            fig = plt.figure(constrained_layout=True)
            gs = fig.add_gridspec(2, 2)
            ax1 = fig.add_subplot(gs[0, 0])
            ax2 = fig.add_subplot(gs[1, 0])
            ax3 = fig.add_subplot(gs[:, 1])
            #ax4 = fig.add_subplot(gs[:,2])
            ax1.plot(fdata.frekuensi, rhoapp, 'r-', linewidth=3)
            ax1.set_yscale('log')
            ax1.set_xscale('log')
            ax1.set_title('Apparent Resistivity')
            ax1.set_xlabel('frekuensi (Hz)')
            ax1.set_ylabel(r'$\rho_a$ ($\Omega$m)')
            ax1.grid(True)
            ax1.invert_xaxis()
            ax1.set_xlim(min(fdata.frekuensi) / 2, max(fdata.frekuensi))
            ax1.set_ylim(min(rhoapp) / 5, max(rhoapp) * 5)
            ax2.plot(fdata.frekuensi, fasa, 'r-', linewidth=3)
            #ax2.set_yscale('log')
            # 45-degree reference line for the phase plot
            ax2.plot(fdata.frekuensi,
                     np.ones(len(fdata.frekuensi)) * 45,
                     'k--',
                     linewidth=5)
            ax2.set_xscale('log')
            ax2.set_title('Fasa')
            ax2.set_xlabel('frekuensi (Hz)')
            ax2.set_ylabel(r'$\phi$ ($\circ$)')
            ax2.set_xlim(min(fdata.frekuensi) / 2, max(fdata.frekuensi))
            ax2.set_ylim(0, 90)
            ax2.invert_xaxis()
            ax2.grid(True)
            ax3.plot(rhop, kedalaman, 'r-', linewidth=3)
            ax3.set_xscale('log')
            ax3.set_ylabel('Kedalaman (m)')
            ax3.set_xlabel(r'Resistivity ($\Omega$m)')
            ax3.set_xlim(min(rhop) / 5, max(rhop) * 5)
            # keep the half-space (last entry) out of the depth-axis scaling
            if len(kedalaman) > 2:
                ax3.set_ylim(0,
                             max([abs(_) for _ in kedalaman[:-1]]) * 1.2)
            ax3.invert_yaxis()
            ax3.grid(True)
            ax3.set_title('Respon Resistivitas')
            # ax4.plot(saturasi, kedalaman, 'r-', linewidth=3)
            # ax4.set_xscale('log')
            # ax4.set_ylabel('Kedalaman (m)')
            # ax4.set_xlabel('Saturasi')
            # ax4.set_xlim(min(saturasi)/5, max(saturasi)*5)
            # if len(kedalaman)>2:
            #     ax4.set_ylim(0, max([abs(_) for _ in kedalaman[:-1]])*1.2)
            # ax4.invert_yaxis()
            # ax4.grid(True)
            plt.tight_layout()
            plt.show(block=False)
        elif opsi == 5:  # model properties
            try:
                print('MODEL PROPERTI')
                print(
                    f'Rentang Frekuensi: {min(fdata.frekuensi)} - {max(fdata.frekuensi)}'
                )
                # NOTE(review): this mutates the loaded model in place
                # (labels the last layer as the half-space)
                modelprop = data
                modelprop[-1][2] = 'homogeneus halfspace'
                print(
                    tabulate(modelprop,
                             headers=['No', 'Resistivity', 'Ketebalan']))
            except:
                print('ERROR: Model Properti')
                pass
def main(
    endpoint: str = endpoint,
    port: int = 443,
    protocol: str = "https",
    collection_name: str = collection_name,
    in_path: Path = in_path,
    api_key: str = os.getenv("TYPESENSE_API_KEY"),
    drop: bool = False,
):
    """CLI entry point: bulk-load CSV files into a Typesense collection.

    Connects to Typesense (key from --api-key / TYPESENSE_API_KEY env /
    interactive prompt), spins up a Dask client, optionally drops and
    recreates the collection, then processes and imports every CSV found
    recursively under ``in_path``.
    """
    tprint("Reguleque")
    # Admin client; prompts interactively if no API key was supplied.
    tsClient = ts.Client({
        "api_key":
        api_key
        or typer.prompt(LOG_PROMPT + " Typesense Admin API Key", type=str),
        "nodes": [{
            "host": os.getenv("TYPESENSE_HOST") or endpoint,
            "port": port,
            "protocol": protocol,
        }],
    })
    typer.echo(LOG_INFO +
               f" Connected to Typesense at {protocol}://{endpoint}:{port}")
    daskClient = Client()
    typer.echo(
        LOG_INFO +
        f" Started cluster, you can monitor at {daskClient.dashboard_link}")
    # List all the files that need loading
    filepaths = list(in_path.rglob("**/*.csv"))
    typer.secho(LOG_INFO + f" Found {len(filepaths)} files to load.")
    try:
        # Drop pre-existing collection if any
        if drop:
            confirm_drop = typer.confirm(
                LOG_WARN +
                " Are you sure you want to delete all documents in the cluster and recreate the schema?"
            )
            if not confirm_drop:
                typer.echo(LOG_ERR + " Canceling execution.", err=True)
                raise typer.Abort()
            typer.echo(
                LOG_WARN +
                " Drop mode has been enabled, dropping all documents and recreating schema...",
                err=True,
            )
            # Deleting a non-existent collection raises; best-effort delete.
            try:
                tsClient.collections[collection_name].delete()
            except Exception:
                pass
            # Create collection with the manual schema
            # NOTE(review): original source was whitespace-mangled — confirm
            # whether this create belongs inside the `if drop:` branch.
            tsClient.collections.create(REVENUE_SCHEMA)
            typer.secho(LOG_INFO + " Created new schema.")
        # Load all files
        typer.secho(LOG_INFO + " Processing and uploading documents...")
        responses: List[List[str]] = []
        for filepath in filepaths:
            entries: List[dict] = process_file(filepath)
            response: List[str] = import_entries(entries, filepath, tsClient)
            responses.append(response)
        # Trigger the lazy Dask graph, show a progress bar, then collect.
        responses = daskClient.persist(responses)
        progress(responses)
        responses = daskClient.gather(responses)
        sleep(2)
        typer.secho(
            "\n" + LOG_INFO +
            f" Finished processing and uploading {len(filepaths)} documents.")
    except ts.exceptions.RequestUnauthorized:
        typer.echo(LOG_ERR + " Invalid API key or insufficient permissions.",
                   err=True)
        raise typer.Abort()
except Exception as e: log.failure("Error: " + str(e)) log.warning( "Make sure URL and APP_KEY are the correct ones and host is reachable. See help (-h)" ) def signal_handler(signal, frame): log.failure("Exiting...") sys.exit(0) if __name__ == "__main__": signal.signal(signal.SIGINT, signal_handler) tprint("Larascript") print("Authors: @pwnedshell & @rsgbengi\n") # Get user arguments argsParser = argparse.ArgumentParser() argsParser.add_argument("url", help="The vulnerable URL") argsParser.add_argument("-k", "--appkey", help="The APP_KEY of the service", required=True) argsParser.add_argument("-c", "--command", default="Default welcome echo", help="The command you want to be executed") argsParser.add_argument("-m", "--method",
def run_package(
    config_file: str, data: pd.DataFrame = None
) -> Tuple[DefaultDict[str, Dict], DefaultDict[str, Dict]]:
    """
    The MASTER function of the entire dnntime package. The runs the entire DL
    pipeline end-to-end from a) loading data from source, b) ETL
    preprocessing, c) statistical EDA and visualization and d) model search
    and evaluations.

    Parameters
    ----------
    config_file : Provides all the (hyper)parameters needed to this function.
                  Each of its components will be validated. Must be a *.yaml file!
    data : DataFrame version of the data source. Not needed if the config_file
           already specifies a file_path the data source.

    Returns
    -------
    data_dict : A custom CheckpointDict dict that saves a copy of the data during
                during each and every point of its transformation.
    model_dict : A custom CheckpointDict dict that saves all the DNN models used,
                 their forecasts, and scores.

    """
    start_time = time.time()

    # Load config YAML file
    # NOTE(review): assert is stripped under `python -O`; an explicit raise
    # would be more robust for input validation.
    assert config_file.endswith(
        ".yaml"), "Config YAML file not found. Please check filepath."
    try:
        with open(config_file, "r") as file:
            content = file.read()
            config = yaml.safe_load(content)
    except FileNotFoundError as e:
        print(e)
        return None, None

    # Check the config dict to ensure it passes all of the assertions
    test.validate_config(config)

    # Initializing meta config variable(s) prior to STEPS
    try:
        ui = config['meta']['user_interface']
        space = " " if ui == 'notebook' else " "
        dt_col = config['meta']['datetime_column']
        target = config['meta']['target_column']
    except KeyError as e:
        print(e)
        return None, None

    # Remove 'meta' key as it is unneeded
    del config['meta']
    # Initialize STEP counter
    step_counter = 1

    # Introductory texts
    art.tprint("Running DNN\ntime-series\npackage...")
    print(
        "-------------------------------------------------------------------")
    print(
        "-------------------------------------------------------------------\n"
    )
    print("SUMMARY STEPS:")
    for key in config.keys():
        print(f" STEP {step_counter}) {config[key]['description']}")
        step_counter += 1

    # Reset counter
    step_counter = 1
    # Add options for space
    # NOTE(review): recomputed here with the same expression as above —
    # redundant but harmless.
    space = " " if ui == 'notebook' else " "

    # Store all of the data and models checkpoints in a dict to be returned
    data_dict = CheckpointDict('data')
    model_dict = CheckpointDict('model')

    # Add additional parameter from the 'meta' block initially
    params = {
        'ui': ui,
        'space': space,
        'dt_col': dt_col,
        'target': target,
        'step_number': step_counter
    }

    # Now do each step one by one
    # Dispatch each config section to its pipeline block by key substring.
    for key in config.keys():
        params['key'] = key
        if 'etl' in key:
            data_dict, params = ETLBlock(data_dict,
                                         params).run_block(config[key])
        elif 'eda' in key:
            params = EDABlock(data_dict, params).run_block(config[key])
        elif 'model' in key:
            model_dict, params = ModelBlock(data_dict,
                                            params).run_block(config[key])
        else:
            print(f"{key} is not 'etl', 'eda', or 'model'. Stopping program "
                  "now, please fix.")
            return data_dict.get(), model_dict.get()

    # Print out the best DL model based on lowest given score_type
    final_stats = {}
    score_type = params['score_type']
    for key in model_dict.get().keys():
        final_stats[key] = model_dict.get()[key][score_type]
    # Lowest score wins (score_type is an error metric).
    best_model_name = min(final_stats.items(), key=operator.itemgetter(1))[0]
    print(
        "\n-----------------------------------------------------------------")
    print("-----------------------------------------------------------------")
    print("\nThe most accurate deep learning model is:")
    print(f" {best_model_name}")
    best_score = model_dict.get()[best_model_name][score_type]
    print(f" {score_type.upper()} score: {best_score:.4f}")

    end_time = time.time()
    run_time = end_time - start_time
    print(f"\nTotal package runtime: {(run_time/60):.2f} min")

    return data_dict.get(), model_dict.get()
import os
import sys
import argparse as args
import urllib

import art as a

dotask = False

# Command-line interface
parser = args.ArgumentParser()
parser.add_argument('-os', help='Specify OS', required=True)
parser.add_argument("-shutdown", help="Shutdown System | Set to 'True'")
parser.add_argument("-log", "-logger", help="Start Key Logger | Set to 'True'")
# BUG FIX: the two option strings were missing a comma — "-iplog" "-iplogger"
# concatenated into the single flag "-iplog-iplogger", and `defs.iplog`
# below raised AttributeError (argparse derived dest "iplog_iplogger").
parser.add_argument("-iplog", "-iplogger",
                    help="Start IP Logger | Set to 'True'")
defs = parser.parse_args()

a.tprint('Project Snowball')

checktrue = True
osarg = defs.os          # target operating system ('windows'/'linux'/'mac')
shutarg = defs.shutdown  # shutdown requested?
logarg = defs.log        # key logger requested?
iplarg = defs.iplog      # IP logger requested?


def shutdown():
    """Dispatch to the per-OS shutdown helper script (Mac unsupported)."""
    # BUG FIX: `os` was used here but never imported — added `import os` above.
    if osarg == 'windows':
        os.system('python winshutdown.py')
    if osarg == 'linux':
        os.system('python linuxshutdown.py')
    if osarg == 'mac':
        print('Mac_OS is not supported by Snowball shutdown feature')
import entry
import art

if __name__ == "__main__":
    # Banner, then hand control to the project's entry point.
    art.tprint("FXS-V1-ALGO")
    entry.Entry()
ARGS = sys.argv ARGSUP = list(map(str.upper, ARGS)) Menu = { "Amphlett_Analysis (Static)": Amphlett_Analysis, "Larminiee_Analysis (Static)": Larminiee_Analysis, "Chamberline_Kim_Analysis (Static)": Chamberline_Kim_Analysis, "Padulles_Analysis I (Dynamic)": Padulles1_Analysis, "Padulles_Analysis II (Dynamic)": Padulles2_Analysis, "Padulles_Hauer Analysis (Dynamic)": Padulles_Hauer_Analysis, "Padulles_Amphlett Analysis (Dynamic)": Padulles_Amphlett_Analysis } MENUKEYS = sorted(Menu.keys()) EXITFLAG = False check_update(Version) while not EXITFLAG: tprint("OPEM") tprint("v" + str(Version)) description_print("Overview", Description_Menu) for i, item in enumerate(MENUKEYS): print(str(i + 1) + "-" + item) try: ANALYSISINDEX = int(input(("Please Choose Analysis : "))) except Exception: ANALYSISINDEX = -1 if ANALYSISINDEX - 1 in range(len(MENUKEYS)): ANALYSISNAME = MENUKEYS[ANALYSISINDEX - 1] description_print(ANALYSISNAME, Description_Menu) USERINPUT = input( "\nEnter [M]: More Information,[T]: Run Standard Test Vector or any other key to " "continue \n") description_control(Analysis_Name=ANALYSISNAME,
# change the current working directory to the given path os.chdir(working_folder) #%% Create folders for results path_NAC = working_folder + '/recon/NAC/' path_smooth = working_folder + '/recon/SMOOTH/' path_tm = working_folder + '/tm/' if not os.path.exists(path_tm): os.makedirs(path_tm, mode=0o770) print('Create Folder: {}'.format(path_tm)) #%% Registration NAC, delivers transformation matrices # define reference image (first image) and float-path tprint('Start Registration of NACs') # refernce file ref_file = path_smooth + 'smooth_0.nii' ref = Eng_ref.ImageData(ref_file) # float files flo_path = path_smooth list_smooth = [f for f in os.listdir(flo_path) if f.endswith(".nii")] # Niftyreg with EPI images reg_nac(ref, flo_path, list_smooth) tprint('Finish Registration')
def main(self):  # Main function to call
    """Run the interactive GPA-manager menu loop.

    Repeats the main menu (PyInquirer prompts) until the user chooses
    "Save and Close" or "Close".  Supports inserting courses, updating
    them, printing a summary table, and computing a term or cumulative
    GPA from the pandas DataFrame in ``self.db``.
    """
    main_menu_questions = [{
        'type': 'list',
        'name': 'todo',
        'message': 'What do you want to do?',
        'choices': [
            'Insert', 'Update', 'Summary', 'Calculate GPA', 'Save and Close',
            'Close'
        ]
    }]
    main_menu_answers = prompt(main_menu_questions, style=custom_style_2)
    while (main_menu_answers['todo'] != "Save and Close") and (
            main_menu_answers['todo'] != "Close"):
        if main_menu_answers['todo'] == "Insert":
            insert_questions = [
                {
                    'type': 'input',
                    'name': 'Course Id',
                    'message': 'Course Id:',
                    'validate': lambda text: len(text) == 9,
                },
                {
                    'type': 'input',
                    'name': 'Course Name',
                    'message': 'Course Name:',
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'input',
                    'name': 'Year',
                    'message': 'Year:',
                    'validate':
                    lambda text: len(text) > 0 and self.check_int(text),
                },
                {
                    'type': 'input',
                    'name': 'Semester',
                    'message': 'Semester:',
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'input',
                    'name': 'Credit',
                    'message': 'Credit:',
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'input',
                    'name': 'Section',
                    'message': 'Section:',
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'list',
                    'name': 'Grade',
                    'message': 'Grade:',
                    'choices': [
                        'A', 'B+', 'B', 'C+', 'C', 'D+', 'D', 'F',
                    ]
                },
            ]
            insert_answers = prompt(insert_questions,
                                    style=custom_style_2)  # Get the answer
            insert_answers["Grade(Score)"] = self.grade_table[
                insert_answers['Grade']]  # Map Grade to score
            self.db = self.db.append(
                insert_answers,
                ignore_index=True)  # Append the new course to DB
        elif main_menu_answers['todo'] == "Update":
            print(tabulate(self.db, headers='keys',
                           tablefmt='psql'))  # Show current DB
            update_questions = [{
                'type': 'input',
                'name': 'course',
                'message': 'Which course do you want to update?',
                'validate':
                lambda text: int(text) <= self.db.tail(1).index.item(),
            }]
            update_answers = prompt(update_questions, style=custom_style_2)
            course_info = self.db.iloc[int(
                update_answers['course'])]  # Find selected course from answer
            # NOTE(review): default_grade is built but never used below —
            # the Grade question hard-codes 'default': 5 instead.
            default_grade = {
                'A': 0,
                'B+': 1,
                'B': 2,
                'C+': 3,
                'C': 4,
                'D+': 5,
                'D': 6,
                'F': 7
            }
            update_course_questions = [
                {
                    'type': 'input',
                    'name': 'Course Id',
                    'message': 'Course Id:',
                    'default': course_info['Course Id'],
                    'validate': lambda text: len(text) == 9,
                },
                {
                    'type': 'input',
                    'name': 'Course Name',
                    'message': 'Course Name:',
                    'default': course_info['Course Name'],
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'input',
                    'name': 'Year',
                    'message': 'Year:',
                    'default': course_info['Year'],
                    'validate':
                    lambda text: len(text) > 0 and self.check_int(text),
                },
                {
                    'type': 'input',
                    'name': 'Semester',
                    'message': 'Semester:',
                    'default': course_info['Semester'],
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'input',
                    'name': 'Credit',
                    'message': 'Credit:',
                    'default': str(course_info['Credit']),
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'input',
                    'name': 'Section',
                    'message': 'Section:',
                    'default': str(course_info['Section']),
                    'validate': lambda text: len(text) > 0,
                },
                {
                    'type': 'list',
                    'name': 'Grade',
                    'message': 'Grade:',
                    'choices': [
                        'A', 'B+', 'B', 'C+', 'C', 'D+', 'D', 'F',
                    ],
                    'default': 5,
                },
            ]
            update_course_answers = prompt(update_course_questions,
                                           style=custom_style_2)
            update_course_answers["Grade(Score)"] = self.grade_table[
                update_course_answers['Grade']]  # Map Grade from score
            self.update_row_with_dict(
                update_course_answers,
                int(update_answers['course']))  # Update DB from answer
        elif main_menu_answers['todo'] == "Summary":
            # Show DB
            print(tabulate(self.db, headers='keys', tablefmt='psql'))
        elif main_menu_answers['todo'] == "Calculate GPA":
            year_semester = self.db[[
                "Year", "Semester"
            ]]  # Select only Year and Semester coloumn
            year_semester = year_semester.drop_duplicates(
            )  # Drop the duplicate value
            # Get the unique Year and Semester pair
            year_ = [
                "Year {} Semester {}".format(row[1]['Year'],
                                             row[1]['Semester'])
                for row in year_semester.iterrows()
            ]
            year_ = sorted(year_)
            select_term_questions = [{
                'type': 'list',
                'name': 'term',
                'message': 'Which year and semester do you want?',
                'choices': ['Total'] + year_
            }]
            select_term_answers = prompt(select_term_questions,
                                         style=custom_style_2)
            if select_term_answers['term'] == 'Total':
                self.db = self.db.astype({'Credit': int
                                          })  # Cast credit column to int
                # (After update mode will cause conflict!)
                product_column = self.db['Grade(Score)'] * self.db[
                    'Credit']  # Calculate Product of Score and Credit
                gpa = product_column.sum() / self.db['Credit'].sum(
                )  # Calculate SumProduct / Sum of Credit
                if not self.singlepoint(
                        gpa):  # Avoid round down if there is one floating point
                    # truncate to two decimal places
                    gpa = gpa // 0.01 / 100
                art.tprint("GPA = {}".format(gpa))  # Print the GPA
            else:
                select_type_questions = [{
                    'type': 'list',
                    'name': 'type',
                    'message': 'Which type do you want?',
                    'choices': ['Cumulative', 'Semester']
                }]
                select_type_answer = prompt(select_type_questions,
                                            style=custom_style_2)
                if select_type_answer['type'] == "Semester":
                    # Calculate specific term
                    year_split = select_term_answers['term'].split(
                        " ")  # Extract the selected term
                    year = year_split[1]  # Get the year
                    semester = year_split[3]  # Get the semester
                    # Filter the db by given Year and Semester
                    db_filter = self.db[(self.db["Year"] == year)
                                        & (self.db["Semester"] == semester)]
                    db_filter = db_filter.astype(
                        {'Credit':
                         int})  # Cast credit column to int (same as above)
                    product_column = db_filter['Grade(Score)'] * db_filter[
                        'Credit']  # Calculate Product of Score and Credit
                    gpa = product_column.sum() / db_filter['Credit'].sum(
                    )  # Calculate SumProduct / Sum of Credit
                    if not self.singlepoint(
                            gpa
                    ):  # Avoid round down if there is one floating point
                        gpa = gpa // 0.01 / 100
                    art.tprint("GPA = {}".format(gpa))  # Print the GPA
                else:
                    # Calculate cumulative gpa
                    # Slice the list of year to only use semester
                    year_slice = year_[:year_.
                                       index(select_term_answers['term']) + 1]
                    sum_product = 0
                    credit_sum = 0
                    for select in year_slice:
                        select_split = select.split(
                            " ")  # Extract the selected term
                        year_select = select_split[1]  # Get the year
                        semester_select = select_split[
                            3]  # Get the semester
                        # Filter the db by given Year and Semester
                        db_filter = self.db[
                            (self.db["Year"] == year_select)
                            & (self.db["Semester"] == semester_select)]
                        product_column = db_filter[
                            'Grade(Score)'] * db_filter[
                                'Credit']  # Calculate Product of Score and Credit
                        sum_product += product_column.sum(
                        )  # Collect product
                        cretdit = db_filter['Credit'].sum(
                        )  # Calculate sum of credit
                        credit_sum += cretdit  # Collect credit
                    gpa = sum_product / credit_sum  # Calculate SumProduct / Sum of Credit
                    if not self.singlepoint(
                            gpa
                    ):  # Avoid round down if there is one floating point
                        gpa = gpa // 0.01 / 100
                    art.tprint("GPA = {}".format(gpa))  # Print the GPA
        main_menu_answers = prompt(
            main_menu_questions,
            style=custom_style_2)  # Loop the main menu question
    if main_menu_answers['todo'] == "Save and Close":
        # Save current state of db to csv and close
        print("Saving....")
        self.db.to_csv('GPA.csv', encoding='utf-8', index=False)
        print("Complete!")
        art.tprint("Goodbye")
    else:
        art.tprint("Goodbye")
@app2.route("/jitsi")
@cross_origin()
def jitsi():
    """
    This function just responds to the browser URL localhost:5000/jitsi

    :return: the rendered template "jitsi.html"
    """
    return render_template('jitsi.html')


## FOR CORS
def set_cors_headers_on_response(response):
    # Permissive CORS: allow any origin; only the OPTIONS method is
    # advertised here — NOTE(review): confirm GET/POST were not intended too.
    response.headers['Access-Control-Allow-Origin'] = '*'
    response.headers['Access-Control-Allow-Headers'] = 'X-Requested-With'
    response.headers['Access-Control-Allow-Methods'] = 'OPTIONS'
    return response


if __name__ == "__main__":
    # Startup banner, then one-time initialisation before serving.
    tprint("CUMMINS", font="starwars")
    print("Creating all tables...")
    db.create_all_tables()
    print("Initialising MQTT...")
    mqtt_messaging.init_mqtt()
    app2.run(host=FLASK_HOST, port=FLASK_PORT, debug=False)
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import math
import time
import sys
sys.path.append('../util')  # make the sibling util package importable
from time_utils import format_time
import numpy as np
from art import tprint
import argparse
from os import path
import pickle

# Program banner
tprint('IDDL', font='larry3d')

# Parse the command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('-f',
                    '--file',
                    type=str,
                    help='The filename of the pickle file.')
parser.add_argument(
    '-n',
    '--num',
    type=int,
    default=1,
    help='The number of base classifiers used in the enbemble.')
args = parser.parse_args()
FILENAME = args.file        # pickle file to operate on
NUM_CLASSIFIER = args.num   # ensemble size (default 1)

# The hyperparameters
LEARNING_RATE = 0.01
N_EPOCH = 10
BATCH_SIZE = 128
BASE_CLASSIFIER_NAME = 'B-ResNet-V-4'
print('Hyperparameters:')
if event.code == "ABS_RZ": self.values['rightY'] = event.state - 127 if event.code == "ABS_X": self.values['leftX'] = event.state - 127 if event.code == "ABS_Z": self.values['rightX'] = event.state - 127 ''' self.tankDrive( self.values['leftY'], self.values['rightY'] ) ''' self.bR, self.bL, self.fL, self.fR = 60,-60,-60,60 ''' print( "{0},{1},{2},{3}".format( str(self.bR).rjust(4, ' '), str(self.bL).rjust(4, ' '), str(self.fL).rjust(4, ' '), str(self.fR).rjust(4, ' ') ),end='\r') ''' print(self.values, end='\r') self.drive() if __name__ == "__main__": TJ = robot() art.tprint('== TJ ==', font='dog') #big, art.tprint('Educational Platform', font = 'small') art.tprint('Teleoperational Testbed', font = 'small') TJ.run()
# -*- coding: utf-8 -*-
"""PyCM main (legacy entry point)."""
import doctest
import sys
from .pycm_obj import *
from .pycm_output import *
from art import tprint

if __name__ == "__main__":
    args = sys.argv
    # TEST mode: run the doctest suite (result intentionally not used as
    # the exit code, matching the original behavior).
    if len(args) > 1 and args[1].upper() == "TEST":
        doctest.testfile(
            "test.py",
            optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS |
            doctest.IGNORE_EXCEPTION_DETAIL,
            verbose=False)
    else:
        # Normal mode: banner, version, help.
        # (Previously duplicated verbatim in two else-branches; merged by
        # folding the argument checks into one condition.)
        tprint("pycm")
        tprint("V:" + VERSION)
        pycm_help()
'''
81-Text To Art
pip install art==3.4
more info: http://www.shaghighi.ir/art/
'''
from art import text2art, tprint

# Default font rendering.
print(text2art("default"))

# Block font; chr_ignore=True skips characters the font cannot draw.
tprint("block", font="block", chr_ignore=True)

# Random font ("rand") chosen by the library.
print(text2art("random", "rand"))
def welcome():
    """Clear the screen, draw the application banner, and show the framed conversion prompt."""
    screen.clear_screen()
    banner_text = 'Conversor de Unidades'
    tprint(banner_text, 'standard')
    prompt_lines = ['Seleccione el tipo de Conversion que desea realizar', ]
    screen.print_framed_message(prompt_lines, 125)
import subprocess, sys, os, signal, art
from rich import print
from KeyLogger import *


def print_info(err):
    """Print an optional error banner followed by the CLI usage help.

    :param err: error message to display, or a falsy value for usage only
    """
    if (err):
        print("\n[bold red]ERROR:[/bold red] " + err)
    print("[bold]USAGE:[/bold] python start.py [italic]OPTIONS[/italic]")
    print("\t-s Open visible process of KeyLogger")
    print("\t-b Open background process of KeyLogger")


if __name__ == "__main__":
    print("")
    art.tprint("KeyLogger")
    print("\t\t\t\t\t[italic]by peppepol[/italic]")
    if (len(sys.argv) == 2):
        kl = KeyLogger()
        if (sys.argv[1] == "-s"):
            # Foreground mode: log in this console until Esc is pressed.
            print_info(False)
            print("\n[bold green]START LOG[/bold green] (PRESS 'esc' to exit)")
            kl.start()
        elif (sys.argv[1] == "-b"):
            # Background mode: spawn a detached child and record its PID.
            # NOTE(review): `f` is never closed explicitly; also
            # DETACHED_PROCESS is Windows-only — confirm target platform.
            f = open("logs.txt", "w+")
            pid = subprocess.Popen(
                ["python", "KeyLogger.py"],
                stdout=f,
                creationflags=subprocess.DETACHED_PROCESS).pid
            print("\n[bold green]KEYLOGGER STARTED[/bold green]")
            print(
                "PROCESS PID was saved in logs.txt - Use python start.py -k [bold white]"
                + str(pid) + "[/bold white] to terminate process")
            print("[italic magenta](You can close this console)[/italic magenta]")
            f.write(str(pid))
def UserOptions():
    """Show the RootHash welcome screen and dispatch the user's menu choice.

    Reads the owner name from the settings file, prints the numbered menu,
    and routes the command to the record/option/credit modules.  Invalid
    commands and Ctrl+C re-enter this function recursively.
    NOTE(review): recursion (instead of a loop) grows the call stack on
    every retry; also `Spath` is read here but `SPATH` is passed to
    change_mastercode — confirm both globals point at the same file.
    """
    # Clear terminal window
    os.system('CLS')
    print('\n ')
    # Display RootHash title as ASCII art
    print(COLOR + Style.BRIGHT)
    tprint('RootHash', FONT)
    print(Fore.RESET)
    print('\n')
    # This part get the RootHash owner name from settings file
    Owner = ''
    # Open settings file
    with open(Spath, 'r') as F:
        Owner = F.read()
    k = Owner.split('\n')
    # Get owner name into variable (first line of the settings file)
    Owner = k[0]
    # RootHash user options in welcome screen
    print(f' Welcome To RootHash {Owner}')
    print(' \t[1] Add New Record')
    print(' \t[2] Modify Record')
    print(' \t[3] Delete Record')
    print(' \t[4] View All Records')
    print(' \t[5] Change Root Password')
    print(' \t[6] About RootHash')
    print(' \t[7] Exit\n')
    try:
        # Get user command
        cmd = input('[>>] Your Command : ')
        if cmd == '1':
            RCD.new_entry(Owner, DPATH, COLOR, FONT)
        elif cmd == '2':
            RCD.modify(Owner, DPATH, COLOR, FONT)
        elif cmd == '3':
            RCD.delete_entry(Owner, DPATH, COLOR, FONT)
        elif cmd == '4':
            RCD.view_all(Owner, DPATH, COLOR, FONT)
        elif cmd == '5':
            OPT.change_mastercode(SPATH, COLOR, FONT)
        elif cmd == '6':
            CDT.about(COLOR, FONT)
        elif cmd == '7':
            sys.exit(2)
        else:
            raise InvalidCommandError("Invalid command")
    except InvalidCommandError as e:
        print(Fore.RED + '\n[!] Invalid Command')
        print(Fore.RESET)
        # Run windows PAUSE command
        os.system('PAUSE')
        UserOptions()
    # This part ignores 'Ctrl+C cancel operation'
    except KeyboardInterrupt:
        UserOptions()
def showAllEpisode(e):
    """Clear the console, print the banner, and list every episode name with a 1-based index."""
    os.system("cls")
    tprint("VIU VDO DOWNLOAD")
    for number, episode in enumerate(e, 1):
        print("[{0}] {1}".format(number, episode.name))
print('Correct TM: {}'.format(num_tm)) #%% Files attn_file = py_path + '/UKL_data/mu_Map/stir_mu_map.hv' # .nii possible, requires ITK print('mu-Map: {}'.format(attn_file)) # template for acq_data template_acq_data = Pet.AcquisitionData('Siemens_mMR', span=11, max_ring_diff=16, view_mash_factor=1) template_acq_data.write('template.hs') #%% resample mu-Map into correct space and transform via invers tm tprint('Start Resampling') attn_image = Pet.ImageData(attn_file) # template refernce template_image = template_acq_data.create_uniform_image(1.0) # EPI refernce file epi_data_path = py_path + '/UKL_data/EPI/1/' epi_file = epi_data_path + 'epi_0.nii' epi = Eng_ref.ImageData(epi_file) # define space matrices tm_fwd = numpy.loadtxt(py_path + '/UKL_data/tm_epi/reg_NAC_EPI.txt') tm_inv = numpy.loadtxt(py_path + '/UKL_data/tm_epi/reg_NAC_EPI_inv.txt')
CHOICES[selection[0]](*args) elif selection[0] = "p": CHOICES[selection[0]](*args) elif selection[0] = "c": CHOICES[selection[0]](*args) else: get_choice(*args) # Variables to hold webscraping url = 'http://www.imdb.com/chart/top' response = requests.get(url) soup = BeautifulSoup(response.text, 'html.parser') #get movies from td table data titleColumn movies = soup.select('td.titleColumn') #get title in formation form title= directors = [a.attrs.get('title').split("(dir.)")[0] for a in soup.select('td.titleColumn a')] populate_movies(Movie,movie_db, movies, directors) #Fancey menu heading print("\n" * 2, "*" * 39) tprint(" MOVIES\n by Mike", font="cybermedium") print("*" * 39, "\n" * 2) #Start of app here on the bottom lonely by itself app_start()