def gpu_load_names(**kwargs):
    """ Loads names used for renaming the banks

    Returns
    -------
    GPU DataFrame
    """
    chronometer = Chronometer.makeStarted()
    cols = ['seller_name', 'new_seller_name']
    dtypes = OrderedDict([
        ("seller_name", "category"),
        ("new_seller_name", "category"),
    ])
    names_table = pyblazing.create_table(table_name='names',
                                         type=get_type_schema(col_names_path),
                                         path=col_names_path,
                                         delimiter='|',
                                         names=cols,
                                         dtypes=get_dtype_values(dtypes),
                                         skip_rows=1)
    Chronometer.show(chronometer, 'Read Names CSV')
    return names_table
def create_delinq_features(table, **kwargs):
    chronometer = Chronometer.makeStarted()
    # Delinquency status counts 30-day buckets: status >= 1/3/6 means the loan
    # is at least 30/90/180 days delinquent.
    query = """SELECT loan_id, min(monthly_reporting_period) as delinquency_30
               FROM main.perf
               where current_loan_delinquency_status >= 1
               group by loan_id"""
    result_delinq_30 = pyblazing.run_query(query, {table.name: table.columns})

    query = """SELECT loan_id, min(monthly_reporting_period) as delinquency_90
               FROM main.perf
               where current_loan_delinquency_status >= 3
               group by loan_id"""
    result_delinq_90 = pyblazing.run_query(query, {table.name: table.columns})

    query = """SELECT loan_id, min(monthly_reporting_period) as delinquency_180
               FROM main.perf
               where current_loan_delinquency_status >= 6
               group by loan_id"""
    result_delinq_180 = pyblazing.run_query(query, {table.name: table.columns})

    new_tables = {
        "delinq_30": result_delinq_30.columns,
        "delinq_90": result_delinq_90.columns,
        "delinq_180": result_delinq_180.columns
    }
    query = """SELECT d30.loan_id, delinquency_30,
                      COALESCE(delinquency_90, DATE '1970-01-01') as delinquency_90,
                      COALESCE(delinquency_180, DATE '1970-01-01') as delinquency_180
               FROM main.delinq_30 as d30
               LEFT OUTER JOIN main.delinq_90 as d90 ON d30.loan_id = d90.loan_id
               LEFT OUTER JOIN main.delinq_180 as d180 ON d30.loan_id = d180.loan_id"""
    result_merge = pyblazing.run_query(query, new_tables)
    Chronometer.show(chronometer, 'Create delinquency features')
    return result_merge
def final_performance_delinquency(gdf, joined_df, **kwargs):
    chronometer = Chronometer.makeStarted()
    tables = {"gdf": gdf, "joined_df": joined_df}
    query = """SELECT g.loan_id, current_actual_upb, current_loan_delinquency_status,
                      delinquency_12, interest_rate, loan_age, mod_flag, msa,
                      non_interest_bearing_upb
               FROM main.gdf as g
               LEFT OUTER JOIN main.joined_df as j
                 ON g.loan_id = j.loan_id
                 and EXTRACT(YEAR FROM g.monthly_reporting_period) = j.timestamp_year
                 and EXTRACT(MONTH FROM g.monthly_reporting_period) = j.timestamp_month"""
    results = pyblazing.run_query(query, tables)
    Chronometer.show(chronometer, 'Final performance delinquency')
    return results
def join_perf_acq_gdfs(perf, acq, **kwargs):
    chronometer = Chronometer.makeStarted()
    tables = {"perf": perf, "acq": acq}
    query = """SELECT p.loan_id, current_actual_upb, current_loan_delinquency_status,
                      delinquency_12, interest_rate, loan_age, mod_flag, msa,
                      non_interest_bearing_upb, borrower_credit_score, dti,
                      first_home_buyer, loan_purpose, mortgage_insurance_percent,
                      num_borrowers, num_units, occupancy_status, orig_channel,
                      orig_cltv, orig_date, orig_interest_rate, orig_loan_term,
                      orig_ltv, orig_upb, product_type, property_state, property_type,
                      relocation_mortgage_indicator, seller_name, zip
               FROM main.perf as p
               LEFT OUTER JOIN main.acq as a ON p.loan_id = a.loan_id"""
    results = pyblazing.run_query(query, tables)
    Chronometer.show(chronometer, 'Join performance acquisition gdfs')
    return results
def create_ever_features(table, **kwargs):
    chronometer = Chronometer.makeStarted()
    query = """SELECT loan_id,
                      max(current_loan_delinquency_status) >= 1 as ever_30,
                      max(current_loan_delinquency_status) >= 3 as ever_90,
                      max(current_loan_delinquency_status) >= 6 as ever_180
               FROM main.perf
               group by loan_id"""
    result = pyblazing.run_query(query, {table.name: table.columns})
    Chronometer.show(chronometer, 'Create Ever Features')
    return result
def combine_joined_12_mon(joined_df, testdf, **kwargs):
    chronometer = Chronometer.makeStarted()
    tables = {"joined_df": joined_df, "testdf": testdf}
    query = """SELECT j.loan_id, j.mrp_timestamp, j.timestamp_month, j.timestamp_year,
                      j.ever_30, j.ever_90, j.ever_180,
                      j.delinquency_30, j.delinquency_90, j.delinquency_180,
                      t.delinquency_12, t.upb_12
               FROM main.joined_df as j
               LEFT OUTER JOIN main.testdf as t
                 ON j.loan_id = t.loan_id
                 and j.timestamp_year = t.timestamp_year
                 and j.timestamp_month = t.timestamp_month"""
    results = pyblazing.run_query(query, tables)
    Chronometer.show(chronometer, 'Combine joined 12 month')
    return results
def join_ever_delinq_features(everdf_tmp, delinq_merge, **kwargs):
    chronometer = Chronometer.makeStarted()
    tables = {"everdf": everdf_tmp, "delinq": delinq_merge}
    query = """SELECT everdf.loan_id as loan_id, ever_30, ever_90, ever_180,
                      COALESCE(delinquency_30, DATE '1970-01-01') as delinquency_30,
                      COALESCE(delinquency_90, DATE '1970-01-01') as delinquency_90,
                      COALESCE(delinquency_180, DATE '1970-01-01') as delinquency_180
               FROM main.everdf as everdf
               LEFT OUTER JOIN main.delinq as delinq
                 ON everdf.loan_id = delinq.loan_id"""
    result_merge = pyblazing.run_query(query, tables)
    Chronometer.show(chronometer, 'Create ever delinquency features')
    return result_merge
def last_mile_cleaning(df, **kwargs):
    chronometer = Chronometer.makeStarted()
    # Encode categorical columns as their integer codes, then cast everything
    # to float32. (.items() replaces the long-deprecated .iteritems().)
    for col, dtype in df.dtypes.items():
        if str(dtype) == 'category':
            df[col] = df[col].cat.codes
        df[col] = df[col].astype('float32')
    # Binarize the target: any delinquency in the 12-month window counts as 1.
    df['delinquency_12'] = df['delinquency_12'] > 0
    df['delinquency_12'] = df['delinquency_12'].fillna(False).astype('int32')
    # Replace remaining nulls with a sentinel value.
    for column in df.columns:
        df[column] = df[column].fillna(-1)
    Chronometer.show(chronometer, 'Last mile cleaning')
    return df
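# A minimal illustration of the category-to-codes step above, written in plain
# pandas for clarity (the pipeline itself runs on GPU dataframes, which expose
# the same .cat.codes accessor):
import pandas as pd

s = pd.Series(['BANK A', 'BANK B', 'BANK A'], dtype='category')
print(s.cat.codes.astype('float32').tolist())  # -> [0.0, 1.0, 0.0]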
def gpu_load_acquisition_csv(acquisition_path, **kwargs):
    """ Loads acquisition data

    Returns
    -------
    GPU DataFrame
    """
    chronometer = Chronometer.makeStarted()
    cols = [
        'loan_id', 'orig_channel', 'seller_name', 'orig_interest_rate',
        'orig_upb', 'orig_loan_term', 'orig_date', 'first_pay_date', 'orig_ltv',
        'orig_cltv', 'num_borrowers', 'dti', 'borrower_credit_score',
        'first_home_buyer', 'loan_purpose', 'property_type', 'num_units',
        'occupancy_status', 'property_state', 'zip',
        'mortgage_insurance_percent', 'product_type', 'coborrow_credit_score',
        'mortgage_insurance_type', 'relocation_mortgage_indicator'
    ]
    dtypes = OrderedDict([
        ("loan_id", "int64"),
        ("orig_channel", "category"),
        ("seller_name", "category"),
        ("orig_interest_rate", "float64"),
        ("orig_upb", "int64"),
        ("orig_loan_term", "int64"),
        ("orig_date", "date"),
        ("first_pay_date", "date"),
        ("orig_ltv", "float64"),
        ("orig_cltv", "float64"),
        ("num_borrowers", "float64"),
        ("dti", "float64"),
        ("borrower_credit_score", "float64"),
        ("first_home_buyer", "category"),
        ("loan_purpose", "category"),
        ("property_type", "category"),
        ("num_units", "int64"),
        ("occupancy_status", "category"),
        ("property_state", "category"),
        ("zip", "int64"),
        ("mortgage_insurance_percent", "float64"),
        ("product_type", "category"),
        ("coborrow_credit_score", "float64"),
        ("mortgage_insurance_type", "float64"),
        ("relocation_mortgage_indicator", "category"),
    ])
    print(acquisition_path)
    acquisition_table = pyblazing.create_table(
        table_name='acq',
        type=get_type_schema(acquisition_path),
        path=acquisition_path,
        delimiter='|',
        names=cols,
        dtypes=get_dtype_values(dtypes),
        skip_rows=1)
    Chronometer.show(chronometer, 'Read Acquisition CSV')
    return acquisition_table
def __init__(self, s, lmbda, mu, nq):
    self.lmbda = lmbda
    self.nq = nq
    self.n = nq
    self.mu = mu
    self.s = s
    self.ei = 0
    self.chronometer = Chronometer()
    self.queue = QueueTP(self.lmbda, self.nq, self.chronometer)
    self.servers = []
    # self.statistic = Statistics(lmbda, mu, s, nq, nq)
    for _ in range(s):
        self.servers.append(Server(self.mu, self.queue, self.chronometer))
def merge_names(names_table, acq_table):
    chronometer = Chronometer.makeStarted()
    tables = {
        names_table.name: names_table.columns,
        acq_table.name: acq_table.columns
    }
    query = """SELECT loan_id, orig_channel, orig_interest_rate, orig_upb,
                      orig_loan_term, orig_date, first_pay_date, orig_ltv,
                      orig_cltv, num_borrowers, dti, borrower_credit_score,
                      first_home_buyer, loan_purpose, property_type, num_units,
                      occupancy_status, property_state, zip,
                      mortgage_insurance_percent, product_type,
                      coborrow_credit_score, mortgage_insurance_type,
                      relocation_mortgage_indicator,
                      new_seller_name as seller_name
               FROM main.acq as a
               LEFT OUTER JOIN main.names as n ON a.seller_name = n.seller_name"""
    result = pyblazing.run_query(query, tables)
    Chronometer.show(chronometer, 'Create Acquisition (Merge Names)')
    return result
def read_data(n=415):
    with Chronometer() as cr:
        k = 1
        imageDTOs = []
        with open("input/truth.txt", "r") as reader:
            length = int(reader.readline())
            if n < length:
                length = n
            for _ in range(length):
                # Each record starts with a "rows,cols" header line.
                header = reader.readline().split(",")
                rows = int(header[0])
                m = int(header[1])
                matrix = []
                for _ in range(rows):
                    matrix_line = []
                    line = reader.readline().split(",")
                    for j in range(m):
                        matrix_line.append(int(line[j]))
                    matrix.append(matrix_line)
                truth = reader.readline()
                truth = truth[:-1]  # drop the trailing newline
                imageDTOs.append(ImageDTO(matrix, truth))
                if k % 5 == 0:
                    print("#" + str(k))
                k += 1
        print('Reading done..\nElapsed time: {:.3f} seconds\nTotal length: {}'.
              format(float(cr), len(imageDTOs)))
    return imageDTOs
def acquisition(items, **kwargs):
    import concurrent.futures
    import contextlib

    import emoji
    from prompt_toolkit import print_formatted_text
    from prompt_toolkit.patch_stdout import patch_stdout
    from prompt_toolkit.shortcuts.progress_bar import ProgressBar

    def progress(bar):
        acq = bar.data
        acq.start()
        for result in bar:
            result.Release()

    cameras, configs, opts, acqs = zip(*items)
    movie_camera = emoji.EMOJI_ALIAS_UNICODE[":movie_camera:"]
    title = f"{movie_camera} Acquiring on " + ", ".join(str(c) for c in cameras)
    pbar = ProgressBar(title=title)
    chrono = Chronometer()
    pool = concurrent.futures.ThreadPoolExecutor(max_workers=len(cameras))
    try:
        with contextlib.ExitStack() as stack:
            [stack.enter_context(camera) for camera in cameras]
            [stack.enter_context(acq) for acq in acqs]
            stack.enter_context(pbar)
            bars = [
                pbar(acq, label=f'{acq.camera}: ', total=acq.nb_frames)
                for acq in acqs
            ]
            stack.enter_context(chrono)
            for result in pool.map(progress, bars):
                pass
    finally:
        print_formatted_text(f"Elapsed time: {chrono.elapsed:.6f}s")
def camera_acquisition(ctx, trigger, nb_frames, exposure, latency, roi,
                       binning, pixel_format):
    """do an acquisition on the selected camera"""
    import emoji
    from prompt_toolkit import print_formatted_text
    from prompt_toolkit.patch_stdout import patch_stdout
    from prompt_toolkit.shortcuts.progress_bar import ProgressBar

    camera = ctx.obj["camera"]
    config = ctx.obj["config"]
    trigger = trigger.lower()
    config.trigger_source = trigger
    total_time = nb_frames * (exposure + latency)
    if roi is not None:
        roi = [int(i) for i in roi.split(",")]
        assert len(roi) == 4
    movie_camera = emoji.emojize(":movie_camera:")
    title = f"{movie_camera} Acquiring {nb_frames} frames"
    if nb_frames:
        total_time = nb_frames * (exposure + latency)
        title += f" (Total acq. time: {total_time:.3f}s)"
    acq = Acquisition(
        camera, nb_frames, exposure, latency,
        roi=roi, trigger=trigger, binning=binning, pixel_format=pixel_format
    )
    prog_bar = ProgressBar(title=title)
    chrono = Chronometer()
    try:
        with camera, acq, prog_bar:
            bar = prog_bar(label=f"{camera}: ", total=nb_frames)
            acq.start()
            with Chronometer() as chrono:
                frame_nb = 0
                while frame_nb < nb_frames or not nb_frames:
                    if trigger == "software":
                        pause(
                            "Press any key to trigger acquisition "
                            f"{frame_nb+1} of {nb_frames}... "
                        )
                    result = next(acq)
                    result.Release()
                    frame_nb += 1
                    bar.items_completed = frame_nb
                    prog_bar.invalidate()
    finally:
        print_formatted_text(f"Elapsed time: {chrono.elapsed:.6f}s")
import time

from chronometer import Chronometer


def test_integration():
    t = Chronometer()
    t.start()
    a = t.elapsed
    time.sleep(0.002)
    b = t.elapsed
    assert b > a  # elapsed grows while the chronometer is running
    t.stop()
    c = t.elapsed
    time.sleep(0.002)
    d = t.elapsed
    assert (d - c) < 0.000001  # elapsed is frozen once stopped
def create_12_mon_features_union(joined_df, **kwargs):
    chronometer = Chronometer.makeStarted()
    tables = {"joined_df": joined_df}
    josh_mody_n_str = "timestamp_year * 12 + timestamp_month - 24000.0"
    query = ("SELECT loan_id, " + josh_mody_n_str + " as josh_mody_n, "
             "max(delinquency_12) as max_d12, min(upb_12) as min_upb_12 "
             "FROM main.joined_df as joined_df "
             "GROUP BY loan_id, " + josh_mody_n_str)
    mastertemp = pyblazing.run_query(query, tables)

    all_temps = []
    all_tokens = []
    tables = {"joined_df": mastertemp.columns}
    n_months = 12
    # Launch one asynchronous query per month offset...
    for y in range(1, n_months + 1):
        josh_mody_n_str = "floor((josh_mody_n - " + str(y) + ")/12.0)"
        query = ("SELECT loan_id, " + josh_mody_n_str + " as josh_mody_n, "
                 "max(max_d12) > 3 as max_d12_gt3, "
                 "min(min_upb_12) = 0 as min_upb_12_eq0, "
                 "min(min_upb_12) as upb_12 "
                 "FROM main.joined_df as joined_df "
                 "GROUP BY loan_id, " + josh_mody_n_str)
        metaToken = pyblazing.run_query_get_token(query, tables)
        all_tokens.append(metaToken)
    # ...then collect their results.
    for metaToken in all_tokens:
        temp = pyblazing.run_query_get_results(metaToken)
        all_temps.append(temp)

    # Stitch the twelve per-month results back together with a UNION ALL.
    y = 1
    tables2 = {"temp1": all_temps[0].columns}
    union_query = ("(SELECT loan_id, "
                   "max_d12_gt3 + min_upb_12_eq0 as delinquency_12, upb_12, "
                   "floor(((josh_mody_n * 12) + " + str(24000 + (y - 1)) +
                   ")/12) as timestamp_year, "
                   "josh_mody_n * 0 + " + str(y) + " as timestamp_month "
                   "from main.temp" + str(y) + ")")
    for y in range(2, n_months + 1):
        tables2["temp" + str(y)] = all_temps[y - 1].columns
        query = (" UNION ALL (SELECT loan_id, "
                 "max_d12_gt3 + min_upb_12_eq0 as delinquency_12, upb_12, "
                 "floor(((josh_mody_n * 12) + " + str(24000 + (y - 1)) +
                 ")/12) as timestamp_year, "
                 "josh_mody_n * 0 + " + str(y) + " as timestamp_month "
                 "from main.temp" + str(y) + ")")
        union_query = union_query + query
    results = pyblazing.run_query(union_query, tables2)
    Chronometer.show(chronometer, 'Create 12 month features once')
    return results
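# The string concatenation above is easiest to follow by looking at what it
# produces. For reference, the first two branches of the generated UNION ALL
# query (with y = 1 and y = 2 substituted, whitespace added here for
# readability only) come out as:
#
#   (SELECT loan_id, max_d12_gt3 + min_upb_12_eq0 as delinquency_12, upb_12,
#           floor(((josh_mody_n * 12) + 24000)/12) as timestamp_year,
#           josh_mody_n * 0 + 1 as timestamp_month from main.temp1)
#   UNION ALL
#   (SELECT loan_id, max_d12_gt3 + min_upb_12_eq0 as delinquency_12, upb_12,
#           floor(((josh_mody_n * 12) + 24001)/12) as timestamp_year,
#           josh_mody_n * 0 + 2 as timestamp_month from main.temp2)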
def create_joined_df(gdf, everdf, **kwargs):
    chronometer = Chronometer.makeStarted()
    tables = {"perf": gdf, "everdf": everdf}
    query = """SELECT perf.loan_id as loan_id,
                      perf.monthly_reporting_period as mrp_timestamp,
                      EXTRACT(MONTH FROM perf.monthly_reporting_period) as timestamp_month,
                      EXTRACT(YEAR FROM perf.monthly_reporting_period) as timestamp_year,
                      COALESCE(perf.current_loan_delinquency_status, -1) as delinquency_12,
                      COALESCE(perf.current_actual_upb, 999999999.9) as upb_12,
                      everdf.ever_30 as ever_30,
                      everdf.ever_90 as ever_90,
                      everdf.ever_180 as ever_180,
                      COALESCE(everdf.delinquency_30, DATE '1970-01-01') as delinquency_30,
                      COALESCE(everdf.delinquency_90, DATE '1970-01-01') as delinquency_90,
                      COALESCE(everdf.delinquency_180, DATE '1970-01-01') as delinquency_180
               FROM main.perf as perf
               LEFT OUTER JOIN main.everdf as everdf
                 ON perf.loan_id = everdf.loan_id"""
    results = pyblazing.run_query(query, tables)
    Chronometer.show(chronometer, 'Create Joined DF')
    return results
def sendFromOptions(self):
    with Chronometer() as t:
        self.conectar()  # raises an error if neither -g nor -n was given!
        logging.warning('conectar() took {:.3f} seconds!'.format(float(t)))
        if self.getContatoBol:
            print(self.getContato())
            logging.warning(
                'print( self.getContato() ) took {:.3f} seconds!'.format(
                    float(t)))
        if self.msg.c_name:
            self.enviarMensagem(c_name=self.msg.c_name,
                                message=self.msg.message)
            self.lockScreen(False)
    logging.warning('sendFromOptions() took {:.3f} seconds!'.format(float(t)))
def read_data_img(resize=-1):
    with Chronometer() as cr:
        mias = []
        ddsm = []
        # t = threading.Thread(target=read_mias_data, args=(mias, resize,))
        t2 = threading.Thread(target=read_ddsm_data, args=(ddsm, resize,))
        t2.start()
        # t.start()
        # t.join()
        t2.join()
        imageDTOs = mias + ddsm
        print('Reading done..\nElapsed time: {:.3f} seconds\nTotal length: {}'.
              format(float(cr), len(imageDTOs)))
    write_data(imageDTOs)
    return imageDTOs
def hydrate(ids, path, filename):
    """ Hydrate tweets in order to update the data """
    T = twarc.Twarc()
    last_t = 0
    with Chronometer() as t:
        count = 0
        hydrated_tweets = []
        for tweet in T.hydrate(iter(ids)):
            assert tweet['id_str']
            count += 1
            hydrated_tweets.append(tweet)
            # Report progress roughly every 10 seconds of elapsed time.
            if (int(float(t)) % 10 == 0 and int(float(t)) != last_t):
                print("Hydrated tweets:", len(hydrated_tweets))
                last_t = int(float(t))
    with jsonlines.open(path + filename, mode='w') as writer:
        for obj in hydrated_tweets:
            writer.write(obj)
    return count, hydrated_tweets
def upload_users():
    """ Uploads all the generated users profile to api/ucarpooling/users/ """
    try:
        con = sqlite3.connect(settings.DATABASE)
        con.row_factory = sqlite3.Row
        cursorObj = con.cursor()

        """Building the query for retrieving all the users and their assigned profiles"""
        querystring = Query \
            .from_(Table(settings.DATABASE_TABLE_ALUMNI)) \
            .join(Table(settings.DATABASE_TABLE_ELOQUENCE)) \
            .on_field('uuid') \
            .join(Table(settings.DATABASE_TABLE_SMOKER)) \
            .on_field('uuid') \
            .join(Table(settings.DATABASE_TABLE_MUSIC)) \
            .on_field('uuid') \
            .select('*') \
            .limit(settings.LIMIT_USERS)

        """Executing the query"""
        rows = cursorObj.execute(querystring.get_sql()).fetchall()

        """Iterating over each row in the database for alumni"""
        with Chronometer() as time_uploading:
            for alumni in rows:
                """Building the body in a json-like format for the body of the POST request"""
                body = {
                    "email": f"{alumni[settings.FIELDNAME_UUID.lower()]}@mail.com",
                    "password": "******",
                    "first_name": str(alumni[settings.FIELDNAME_UUID.lower()]),
                    "last_name": str(alumni[settings.FIELDNAME_UUID.lower()]),
                    "ucarpoolingprofile": {
                        "sex": alumni[settings.FIELDNAME_SEX.lower()],
                        "smoker": True if alumni[settings.FIELDNAME_SMOKER.lower()] == 'Si' else False,
                        "musicTaste": alumni[settings.FIELDNAME_MUSIC_TASTE.lower()].split(", "),
                        "eloquenceLevel": get_eloquence_level(alumni[settings.FIELDNAME_ELOQUENCE.lower()])
                    }
                }

                "POST the alumni data to the API"
                response = requests.post(
                    url=settings.USER_URL,
                    json=body,
                    headers={
                        "Authorization": f'Token {settings.UCARPOOLING_APP_TOKEN}'  # Token of the Ucarpooling app
                    }
                )

                if response.status_code == 201:
                    helper.success_message(f'Uploaded successfully alumni {alumni[settings.FIELDNAME_UUID.lower()]}')
                    get_token(con, cursorObj, alumni)
                else:
                    helper.error_message(f'Error uploading alumni {alumni[settings.FIELDNAME_UUID.lower()]} '
                                         f'---- status code: {response.status_code}: {response.reason}')

        """Uploading ended"""
        helper.info_message('=================UPLOADING ENDED=====================')
        helper.detail_message('Uploading runtime: {:.3f} seconds'.format(float(time_uploading)))
    except Error as e:
        print(e)
    finally:
        """Closing the database connection"""
        con.close()
from sklearn import metrics
from sklearn.cluster import DBSCAN
import matplotlib.pyplot as plotter
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split

DEFAULT_STATION_BAUDRATE: int = 9600  # Default baudrate for serial or Bluetooth communication with the station slaves.
DEFAULT_SUPERVISOR_BAUDRATE: int = 4800  # Default baudrate for serial or Bluetooth communication with the supervisor slave.
DEFAULT_STATION_PORT: str = "/dev/ttyS6"  # Default port for serial or Bluetooth communication with the station slaves.
DEFAULT_SUPERVISOR_PORT: str = "/dev/ttyS3"  # Default port for serial or Bluetooth communication with the supervisor slave.
DATASET_FILE_PATH: str = "dataset.txt"  # File containing the data fed to the DBSCAN algorithm.
ERRORSET_FILE_PATH: str = "errorset.csv"  # File storing the detected errors, fed to the Decision Tree classification model.

timerStation: Chronometer = Chronometer()  # Chronometer for the time spent at each station.
stationThread: Thread = None  # Thread that runs the await-for-response loop for the stations' Arduino.
controlThread: Thread = None  # Thread that controls stopping the await-for-response while `stationThread` is busy with DBSCAN.
eventRoutine: bool = False  # Flag for command input.
isRunning: bool = False  # Running-state flag for the thread loops.
isControlActive: bool = False  # Indicates the state of the control thread.
stationPort: Serial = None  # Serial communication port for the stations.
supervisorPort: Serial = None  # Serial communication port for the supervisor.

# Limit = Average + STD * Threshold -> limit above which an anomaly is suspected.
threshold: float = 1  # Threshold - parameter that multiplies the standard-deviation cutoff.
avg: float = 4  # Average - mean of the time values in the dataset.
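# Worked example of the limit formula above (the STD value here is
# hypothetical; only `avg` and `threshold` are defined in this module):
# with avg = 4, threshold = 1 and a sample STD of 0.5,
#   limit = avg + std * threshold = 4 + 0.5 * 1 = 4.5 seconds,
# so any station time above 4.5 s would be flagged as a suspected anomaly.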
class TabRover(QWidget):
    '''
    Class TabRover is the sub widget that contains the rover launch,
    configuration and results for the rover
    Inherits from QWidget
    Divided into a Right Part, a Left Part, an Upper Part and a Lower Part

    Attributes:
        private path dirtrs : directory of the script
        private ConnectionToModel rover_model : connector to the Rover Model
        private QTimer rover_timer : timer of the widget
        private QPushButton start_b : button that launches the acquisition
        private QPushButton config_b : button that opens the Config Window
        private QLabel icon : rover image
        private Chronometer chrono_rover : chronometer that appears on the UI for the rover
        private QLabel lSol : indicates the mode of acquisition
        private QLabel lLon : shows the calculated Longitude
        private QLabel lLat : shows the calculated Latitude
        private QLabel lHeight : shows the calculated ellipsoidal Height
        private QLabel stream_status : shows the status of the stream
    '''

    def __init__(self):
        # Inherits from the QWidget class
        super().__init__()

        # Setting font
        self.setFont(QFont('Helvetica', 25))

        # Get path to the script
        self.__dirtrs = os.path.dirname(os.path.abspath(__file__))

        # Connection to the Rover model
        self.__rover_model = ConnectionToModel()

        # Timer
        self.__rover_timer = QtCore.QTimer(self)
        self.__rover_timer.timeout.connect(self.updateRover)
        self.__satellites = None

        ###### RIGHT PART OF THE WIDGET ######
        # Start Button
        self.__start_b = QPushButton('Start', self)
        self.__start_b.setCheckable(True)
        self.__start_b.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.MinimumExpanding)
        self.__start_b.toggled.connect(self.startRover)

        # Config Button
        self.__config_b = QPushButton('Config', self)
        self.__config_b.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.MinimumExpanding)
        self.__config_b.clicked.connect(self.openConfig)

        # Setting right part layout
        right_layout = QHBoxLayout()
        right_layout.addWidget(self.__start_b)
        right_layout.addWidget(self.__config_b)

        ###### LEFT PART OF THE WIDGET ######
        # Rover image
        fig = QPixmap(self.__dirtrs + '/img/rover.png')
        self.__icon = QLabel(self)
        self.__icon.setPixmap(fig)

        # Chrono
        self.__chrono_rover = Chronometer()

        # Setting left part layout
        left_layout = QVBoxLayout()
        left_layout.addWidget(self.__icon)
        left_layout.addWidget(self.__chrono_rover)

        ###### LOWER PART OF THE WIDGET ######
        # Position indicators
        Sol_ = QLabel('Sol:')
        Sol_.setAlignment(QtCore.Qt.AlignRight)
        Lat_ = QLabel('Lat:')
        Lat_.setAlignment(QtCore.Qt.AlignRight)
        Lon_ = QLabel('Lon:')
        Lon_.setAlignment(QtCore.Qt.AlignRight)
        Alt_ = QLabel('Height:')
        Alt_.setAlignment(QtCore.Qt.AlignRight)

        # Calculated Position to be modified by updateRover()
        self.__lSol = QLabel('')
        self.__lLat = QLabel('')
        self.__lLon = QLabel('')
        self.__lHeight = QLabel('')

        # Stream indicators
        status = QLabel('Stream Status:')
        status.setAlignment(QtCore.Qt.AlignLeft)
        self.__stream_status = QLabel('Not Started')
        self.__stream_status.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Minimum)

        # Setting lower part layout
        lower_layout = QHBoxLayout()
        lower_layout.addWidget(Sol_)
        lower_layout.addWidget(self.__lSol)
        lower_layout.addWidget(Lat_)
        lower_layout.addWidget(self.__lLat)
        lower_layout.addWidget(Lon_)
        lower_layout.addWidget(self.__lLon)
        lower_layout.addWidget(Alt_)
        lower_layout.addWidget(self.__lHeight)

        lower_layout2 = QHBoxLayout()
        lower_layout2.addWidget(status)
        lower_layout2.addWidget(self.__stream_status)

        ##### SETTING THE GLOBAL LAYOUT ######
        rover_layout1 = QHBoxLayout()
        rover_layout1.addLayout(left_layout)
        rover_layout1.addLayout(right_layout)

        rover_layout = QVBoxLayout()
        rover_layout.addLayout(rover_layout1)
        rover_layout.addLayout(lower_layout)
        rover_layout.addLayout(lower_layout2)

        self.setLayout(rover_layout)

    ################# FUNCTIONS #########################

    def getModel(self):
        '''
        getter of the model
        return ConnectionToModel
        '''
        return self.__rover_model

    def passSatellites(self, satellites):
        '''
        passes the tabsatellites to connect to the rover timer
        '''
        self.__satellites = satellites

    def openConfig(self):
        '''
        Opens the RoverConfig subwindow
        '''
        try:
            # disabling buttons to prevent multiple openings and launches
            self.__config_b.setDisabled(True)
            self.__start_b.setDisabled(True)
            subWindow = RoverConfigWindow(self)
            subWindow.setModel(self.__rover_model)
            subWindow.show()
        except Exception as e:
            print(e)
        # enabling buttons back
        self.__config_b.setDisabled(False)
        self.__start_b.setDisabled(False)

    def startRover(self):
        '''
        Launches the acquisition
        Notifies the Model
        Modifies the UI
        '''
        if self.__start_b.isChecked():
            # if the acquisition is started
            try:
                # Notifying the model
                real_rover_model = self.__rover_model.getInstanceRover()
                real_rover_model.startRover()
                # modifying the UI
                self.__start_b.setText('Stop')
                self.__config_b.setDisabled(True)
                self.__chrono_rover.start()
                self.__rover_timer.start(1000)
            except Exception as e:
                print(e)
        else:
            # if the acquisition is stopped
            try:
                self.__rover_timer.stop()
                # Notifying the model
                real_rover_model = self.__rover_model.getInstanceRover()
                real_rover_model.stopRover()
                # modifying the UI
                self.__start_b.setText('Start')
                self.__config_b.setDisabled(False)
                self.__chrono_rover.stop()
                self.__lSol.setText('')
                self.__lLat.setText('')
                self.__lLon.setText('')
                self.__lHeight.setText('')
                self.__stream_status.setText('Not Started')
            except Exception as e:
                print(e)

    def updateRover(self):
        '''
        Accesses the raw data from the model and displays it on screen:
        the calculation mode, the calculated position and the stream status
        '''
        real_rover_model = self.__rover_model.getInstanceRover()
        rawsol, rawstream = real_rover_model.getRaw()
        # solutions
        if len(rawsol) > 34:
            soltypes = re.findall(r'\(.*\)', rawsol)
            print(soltypes)
            try:
                soltype = soltypes[0][1:-1].strip()
                self.__lSol.setText(soltype)
                self.__lSol.setStyleSheet('font-family: Helvetica; font-size: 25pt')
            except Exception as e:
                print(e)
            sols = re.findall(r'\d*\.\d*', rawsol)
            print(sols)
            try:
                self.__lLat.setText(sols[1])
                self.__lLon.setText(sols[2])
                self.__lHeight.setText(sols[3])
            except Exception as e:
                print(e)
        # stream
        rawstreams = rawstream.split('\n')
        statstr = ''
        for stream in rawstreams:
            if stream.find('error') > 0:
                streams = stream.split()
                statstr = streams[0] + ' stream error'
            if stream.find(' C ') > 0:
                streams = stream.split()
                if streams[0] == 'input':
                    statstr = streams[1] + ':' + streams[6] + 'bps '
                else:
                    statstr = streams[0] + ':' + streams[8] + 'bps '
        self.__stream_status.setText(statstr)
def simulator():
    """ Simulates the use of the Ucarpooling app """

    """Variables for statistics in the simulation"""
    max_time = 0
    total_time = 0
    total_errors = 0

    """Get the set of people that will request a carpooling partner"""
    rows = get_requesters()

    helper.info_message('==============STARTING SIMULATION=====================')

    """Iterating over each row in the database for alumni"""
    with Chronometer() as time_simulation:
        total_row_count = len(rows)
        row_counter = 0
        helper.info_message(f'{total_row_count} records to check matching')
        for alumni in rows:
            print('=========================================')
            row_counter += 1
            helper.info_message(f'Progress: {row_counter}/{total_row_count}')

            alumni_token = alumni['token']
            alumni_id = alumni[settings.FIELDNAME_UUID.lower()]
            alumni_ucarpooling_id = alumni['ucarpoolingprofile_id']
            alumni_useritinerary_id = alumni['useritinerary_id']

            "GET the matches for the alumni"
            with Chronometer() as time_matching:
                response = requests.get(
                    url=get_matcher_url(alumni_useritinerary_id),
                    headers={"Authorization": f'Token {alumni_token}'})

            """Time statistics for the matcher of the back-end"""
            match_time = float(time_matching)
            helper.detail_message('Match for {} took {:.3f} seconds'.format(
                alumni_id, match_time))
            total_time += match_time
            max_time = match_time if max_time < match_time else max_time

            if response.status_code == 200:
                body_response = response.json()
                partners = get_carpooling_partner(body_response)
                if partners:
                    create_carpool(alumni_ucarpooling_id, partners,
                                   alumni_useritinerary_id)
                else:
                    helper.warning_message(
                        f'{alumni_id} had matches but did not travel with poolers')
            else:
                """The server did not respond with a good result"""
                total_errors += 1
                if response.status_code == 420:
                    helper.warning_message(f'{alumni_id} is already in a carpool')
                elif response.status_code == 204:
                    helper.no_matches_message(f'{alumni_id} did not have any matches')
                    create_carpool(alumni_ucarpooling_id, [],
                                   alumni_useritinerary_id)
                else:
                    helper.error_message(
                        f'Error getting matches for alumni {alumni_id} '
                        f'---- status code: {response.status_code}: {response.reason}')

    """The simulation ended"""
    helper.info_message('=================SIMULATION ENDED=====================')
    helper.detail_message(f'There was a total of {total_errors} errors')
    helper.detail_message(f'Max total match time: {max_time} seconds')
    helper.detail_message('Simulation runtime: {:.3f} seconds'.format(
        float(time_simulation)))
def main():
    long_running_task = lambda: time.sleep(1.)
    with Chronometer() as t:
        long_running_task()  # that will take a few seconds.
    print('Phew, that took me {:.3f} seconds!'.format(float(t)))
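# The snippets in this file also use the Chronometer object directly, outside
# a with-block. A minimal sketch of that explicit style, assuming only the
# calls that appear elsewhere in this file (start(), stop(), reset(),
# .elapsed, float(t)):
import time
from chronometer import Chronometer

t = Chronometer()
t.start()
time.sleep(0.01)
print('running: {:.3f}s'.format(t.elapsed))  # grows while the clock runs
print('lap: {:.3f}s'.format(t.reset()))      # reset() returns the lap time and restarts
time.sleep(0.01)
t.stop()
print('stopped: {:.3f}s'.format(t.elapsed))  # frozen after stop()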
def upload_users_itinerary():
    """ Uploads all the generated user itineraries """
    alumni_auth = Table(settings.DATABASE_TABLE_AUTH)
    try:
        con = sqlite3.connect(settings.DATABASE)
        con.row_factory = sqlite3.Row
        cursorObj = con.cursor()

        """Building the query for retrieving all the users and their assigned profiles"""
        querystring = Query \
            .from_(Table(settings.DATABASE_TABLE_ALUMNI)) \
            .join(Table(settings.DATABASE_TABLE_ITINERARY)) \
            .on_field('uuid') \
            .join(Table(settings.DATABASE_TABLE_CARS)) \
            .on_field('uuid') \
            .select('*') \
            .limit(settings.LIMIT_USERS)
        # print(querystring.get_sql())

        """Executing the query"""
        rows = cursorObj.execute(querystring.get_sql()).fetchall()

        with Chronometer() as time_uploading:
            """Iterating over each row in the database for alumni"""
            for alumni in rows:
                """Building the body in a json-like format for the body of the POST request"""
                origen = f'{alumni[settings.FIELDNAME_LATITUDE.lower()]},{alumni[settings.FIELDNAME_LONGITUDE.lower()]}'
                toa = f'{date.today()}T{alumni[settings.FIELDNAME_TOA.lower()]}Z'
                body = {
                    "isDriver": True if alumni[settings.FIELDNAME_TRANSPORT.lower()] == 'Car' else False,
                    "origin": origen,
                    "destination": "-25.324491,-57.635437",  # Uca latitude and longitude
                    "timeOfArrival": toa
                }

                """Getting the token of the alumni for the POST header"""
                querystring = Query \
                    .from_(alumni_auth) \
                    .select(alumni_auth.token) \
                    .where(alumni_auth.uuid == alumni[settings.FIELDNAME_UUID.lower()])
                cursorObj.execute(querystring.get_sql())
                alumni_token = (cursorObj.fetchone())['token']

                """POST request for the itinerary"""
                response = requests.post(
                    url=settings.USER_ITINERARY_URL,
                    json=body,
                    headers={
                        "Authorization": f'Token {alumni_token}'  # Token of the alumni
                    }
                )

                if response.status_code == 201:
                    helper.success_message(f'Uploaded successfully itinerary for alumni {alumni[settings.FIELDNAME_UUID.lower()]}')
                    body_response = response.json()
                    store_useritinerary_id(con, cursorObj, body_response['id'],
                                           alumni[settings.FIELDNAME_UUID.lower()])
                else:
                    helper.error_message(f'Error uploading itinerary for alumni {alumni[settings.FIELDNAME_UUID.lower()]} '
                                         f'---- status code: {response.status_code}: {response.reason}')

        """Uploading ended"""
        helper.info_message('=================UPLOADING ENDED=====================')
        helper.detail_message('Uploading runtime: {:.3f} seconds'.format(float(time_uploading)))
    except Error as e:
        print(e)
    finally:
        """Closing the database connection"""
        con.close()
from time import sleep

from chronometer import Chronometer

counter = 0


def long_running_task_that_can_fail():
    global counter
    counter += 1
    sleep(2.)
    return counter > 3


with Chronometer() as t:
    while not long_running_task_that_can_fail():
        print('Failed after {:.3f} seconds!'.format(t.reset()))
    print('Success after {:.3f} seconds!'.format(float(t)))
print("Performance may be reduced.") cl.enqueue_copy(queue, f0_mem, f0, is_blocking=True) cl.enqueue_copy(queue, f1_mem, f1, is_blocking=True) cl.enqueue_copy(queue, m_d_mem, m_d, is_blocking=True) # for each device we have to reinitialize variables row = 0 k = 0 i = 0 f = numpy.zeros_like(f0) M = numpy.array([]).astype(numpy.uint32) sumW = values.sum() start = time.time() chrono = Chronometer().start() for k in range(0, values.size, 1): weight_k = weights.take(k) value_k = values.take(k) sumW = sumW - weight_k if CAPACITY - sumW > weight_k: cmax = CAPACITY - sumW else: cmax = weight_k total_elements = CAPACITY - cmax + 1 if total_elements > 0: power = k % 32 if i%2 == 0:
from balanced_binary_search_tree import Tree
import random as rdn
from chronometer import Chronometer

if __name__ == "__main__":
    tree_control = Tree()
    with Chronometer() as time:
        i = 0
        count = 50
        # tree_control.insert_node([50,40,35,30,25,47,43,41,44,55,60,57,65])
        with Chronometer() as t:
            while i < count:
                info = rdn.randint(1, 100)
                tree_control.insert_node(info)
                if i == count / 2:
                    temp = info
                i = i + 1
        print('To insert spend {:.5f} seconds'.format(float(t)))

        with Chronometer() as t:
            print('\nin order: ', end='')
            tree_control.in_order()
        print('\n\nTo print spend {:.5f} seconds'.format(float(t)))

        print('\nTree Info: ')
        print(' root -> ', tree_control.root.info)
        print(' Left height -> ', tree_control.root.left_height)
def chronometer(time_gun):
    return Chronometer(time_gun)