def roc_auc(self, fpr, tpr, auc, path):
    """Generate the ROC AUC curve and save it to disk.

    Arguments:
    - fpr: array-like of float
        False positive rates
    - tpr: array-like of float
        True positive rates
    - auc: float
        ROC AUC score (displayed in the plot title)
    - path: str
        Absolute path to the file where the curve must be saved

    Returns:
    - No return values
    """
    plt.figure(figsize=(9, 9))
    plt.plot(fpr, tpr, color='red', lw=2, label='ROC curve')
    # Diagonal reference line of a random classifier
    plt.plot([0, 1], [0, 1], color='blue', lw=2, linestyle='--')
    plt.xlabel('FPR')
    plt.ylabel('TPR')
    plt.title('ROC curve with AUC: {0}'.format(auc), size=15)
    plt.savefig(path)
    # Close the figure so repeated calls do not leak open figures
    # (matplotlib keeps every figure alive until explicitly closed).
    plt.close()
    logger.info("{0} successfully generated".format(path))
    return None
def decision_tree_viz(self, model, path):
    """Render a fitted decision tree model as a PNG image.

    Arguments:
    - model: Decision tree model
    - path: str
        Absolute path to the PNG file where the visualization
        must be generated

    Returns:
    - No return values
    """
    feature_names = self.read_feature_file(self.v.features_f)
    dot_buffer = StringIO()
    export_graphviz(model,
                    out_file=dot_buffer,
                    filled=True,
                    rounded=True,
                    special_characters=True,
                    feature_names=feature_names,
                    class_names=['0', '1'])
    # Validate the target extension, then turn the DOT source into a PNG
    self.check_extension(path, 'png')
    tree_graph = pydotplus.graph_from_dot_data(dot_buffer.getvalue())
    tree_graph.write_png(path)
    logger.info("{0} successfully generated".format(path))
    return None
def confmat_heatmap(self, cm, score, path):
    """Generate the heatmap for the confusion matrix and save it.

    Arguments:
    - cm: array
        Confusion matrix
    - score: float
        Accuracy (displayed in the plot title)
    - path: str
        Absolute path to the file where the heatmap will be generated

    Returns:
    - No return values
    """
    plt.figure(figsize=(9, 9))
    sns.heatmap(cm, annot=True, fmt=".3f", linewidths=.5, square=True,
                cmap='Blues_r')
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    all_sample_title = 'Accuracy Score: {0}'.format(score)
    plt.title(all_sample_title, size=15)
    plt.savefig(path)
    # Close the figure so repeated calls do not leak open figures
    # (matplotlib keeps every figure alive until explicitly closed).
    plt.close()
    logger.info("{0} successfully generated".format(path))
    return None
def _remember_we_got_that_packet(self, packet):
    """Add the packet id to the list of packet ids already received."""
    # The first four fields of a packet are: destination id, source id,
    # packet id, and one unused field.
    dest_id, src_id, pkt_id, _ = packet[:4]
    entry = (src_id, dest_id, pkt_id, time.time())
    self._received_packets_header_and_time.append(entry)
    logger.info(
        'message (id : {}) stored in the list of packages already received.'.format(pkt_id))
def import_dataset(self, abs_path):
    """Import the dataset from a csv, xls or xlsx file.

    Arguments:
    - abs_path: str
        Absolute path to the dataset file

    Returns:
    - data: pandas dataframe
        Dataset as a pandas dataframe
    """
    self.check_file_existence(abs_path)
    logger.info("Reading {0}".format(abs_path))
    ext = abs_path.split('.')[-1]
    if ext == 'csv':
        data = pd.read_csv(abs_path)
    elif ext in ('xls', 'xlsx'):
        # read_excel handles both legacy .xls and modern .xlsx workbooks
        data = pd.read_excel(abs_path)
    else:
        logger.error(
            "Expected extensions csv, xls or xlsx, got {0}".format(ext))
        # NOTE(review): assumed to raise or exit; if it returns normally,
        # `data` below would be unbound — confirm Helpers.error() semantics.
        self.error()
    logger.info("{0} has {1} rows and {2} columns".format(
        abs_path, data.shape[0], data.shape[1]))
    return data
async def on_message(self, message):
    """Screen image attachments through the Azure content filter.

    Ignores the bot's own messages. When the azure filter is enabled and
    the message carries attachments, the first attachment's URL is sent
    to Azure; the verdict drives an ok-reaction, a deletion, or a
    nok-reaction.
    """
    if message.author == self.bot.user:
        return
    if (self.bot.conf["azurefilter"] == True):
        if len(message.attachments) > 0:
            # Payload for the Azure request; only the first attachment is
            # checked. NOTE(review): the local name `json` shadows the json
            # module within this method — confirm nothing below needs it.
            json = {'url': '{0}'.format(message.attachments[0].url)}
            # NOTE(review): this logs the Azure API key at info level —
            # secrets in logs are usually a security concern; confirm.
            logger.info("Azure request: " + self.bot.conf["azure_url_ext"] + "\n" + str(json) + "\n" + str(self.bot.conf["azure_key"]))
            # tuomio ("verdict"): False = image acceptable, True = flagged
            # as adult content; any other value is treated as a failure
            # status below — inferred from the branches, confirm against
            # azure_request.
            tuomio = self.azure_request(self.bot.conf["azure_url_ext"], json, self.bot.conf["azure_key"])
            if (tuomio is False):
                await message.add_reaction(self.bot.conf["emoji_ok"])
            elif (tuomio == True and message.channel.is_nsfw() is False):
                # Flagged image posted outside an NSFW channel: delete and
                # tell the channel why.
                await message.delete()
                msg = 'Image was deleted due to high Adultscore\nPlease repost to NSFW'
                logger.info("NSFW image deleted")
                await message.channel.send(msg)
            else:
                # NOTE(review): this branch is also reached when tuomio is
                # True in an NSFW channel, where "tuomio" is a bool and the
                # string concatenation below raises TypeError — confirm and
                # consider str(tuomio).
                logger.warning("Failed to fetch NFSW rating, status: " + tuomio)
                await message.add_reaction(self.bot.conf["emoji_nok"])
def run(self):
    """Main controller loop.

    Repeatedly refreshes the configuration, requests status from the
    Arduino, and processes any pending serial responses. When a stop is
    requested the thread only terminates once both heating and cooling
    are off; otherwise it keeps asking them to shut down.
    """
    logger.info("Starting Controller")
    self.bStopThread = False
    # PEP 8: test the flag directly instead of comparing to True
    while not self.bStopThread:
        self._readConf()
        # request status from Arduino
        self._updateState()
        time.sleep(UPDATE_INVERVAL)
        # reads and processes all data from serial queue
        while self.ser.inWaiting() > 0:
            raw_line = self.ser.readline()
            # renamed from `str`, which shadowed the builtin
            response = raw_line.decode().strip()
            logger.debug("RESPONSE Raw: " + response)
            self._processControllerResponse(response)
        if self.bStopRequest:
            # Only stop the thread once heating and cooling are both off;
            # until then, keep requesting shutdown.
            if not self.bCoolOn and not self.bHeatOn:
                self.bStopThread = True
            else:
                self.stopHeatingCooling()
    logger.info("Controller Stopped")
def _update(self):
    """Post the current reading to Brewfather when an update is due.

    An update is sent only when all of these hold: updating is enabled,
    a Brewfather URL is configured, the update interval has elapsed
    since the last post, the payload carries a device name, and new
    data arrived since the last post. Otherwise the gating parameters
    are logged at debug level.
    """
    updateTime = self.lastUpdateTime + datetime.timedelta(
        seconds=self.interval)
    # check update flag is enabled
    # check Brewfather URL is defined
    # check the updated interval has elapsed since last update
    # check relevant data for the system is valid
    # check the data has been updated since last update
    if self.bUpdate and self.sURL != "" and datetime.datetime.now() > updateTime and \
            self.postdata["name"] != "" and \
            self.bNewData:
        try:
            self.jsondump = json.dumps(self.postdata).encode('utf8')
            req = request.Request(
                self.sURL,
                data=self.jsondump,
                headers={'content-type': 'application/json'})
            # The response body is not used; the context manager closes
            # the connection instead of leaking it.
            with request.urlopen(req):
                pass
            self.bNewData = False
            self.lastUpdateTime = datetime.datetime.now()
            logger.info("BrewFather: " + self.postdata["temp"] + "C, " +
                        self.postdata["gravity"] + "SG, " +
                        self.postdata["aux_temp"] + "C, " +
                        self.lastUpdateTime.strftime("%d.%m.%Y %H:%M:%S"))
        except Exception:
            # Was a bare `except:` which would also swallow SystemExit and
            # KeyboardInterrupt; logger.exception records the traceback.
            logger.exception("Exception posting to Brewfather: " +
                             self.getLastJSON())
    else:
        logger.debug("Update parameters:\nbUpdate = " + str(self.bUpdate) +
                     "\nsUrl = " + self.sURL +
                     "\npostdata.Name = " + self.postdata["name"] +
                     "\npostdata.temp = " + self.postdata["temp"] +
                     "\npostdata.gravity = " + self.postdata["gravity"] +
                     "\npostdata.aux_temp = " + self.postdata["aux_temp"] +
                     "\nupdateTime = " + updateTime.strftime("%d.%m.%Y %H:%M:%S") +
                     "\nlastUpdateTime = " + self.lastUpdateTime.strftime("%d.%m.%Y %H:%M:%S") +
                     "\nCurrent Time = " + datetime.datetime.now().strftime("%d.%m.%Y %H:%M:%S"))
async def create_club(self, ctx, group):
    """Create a club role and a matching private text channel.

    The club name must match the configured club prefix. On success a
    role and a text channel (hidden from the default role) are created
    and the command message gets the ok emoji; otherwise it gets the
    nok emoji.
    """
    self.bot.commands_called = self.bot.commands_called + 1
    logger.info("command create club called")
    group = group.lower()
    # re.match returns a truthy match object or None; the bool() wrapper
    # in the original was redundant.
    if re.match(self.bot.conf["club_prefix"], group):
        role = discord.utils.get(ctx.guild.roles, name=group)
        if role is None:  # identity check, not `== None`
            guild = ctx.guild
            await guild.create_role(name=group, mentionable=True)
            # Hide the channel from everyone by default; members gain
            # access by joining the club.
            overwrites = {
                guild.default_role:
                discord.PermissionOverwrite(read_messages=False)
            }
            category = discord.utils.get(
                ctx.guild.categories, name=self.bot.conf["clubs_category"])
            await guild.create_text_channel(group, overwrites=overwrites,
                                            category=category)
            await ctx.message.add_reaction(self.bot.conf["emoji_ok"])
        else:
            # Club already exists
            await ctx.message.add_reaction(self.bot.conf["emoji_nok"])
    else:
        # Name does not carry the required club prefix
        await ctx.message.add_reaction(self.bot.conf["emoji_nok"])
def write_dataset(self, dataset, abs_path):
    """Write a dataset to a csv, xls or xlsx file.

    Arguments:
    - dataset: pandas dataframe
        Dataset to be written
    - abs_path: str
        Absolute path to the file where the dataset must be written

    Returns:
    - No return value
    """
    logger.info("Creating {0}".format(abs_path))
    ext = abs_path.split('.')[-1]
    if ext == 'csv':
        dataset.to_csv(abs_path, index=False)
    elif ext in ('xls', 'xlsx'):
        # to_excel handles both legacy .xls and modern .xlsx workbooks
        dataset.to_excel(abs_path, index=False)
    else:
        logger.error(
            "Expected extensions csv, xls or xlsx, got {0}".format(ext))
        # NOTE(review): assumed to raise or exit, mirroring import_dataset
        self.error()
    logger.info("{0} successfully generated".format(abs_path))
    return None
def get_backup_evolv_collection(collection):
    """Fetch a collection from the backup_hiera_evolv database.

    Arguments:
    - collection: name of the collection to read

    Returns:
    - Currently always the empty string (see NOTE below).
    """
    logger.info('get_backup_evolv_collection called')
    cx = MongoDB(HOST)
    backup_evolv = cx.getDB('backup_hiera_evolv')
    documents = ''
    # NOTE(review): `documents` is never updated inside the loop, so this
    # function always returns ''. The print() looks like debug leftover.
    # The loop presumably should accumulate each document — confirm the
    # intended return shape (string vs list) before fixing.
    for document in backup_evolv[collection].find():
        print(document)
    return documents
def get_vision_client_ids():
    """Return all client ids from the hiera_vision database as JSON."""
    logger.info('get_vision_client_ids called')
    # open connection and select the hiera_vision database
    connection = MongoDB(HOST)
    vision_db = connection.getDB('hiera_vision')
    # return client_ids as json
    return connection.get_client_ids(vision_db)
def get_evolv_client_ids():
    """Return all client ids from the hiera_evolv database as JSON."""
    logger.info('get_evolv_client_ids called')
    # open connection and select the hiera_evolv database
    connection = MongoDB(HOST)
    evolv_db = connection.getDB('hiera_evolv')
    # return client_ids as json
    return connection.get_client_ids(evolv_db)
def run(self):
    """Thread loop: refresh configuration and push Brewfather updates
    once a minute until the stop flag is raised."""
    logger.info("Starting BrewFather Logging")
    self.stopThread = False
    # PEP 8: test the flag directly instead of comparing to True
    while not self.stopThread:
        self._readConf()
        self._update()
        # poll once a minute; _update applies its own interval gating
        time.sleep(60)
    logger.info("BrewFather Monitoring Stopped")
def preprocess(dataset, start_year, end_year, final_dataset):
    """
    1. Imports dataset
    2. Selects only PLWHA without RF in start_year
    3. For each disease, if disease in any year from start_year to
       end_year-1, then disease present in end_year

    Arguments:
    - dataset: xls or csv
        Absolute path to the dataset file
    - start_year: int
        Starting year
    - end_year: int
        Ending year
    - final_dataset: str
        Absolute path to the file where the preprocessed data must be
        written to

    Returns:
    - No return values
    """
    h = Helpers()
    v = Variables()
    # Diagnostics
    h.check_file_existence(dataset)
    h.check_year(start_year, end_year)
    h.check_dir(os.path.dirname(final_dataset))
    # Import dataset
    data = h.import_dataset(dataset)
    # Individuals in data who have HIV at start_year
    data_hiv = data[data["HIV" + str(start_year)] == 1]
    # Individuals in data_HIV who don't have RF at start_year
    data_hiv_no_rf = data_hiv[data_hiv["RF" + str(start_year)] == 0]
    logger.info("{0} PLWHA without RF in {1}".format(data_hiv_no_rf.shape[0],
                                                    start_year))
    # Read diseases from disease file
    diseases = h.read_feature_file(v.diseases_f)
    # Work on a copy so the original selection is untouched
    data_hiv_no_rf_copy = data_hiv_no_rf.copy()
    logger.info(
        "Processing disease values of {0} based on previous years".format(
            end_year))
    # For each disease, set value 1 at end_year if value is 1 at any
    # previous year. `.loc` replaces the `.ix` indexer, which was
    # deprecated in pandas 0.20 and removed in pandas 1.0.
    for year in range(start_year, end_year):
        for disease in diseases:
            data_hiv_no_rf_copy.loc[
                data_hiv_no_rf_copy[disease + str(year)] == 1,
                [disease + str(end_year)]] = 1
    # Save data_hiv_no_rf_copy to final_dataset
    h.write_dataset(data_hiv_no_rf_copy, final_dataset)
    return None
def extract_color(self, colorname, boundaries):
    """Highlight the pathway of one color in the image.

    Builds an HSV mask for *boundaries*, keeps the matching pixels in
    color and renders the rest of the image as blurred grayscale, then
    writes the result to a file named after the input image and the
    color.

    Arguments:
    - colorname: str, name of the color ("red" gets a second, wrapped
      hue range merged in, since red spans the HSV hue boundary)
    - boundaries: (lower, upper) HSV bound pair
    """
    logger.info("Extracting the {} pathway".format(colorname))
    lower, upper = boundaries
    # create NumPy arrays from the boundaries
    lower = np.array(lower, dtype="uint8")
    upper = np.array(upper, dtype="uint8")
    # find the colors within the specified boundaries
    # and apply the mask
    mask_color = cv2.inRange(self.img_hsv, lower, upper)
    if colorname == "red":
        # red hue wraps around 180 in OpenCV HSV, so a second range is
        # OR-ed into the mask
        lower, upper = RED_UPPER_BOUNDARIES
        # create NumPy arrays from the boundaries
        lower = np.array(lower, dtype="uint8")
        upper = np.array(upper, dtype="uint8")
        mask_color_neg = cv2.inRange(self.img_hsv, lower, upper)
        mask_color = cv2.bitwise_or(mask_color, mask_color_neg)
    if self.debug:
        cv2.imshow("Color mask", mask_color)
        cv2.waitKey(0)
    # restrict the color mask to the foreground mask
    mask = cv2.bitwise_and(mask_color, self.bkgrd_mask)
    # Arbitrary number of pixels minimum to validate the color.
    # NOTE(review): inRange masks hold 0/255, so this threshold is
    # ~4 pixels (1000/255), not 1000 pixels as the warning says —
    # confirm whether `cv2.countNonZero(mask) < 1000` was intended.
    if sum(mask.flatten()) < 1000:
        logger.warning("Less than 1000 pixel for {}.".format(colorname))
        return
    # Extract colored part
    colored_part = cv2.bitwise_and(src1=self.img_rgb, src2=self.img_rgb,
                                   mask=mask)
    # Add blur to the gray version
    gray_image_blur = cv2.GaussianBlur(src=self.img_gray, ksize=(11, 11),
                                       sigmaX=0)
    # Extract the gray part
    gray_part = cv2.bitwise_and(src1=gray_image_blur, src2=gray_image_blur,
                                mask=cv2.bitwise_not(mask))
    # Mix the gray part and the color part
    extracted_path = colored_part + gray_part
    # save the extracted_path
    # NOTE(review): depends on the module-level `args` dict from the CLI
    # parser; consider passing the image path in explicitly.
    filename = ''.join("{}_{}.jpg".format(args["image"].split(".")[0],
                                          colorname))
    # Side by side original picture and the extracted path
    if self.debug:
        cv2.imwrite(filename, np.hstack([self.img_rgb, extracted_path]))
    else:
        cv2.imwrite(filename, extracted_path)
def vision_environments_upg(client_id):
    """Return the vision 'environments.UPG' section for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['vision']['environments']['UPG']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_environments_upg called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(client_dictionary['vision']['environments']['UPG'])
def vision_rodc_secondary(client_id):
    """Return the vision 'rodc.secondary' section for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['vision']['rodc']['secondary']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_rodc_secondary called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(client_dictionary['vision']['rodc']['secondary'])
def vision_win_server_timezone(client_id):
    """Return the vision 'win_server.timezone' value for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['vision']['win_server']['timezone']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_win_server_timezone called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(client_dictionary['vision']['win_server']['timezone'])
def vision_environments_to_deploy_nonprod(client_id):
    """Return the vision 'environments_to_deploy_nonProd' list for a client.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['vision']['environments_to_deploy_nonProd']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_environments_to_deploy_nonprod called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(
        client_dictionary['vision']['environments_to_deploy_nonProd'])
def vision_components_devero_service_enabled(client_id):
    """Return the vision 'components.devero_service_enabled' flag as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['vision']['components']['devero_service_enabled']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_components_devero_service_enabled called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(
        client_dictionary['vision']['components']['devero_service_enabled'])
def evolv_environments(client_id):
    """Return the evolv 'environments' section for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['evolv']['environments']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('evolv_environments called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_evolv = cx.getDB('hiera_evolv')
    client_document = cx.get_client_collection(hiera_evolv, id_string)
    client_dictionary = json.loads(client_document)
    # local renamed so it no longer shadows the function name
    environments = json.dumps(client_dictionary['evolv']['environments'])
    return environments
def get_vision_client_web_document(id):
    """Return the LIVE web section of a vision client's document as JSON."""
    logger.info('get_vision_client_web_document called')
    connection = MongoDB(HOST)
    vision_db = connection.getDB('hiera_vision')
    # fetch the client document and pull out vision.environments.LIVE.web
    document = connection.get_client_collection(vision_db, str(id))
    parsed = json.loads(document)
    return json.dumps(parsed['vision']['environments']['LIVE']['web'])
def get_evolv_client_collection(id):
    """Return the full hiera_evolv collection document for a client id.

    Arguments:
    - id: client identifier; coerced to str for the lookup

    Returns:
    - the client collection as returned by MongoDB.get_client_collection
    """
    # Log the actual function name; the original message said
    # 'get_vision_client_ids called', copy-pasted from another endpoint.
    logger.info('get_evolv_client_collection called')
    id_string = str(id)
    cx = MongoDB(HOST)
    # get hiera_evolv database
    hiera_evolv = cx.getDB('hiera_evolv')
    # return the client collection as json
    return cx.get_client_collection(hiera_evolv, id_string)
def vision_environments_train_web_app_offline(client_id):
    """Return the TRAIN web 'app_offline' flag for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of
      client['vision']['environments']['TRAIN']['web']['app_offline']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_environments_train_web_app_offline called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(
        client_dictionary['vision']['environments']['TRAIN']['web']['app_offline'])
def vision_credentials(client_id):
    """Return the vision 'credentials' section for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of client['vision']['credentials']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_credentials called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    # local renamed so it no longer shadows the function name
    credentials = json.dumps(client_dictionary['vision']['credentials'])
    return credentials
def vision_environments_dev_db_restore_db_version(client_id):
    """Return the DEV db 'restore_db_version' value for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of
      client['vision']['environments']['DEV']['db']['restore_db_version']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_environments_dev_db_restore_db_version called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(
        client_dictionary['vision']['environments']['DEV']['db']['restore_db_version'])
def run(self):
    """Thread loop: monitor the Tilt of this color until stopped."""
    logger.info("Starting Tilt Monitoring: " + self.color)
    self.stopThread = False
    # PEP 8: test the flag directly instead of comparing to True
    while not self.stopThread:
        self._readConf()
        # calls method that updates data from Tilt if update time
        # interval has lapsed
        self._update()
        time.sleep(MINIMUM_INTERVAL)
    logger.info("Tilt Monitoring:" + self.color + " Stopped.")
def vision_environments_live_web_crm_version(client_id):
    """Return the LIVE web 'crm_version' value for a client as JSON.

    Arguments:
    - client_id: client identifier; coerced to str for the lookup

    Returns:
    - JSON string of
      client['vision']['environments']['LIVE']['web']['crm_version']
    """
    # Log the actual function name (the old message was copy-pasted from
    # another endpoint) and use the module-level HOST for consistency with
    # the sibling endpoints instead of a hard-coded 'localhost'.
    logger.info('vision_environments_live_web_crm_version called')
    cx = MongoDB(HOST)
    id_string = str(client_id)
    hiera_vision = cx.getDB('hiera_vision')
    client_document = cx.get_client_collection(hiera_vision, id_string)
    client_dictionary = json.loads(client_document)
    return json.dumps(
        client_dictionary['vision']['environments']['LIVE']['web']['crm_version'])
async def stats(self, ctx):
    """Reply with a code-block report of commands answered and uptime."""
    self.bot.commands_called += 1
    report_lines = [
        "```",
        "Commands answered: " + str(self.bot.commands_called),
        "Uptime: " + self.uptime(),
        "```",
    ]
    logger.info("Stats called")
    await ctx.send("\n".join(report_lines))