def create_session(self, session_id, model_key):
    if session_id in self._SESSION_MAP:
        raise ValueError("session_id %s has already been created" % (session_id))
    if model_key not in MODELS:
        raise ValueError(
            "%s is not a valid model, check the keys in models.json" % (model_key))

    worker = self._WORKER_POOL.get()  # this will block until we have a free one
    port = get_free_tcp_port()
    MODELS[model_key].update({"gpu_id": worker["gpu_id"], "port": port})

    if worker["type"] == "local":
        gpu_id = worker["gpu_id"]
        process = self._spawn_local_worker(**MODELS[model_key])
        model = PytorchUNet(MODELS[model_key]["fn"], gpu_id,
                            MODELS[model_key]["inputShape"])
        session = Session(session_id, model)
        self._SESSION_MAP[session_id] = session
        self._SESSION_INFO[session_id] = {"worker": worker, "process": process}
        LOGGER.info("Created a local worker for (%s) on GPU %d" %
                    (session_id, gpu_id))
    elif worker["type"] == "remote":
        raise NotImplementedError("Remote workers aren't implemented yet")
    else:
        raise ValueError("Worker type %s isn't recognized" % (worker["type"]))
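# `get_free_tcp_port` is referenced above but not shown here. A minimal sketch of what
# such a helper typically does (an assumption, not necessarily the original code):
# bind a socket to port 0 so the OS assigns an unused port, then release it.
import socket

def get_free_tcp_port():
    # Hypothetical implementation for illustration; the real helper may differ.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("", 0))
        return s.getsockname()[1]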
def save_traffic(traffic_obj_list, id, piece=3000):
    """
    :param traffic_obj_list:
    :param id: task id
    :param piece: default 3000 per piece
    :return:
    """
    traffic_path = Engine.get_traffic_path(id)
    if len(traffic_obj_list) > 0:
        saved_traffic_list = [i for i in traffic_obj_list]
        # slice traffic if too large
        if len(saved_traffic_list) > piece:
            traffic_divided_path = []
            traffic_divided = divide_list(saved_traffic_list, piece)
            for i in range(len(traffic_divided)):
                traffic_divided_path.append(traffic_path + str(i))
                with open(traffic_path + str(i), 'w') as traffic_f:
                    cPickle.dump(traffic_divided[i], traffic_f)
            LOGGER.info('Traffic of %s has been divided and saved to %s.' %
                        (id, ','.join(traffic_divided_path)))
        else:
            with open(traffic_path, 'w') as traffic_f:
                cPickle.dump(saved_traffic_list, traffic_f)
            LOGGER.info('Traffic of %s has been saved to %s.' % (id, traffic_path))
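# `divide_list` is used above but not defined in this snippet. A minimal sketch of its
# presumed behaviour (an assumption): split a list into chunks of at most `piece` items.
def divide_list(items, piece):
    # Hypothetical helper for illustration; the original implementation may differ.
    return [items[i:i + piece] for i in range(0, len(items), piece)]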
def save_analysis(self):
    LOGGER.info(
        'Total multipart is: %s, redirect is: %s, request exception is: %s' %
        (len(MULTIPART), len(REDIRECT), len(REQUEST_ERROR)))
    self.save_multipart()
    self.save_redirect()
    self.save_request_exception()
def get_pages(self) -> Optional[dict]:
    """
    Fetch Ghost pages.

    :returns: Optional[dict]
    """
    try:
        headers = {
            "Authorization": f"Ghost {self.session_token}",
            "Content-Type": "application/json",
        }
        endpoint = f"{self.admin_api_url}/pages"
        resp = requests.get(endpoint, headers=headers)
        if resp.json().get("errors") is not None:
            LOGGER.error(
                f"Failed to fetch Ghost pages: {resp.json().get('errors')[0]['message']}"
            )
            return None
        pages = resp.json()["pages"]
        LOGGER.info(f"Fetched Ghost pages ({endpoint})")
        return pages
    except HTTPError as e:
        LOGGER.error(f"Ghost HTTPError while fetching pages: {e}")
    except KeyError as e:
        LOGGER.error(f"KeyError for `{e}` occurred while fetching pages")
    except Exception as e:
        LOGGER.error(f"Unexpected error occurred while fetching pages: {e}")
async def optimize_post_image(post_update: PostUpdate) -> PlainTextResponse:
    """
    Generate retina version of a post's feature image if one doesn't exist.

    :param PostUpdate post_update: Incoming payload for an updated Ghost post.
    :returns: PlainTextResponse
    """
    new_images = []
    post = post_update.post.current
    feature_image = post.feature_image
    title = post.title
    if feature_image:
        new_images.append(images.create_retina_image(feature_image))
        new_images.append(images.create_mobile_image(feature_image))
        new_images = [image for image in new_images if image is not None]
        if new_images:
            LOGGER.info(
                f"Generated {len(new_images)} images for post `{title}`: {new_images}"
            )
            return PlainTextResponse(f"{post.title}: {new_images}")
        return PlainTextResponse(
            content=f"Retina & mobile images already exist for {post.title}.")
    return PlainTextResponse(
        content=f"Post `{post.slug}` ignored; no image exists for optimization."
    )
def _https_session(self) -> None:
    """Authorize HTTPS session with Ghost admin."""
    endpoint = f"{self.admin_api_url}/session/"
    headers = {"Authorization": self.session_token}
    resp = requests.post(endpoint, headers=headers)
    LOGGER.info(f"Authorization resulted in status code {resp.status_code}.")
def new_ghost_subscription(
        user: NetlifyAccount) -> Optional[Dict[str, List[Dict]]]:
    """
    Create Ghost member from Netlify identity signup.

    :param NetlifyAccount user: New user account from Netlify auth.
    :returns: Optional[Dict[str, List[Dict]]]
    """
    body = {
        "accounts": [{
            "name": user.user_metadata.full_name,
            "email": user.email,
            "note": "Subscribed from Netlify",
            "subscribed": True,
            "comped": False,
            "labels": user.user_metadata.roles,
        }]
    }
    response, code = ghost.create_member(body)
    if code != 200:
        error_type = response["errors"][0]["type"]
        if error_type == "ValidationError":
            LOGGER.info(
                f"Skipped Ghost member creation for existing user: {user.user_metadata.full_name} <{user.email}>"
            )
    else:
        LOGGER.success(
            f"Created new Ghost member: {user.user_metadata.full_name} <{user.email}>"
        )
    return body
def purge_unwanted_images(self, folder: str) -> List[str]:
    """
    Delete images which have been compressed or generated multiple times.

    :param str folder: Directory to recursively apply image transformations.
    :returns: List[str]
    """
    images_purged = []
    LOGGER.info("Purging unwanted images...")
    substrings = [
        "@2x@2x",
        "_o",
        "psd",
        "?",
        "@2x-",
        "-1-1",
        "-1-2",
        ".webp",
        "_retina/_retina",
        "_retina/_mobile/",
    ]
    blobs = self.get(folder)
    image_blob_names = [blob.name for blob in blobs]
    for image_blob_name in image_blob_names:
        if any(substr in image_blob_name for substr in substrings):
            self.bucket.delete_blob(image_blob_name)
            images_purged.append(image_blob_name)
            LOGGER.info(f"Deleted {image_blob_name}.")
    return images_purged
async def member_unsubscribe(subscriber: Subscriber):
    """
    Log user unsubscribe events.

    :param Subscriber subscriber: Current Ghost newsletter subscriber.
    """
    subscriber = subscriber.previous
    LOGGER.info(f"`{subscriber.name}` unsubscribed from newsletter.")
def _upload_ssh_key(self):
    try:
        system(
            f"ssh-copy-id -i {self.ssh_key_filepath}.pub {self.user}@{self.host}>/dev/null 2>&1"
        )
        LOGGER.info(f"{self.ssh_key_filepath} uploaded to {self.host}")
    except FileNotFoundError as error:
        LOGGER.error(error)
def function_timer(*args, **kwargs):
    LOGGER.info("Start running {0} ...".format(fn.__name__))
    t0 = time.time()
    result = fn(*args, **kwargs)
    t1 = time.time()
    LOGGER.info("Total time running {0}: {1} seconds".format(
        fn.__name__, round(t1 - t0, 3)))
    return result
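# `function_timer` closes over `fn`, so it is presumably the inner wrapper of a timing
# decorator. A minimal, self-contained sketch of how the enclosing decorator might look;
# the name `timed` and the local LOGGER setup are assumptions for illustration only.
import functools
import logging
import time

LOGGER = logging.getLogger(__name__)  # the original module presumably defines its own LOGGER

def timed(fn):
    @functools.wraps(fn)
    def function_timer(*args, **kwargs):
        LOGGER.info("Start running {0} ...".format(fn.__name__))
        t0 = time.time()
        result = fn(*args, **kwargs)
        t1 = time.time()
        LOGGER.info("Total time running {0}: {1} seconds".format(
            fn.__name__, round(t1 - t0, 3)))
        return result
    return function_timer

# Usage: decorate any function to log its runtime.
# @timed
# def slow_task(): ...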
def run(self, version):
    self.VERSION = version
    LOGGER.debug('Running setup')
    self.setup()
    with open('./lib/bot/token.0', 'r', encoding="utf-8") as tf:
        self.TOKEN = tf.read().strip()
    LOGGER.info("Running bot")
    super().run(self.TOKEN, reconnect=True)
def _remove_repeat_blobs(self, image_blobs):
    images_purged = []
    r = re.compile(r"-[0-9]-[0-9]@2x\.jpg")
    repeat_blobs = list(filter(r.match, image_blobs))
    for repeat_blob in repeat_blobs:
        self.bucket.delete_blob(repeat_blob)
        images_purged.append(repeat_blob)
        LOGGER.info(f"Deleted {repeat_blob}")
    return images_purged
def _get_ssh_key(self):
    """Fetch locally stored SSH key."""
    try:
        self.ssh_key = RSAKey.from_private_key_file(self.ssh_key_filepath)
        LOGGER.info(f"Found SSH key at {self.ssh_key_filepath}")
        return self.ssh_key
    except SSHException as e:
        LOGGER.error(e)
def log_info(fmt, *args):
    """Deprecated: use LOGGER.info directly."""
    warnings.warn("log_info is deprecated, use LOGGER.info instead",
                  DeprecationWarning, stacklevel=2)
    if args:
        LOGGER.info(fmt.format(*args))
    else:
        LOGGER.info(fmt)
async def github_pr(request: Request) -> JSONResponse:
    """
    Send SMS and Discord notifications upon PR creation in HackersAndSlackers Github projects.

    :param Request request: Incoming Github payload for newly opened PR.
    :returns: JSONResponse
    """
    payload = await request.json()
    action = payload.get("action")
    user = payload["sender"].get("login")
    pull_request = payload["pull_request"]
    repo = payload["repository"]
    if user in (settings.GITHUB_USERNAME, "dependabot-preview[bot]", "renovate[bot]"):
        return JSONResponse({
            "pr": {
                "id": pull_request["number"],
                "time": get_current_time(),
                "status": "ignored",
                "trigger": {
                    "type": "github",
                    "repo": repo["full_name"],
                    "title": pull_request["title"],
                    "user": user,
                    "action": action,
                },
            }
        })
    message = (
        f"PR {action} for `{repo['name']}`:\n"
        f"{pull_request['title']} "
        f"{pull_request['body']} "
        f"{pull_request['url']}"
    )
    sms_message = sms.send_message(message)
    LOGGER.info(f"Github PR {action} for {repo['name']} generated SMS message")
    return JSONResponse({
        "pr": {
            "id": pull_request["number"],
            "time": get_current_time(),
            "status": sms_message.status,
            "trigger": {
                "type": "github",
                "repo": repo["full_name"],
                "title": pull_request["title"],
                "user": user,
                "action": action,
            },
        },
        "sms": {
            "phone_recipient": sms_message.to,
            "phone_sender": sms_message.from_,
            "date_sent": sms_message.date_sent,
            "message": sms_message.body,
        },
    })
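# `get_current_time` is called above but not shown. A minimal sketch of a plausible
# implementation (an assumption; the real helper's timezone and format may differ):
from datetime import datetime

def get_current_time() -> str:
    # Hypothetical: human-readable timestamp for the notification payload.
    return datetime.now().strftime("%Y-%m-%d %H:%M:%S")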
def check_install():
    try:
        br = webdriver.Chrome()
    except Exception as e:
        LOGGER.info(e)
        try:
            br = webdriver.PhantomJS()
        except Exception as e:
            LOGGER.info(e)
            LOGGER.warn('No browser is installed correctly!')
def _upload_ssh_key(self):
    try:
        system(
            f"ssh-copy-id -i {self.ssh_key_filepath}.pub {self.user}@{self.host}>/dev/null 2>&1"
        )
        LOGGER.info(f"{self.ssh_key_filepath} uploaded to {self.host}")
    except FileNotFoundError as error:
        LOGGER.error(error)
    except Exception as e:
        LOGGER.error(f"Unexpected error occurred: {e}")
        raise e
def parse_by_token(data):
    result = {}
    split_symbol = ','
    data = re.sub(r'[\\\'\"{}\[\]]', '', data)
    if ',' in data:
        groups = data.split(split_symbol)
        for i in groups:
            if ':' in i:
                k, v = i.split(':')[0], i.split(':')[1]
                result[k] = v
        return result
    else:
        LOGGER.info('Can\'t parse body:\n%s' % data)
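# Illustrative call to `parse_by_token` (the sample payload is made up). The regex
# strips quotes, braces and brackets, then the body is split on commas and colons:
sample_body = '{"user":"admin","token":"abc123"}'
print(parse_by_token(sample_body))  # expected: {'user': 'admin', 'token': 'abc123'}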
async def test_orm(db: Session = Depends(get_db)) -> JSONResponse:
    """
    Test endpoint for fetching comments joined with user info.

    :param Session db: ORM Database session.
    :returns: JSONResponse
    """
    all_comments = db.query(Comment).join(
        Account, Comment.user_id == Account.id).all()
    for comment in all_comments:
        LOGGER.info(comment.user)
    return JSONResponse(all_comments)
def execute_commands(self, commands: List[str]):
    """
    Execute multiple commands in succession.

    :param commands: List of unix commands as strings.
    :type commands: List[str]
    """
    for cmd in commands:
        stdin, stdout, stderr = self.client.exec_command(cmd)
        stdout.channel.recv_exit_status()
        response = stdout.readlines()
        for line in response:
            LOGGER.info(f"INPUT: {cmd} | OUTPUT: {line}")
def send_message(self, message_body: str) -> MessageInstance:
    """
    Send Twilio message.

    :param str message_body: Content of SMS message to send.
    :returns: MessageInstance
    """
    LOGGER.info(f"SMS triggered by post edit: {message_body}")
    sms_message = self.client.messages.create(to=self.recipient,
                                              from_=self.sender,
                                              body=message_body)
    return sms_message
def update_html_ssl_links(html: str, body: dict, slug: str) -> dict:
    """
    Replace hyperlinks in post with SSL equivalents.

    :param str html: Raw post html.
    :param dict body: JSON body representing Ghost post.
    :param str slug: Unique post identifier for logging purposes.
    :returns: dict
    """
    html = html.replace("http://", "https://")
    body["posts"][0].update({"html": html})
    LOGGER.info(f"Replaced insecure links in post `{slug}`")
    return body
def bulk_upload(self, files: List[str]):
    """
    Upload multiple files to a remote directory.

    :param files: List of local files to be uploaded.
    :type files: List[str]
    """
    try:
        self.scp.put(files, remote_path=self.remote_path, recursive=True)
        LOGGER.info(
            f"Finished uploading {len(files)} files to {self.remote_path} on {self.host}"
        )
    except SCPException as e:
        raise e
def put_queue(self):
    traffic_path = []
    files = os.listdir(TRAFFIC_DIR)
    for i in files:
        if re.search(self.id + r'.traffic\d*', i):
            traffic_path.append(os.path.join(TRAFFIC_DIR, i))
    for i in traffic_path:
        with open(i) as f:
            traffic_list = cPickle.load(f)
            LOGGER.info(
                'Start to put traffic (from %s) into traffic_queue, total is %s.'
                % (i, len(traffic_list)))
            for traffic in traffic_list:
                traffic_queue.put(traffic)
def run_experiment(predicted_results, settings, limit, predicted_tag_count):
    """ Run the experiment with configuration """
    tags_info = settings["tags_info"]
    sample_count = config.CLASSIFIER["sample_count"]
    # predicted_tag_count = settings["predicted_tag_count"]
    LOGGER.debug("Sample count: %d" % sample_count)
    LOGGER.debug("Max predicted tag count: %d" % predicted_tag_count)
    get_similarity = settings["get_similarity"]
    # run the test
    for index, predict_result in enumerate(predicted_results):
        if index > limit:
            break
        try:
            LOGGER.debug("%d/%d sample" % (index, sample_count))
            original, scored_predicted = predict_result
            # TODO: HARD CODED Code again.
            if settings["should_rerank"]:
                scored_predicted = rerank_tags(scored_predicted[:30],
                                               get_similarity)
            scored_predicted = scored_predicted[:predicted_tag_count]
            predicted = [t for t, s in scored_predicted]
            # TODO: SOME PROBLEM may raise here
            predicted = predicted[:predicted_tag_count]
            for name, evaluator in settings["evaluators"].items():
                evaluation = evaluator.update(original, predicted)
                log_message = "\nOriginal Result: %s\n" \
                              "Predicted Result: %s\n" \
                              "Evaluator Type: %s\n" \
                              "\tPrecision: %f\n" \
                              "\tRecall: %f\n" % (
                                  str(to_named_tags(original, tags_info)),
                                  str(to_named_tags(predicted, tags_info)),
                                  name, evaluation[0], evaluation[1])
                LOGGER.debug(log_message)
        except Exception as e:
            LOGGER.error("Error occurs %s" % (str(e)))
    evaluations = []
    for name, evaluator in settings["evaluators"].items():
        evaluation = evaluator.get_evaluation()
        LOGGER.info("%s Precision: %f\t Recall: %f" %
                    (name, evaluation[0], evaluation[1]))
        evaluations.append(evaluation)
    return evaluations
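# The evaluators above return (precision, recall) pairs for a predicted tag list against
# the original tags. A minimal, self-contained sketch of that computation (an assumption;
# the project's Evaluator classes may accumulate these across samples differently):
def precision_recall(original_tags, predicted_tags):
    # Hypothetical helper for illustration only.
    original = set(original_tags)
    predicted = set(predicted_tags)
    hits = len(original & predicted)
    precision = hits / float(len(predicted)) if predicted else 0.0
    recall = hits / float(len(original)) if original else 0.0
    return precision, recall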
def update_metadata_images(feature_image: str, body: dict, slug: str) -> dict:
    """
    Update OG and Twitter images to match feature image.

    :param str feature_image: Post feature image url.
    :param dict body: JSON body representing Ghost post.
    :param str slug: Unique post identifier.
    :returns: dict
    """
    body["posts"][0].update({
        "og_image": feature_image,
        "twitter_image": feature_image
    })
    LOGGER.info(f"Updated metadata images for post `{slug}`")
    return body
def mobile_transformations(self, folder: str) -> List[Optional[str]]:
    """
    Create mobile image variants of standard-res images.

    :param str folder: Directory to recursively apply image transformations.
    :returns: List[Optional[str]]
    """
    images_transformed = []
    image_blobs = self.get_standard_blobs(folder)
    LOGGER.info(f"Creating mobile variants for {len(image_blobs)} images...")
    for image_blob in image_blobs:
        mobile_image_blob = self.create_mobile_image(image_blob)
        if mobile_image_blob is not None:
            images_transformed.append(mobile_image_blob.name)
    return images_transformed
def initial(self):
    for corpus_preprocessor in self.corpus_preprocessors:
        LOGGER.info(corpus_preprocessor.name)
        for (src, tgt) in corpus_preprocessor.input_fn(max_len=MAX_LEN,
                                                       threshold=THRESHOLD,
                                                       min_len=MIN_LEN):
            src_tk = self.tokenizer.tokenize(src.strip())
            tgt_tk = self.tokenizer.tokenize(tgt.strip())
            assert len(src_tk) > 0
            assert len(tgt_tk) > 0
            self.ex_list.append((src_tk, tgt_tk))
            self.c += 1
            self.current_sample_num += 1
            if self.current_sample_num == self.sub_dataset_num:
                break
        if self.current_sample_num == self.sub_dataset_num:
            self.current_sample_num = 0
            break
def manage_sessions():
    '''This method is called before every request. Checks to see if there is a
    session associated with the current request. If there is, then update the
    last interaction time on that session.
    '''
    if SESSION_HANDLER.is_expired(bottle.request.session.id):
        # Someone is trying to use a session that we have deleted due to inactivity
        SESSION_HANDLER.cleanup_expired_session(bottle.request.session.id)
        bottle.request.session.delete()  # TODO: I'm not sure how the actual session is deleted on the client side
        LOGGER.info("Cleaning up an out of date session")
    elif not SESSION_HANDLER.is_active(bottle.request.session.id):
        LOGGER.warning("We are getting a request that doesn't have an active session")
    else:
        # let the SESSION_HANDLER know that this session has activity
        SESSION_HANDLER.touch_session(bottle.request.session.id)
def save(result, id):
    result_dict = {}
    if result:
        for vul, location, poc in result:
            LOGGER.warn('%s found in: %s\n' % (vul, location))
            if vul in result_dict:
                result_dict[vul].append((location, poc))
            else:
                result_dict[vul] = []
                result_dict[vul].append((location, poc))
        print_result_table(result)
    result_file = os.path.join(
        RESULT_DIR, id + '-' +
        datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + '.json')
    with open(result_file, 'w') as json_f:
        json.dump(result_dict, json_f)
    LOGGER.info('The result of %s has been saved to %s' % (id, result_file))
for predicted_tag_count in EXPERIMENT_CONFIG["predicted_tag_count"]:
    if EXPERIMENT_CONFIG["is_from_classifier"]:
        predict_results = prediction.get_sample_results_by_naive_bayes(
            classifier, tags_info, words_info)
    else:
        predict_results = prediction.get_predicted_results_from_file(
            EXPERIMENT_CONFIG["NAME"])
    kl_precision = 0
    kl_recall = 0
    basket_precision = 0
    basket_recall = 0
    print "====================="
    print "Running Test with tag %d" % predicted_tag_count
    sys.stdout.flush()
    for index in xrange(group_count):
        LOGGER.info("\tRun Test %d" % index)
        evaluation = run_experiment(predict_results, EXPERIMENT_CONFIG,
                                    EXPERIMENT_CONFIG["sample_count"],
                                    predicted_tag_count)
        print "*********************"
        print "Running Test %d" % index
        """
        print "\t\tBasket Precision %f" % evaluation[0][0]
        print "\t\tBasket Recall %f" % evaluation[0][1]
        print "------"
        """
        print "\t\tKL Precision %f" % evaluation[1][0]
        print "\t\tKL Recall %f" % evaluation[1][1]
        sys.stdout.flush()
        kl_precision += evaluation[1][0]
def __init__(self, basket_info_file="", train_data_file="", support=10):
    """
    ***Note***: If both basket_info_file and train_data_file exist,
    ignore the "train_data_file".
    """
    evaluator.Evaluator.__init__(self)
    # -- Create basket info from test data
    if not os.path.exists(basket_info_file):
        LOGGER.info("Basket info file %s not found" % basket_info_file)
        LOGGER.info("Get baskets from training data...")
        baskets = _create_baskets(train_data_file)
        tags_info = analyse_baskets(baskets)
        LOGGER.info("Writing back the basket info to " + basket_info_file)
        # Save the tags info to the disk
        pickle.dump(tags_info, open(basket_info_file, "wb"))
    else:
        LOGGER.info("Basket info file %s found" % basket_info_file)
        LOGGER.info("Loading the basket_info_file ...")
        # Read the tag info from the file
        tags_info = pickle.load(open(basket_info_file, "rb"))
        LOGGER.info("Basket info read!")
    self.tag_counts, self.cooccurrences, self.total_count = tags_info
    self.support = support
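# `analyse_baskets` is not shown here; from the unpacking above, `tags_info` appears to be
# a (tag_counts, cooccurrences, total_count) triple. A minimal sketch of how such a summary
# could be built from a list of tag baskets (an assumption; the original may differ):
from collections import defaultdict
from itertools import combinations

def analyse_baskets(baskets):
    # Hypothetical implementation for illustration only.
    tag_counts = defaultdict(int)
    cooccurrences = defaultdict(int)
    for basket in baskets:
        for tag in set(basket):
            tag_counts[tag] += 1
        for a, b in combinations(sorted(set(basket)), 2):
            cooccurrences[(a, b)] += 1
    return dict(tag_counts), dict(cooccurrences), len(baskets)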