def dash(detached, port, host, debug, **kwargs):
    '''Run the dash server for displaying HTML statistics.'''
    setup_logger(kwargs['quiet'], kwargs['verbosity'], 'dash')
    if detached:
        logger.warn('Dash app detached mode not yet implemented!')
        logger.info('Falling back to non-detached mode.')
    dash_app.app.run_server(debug=debug,
                            dev_tools_hot_reload=debug,
                            host=host,
                            port=port)

async def authenticate_user(token: str):
    """Verify that the token encodes a known username and return the user.

    The username is encoded in a JSON web token, which is first decoded;
    the corresponding entry is then read from the database. On success the
    user is returned.
    """
    db = database.SessionLocal()  # open a database session
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[JWT_ALGORITHM])
        username: str = payload.get('sub')  # the subject is the username
        if username is None:
            raise credentials_exception
        # obtain the user from the database
        user = crud.get_user_by_username(db, username=username)
    except jwt.PyJWTError:
        logger.warn('PyJWTError')
        raise credentials_exception
    except Exception:
        logger.warn('Unknown error')
        raise credentials_exception
    finally:
        db.close()  # close the database session
    if user is None:
        raise credentials_exception
    return user

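# Illustration (not part of the original module): how a token of the shape
# authenticate_user() expects could be produced and decoded with PyJWT. The
# SECRET_KEY and JWT_ALGORITHM values below are placeholders assumed to mirror
# the constants used above.
import jwt

SECRET_KEY = "change-me"    # assumption: same constant as used above
JWT_ALGORITHM = "HS256"     # assumption: HMAC-SHA256, a common default

token = jwt.encode({"sub": "alice"}, SECRET_KEY, algorithm=JWT_ALGORITHM)
payload = jwt.decode(token, SECRET_KEY, algorithms=[JWT_ALGORITHM])
assert payload.get("sub") == "alice"  # the 'sub' claim carries the username
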
def creature_pa_get(creatureid):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403

    # Pre-flight checks
    creature = fn_creature_get(None, creatureid)[3]
    if creature is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (creatureid:{creatureid})',
             "payload": None}), 200

    try:
        creature_pa = RedisPa(creature).get()
    except Exception as e:
        msg = f'PA Query KO (creatureid:{creature.id}) [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        if creature_pa:
            msg = f'PA Query OK (creatureid:{creature.id})'
            logger.debug(msg)
            return jsonify(
                {"success": True,
                 "msg": msg,
                 "payload": {"pa": creature_pa, "creature": creature}}), 200
        else:
            msg = f'PA Query KO (creatureid:{creature.id})'
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200

async def get_current_user(db: Session = Depends(get_db),
                           token: str = Depends(oauth2_scheme)):
    """Decode the user from the access token.

    The current user is "injected" into methods below as a dependency. This
    means that the user variable is assigned to the authenticated user encoded
    in the access token automatically by FastAPI.

    Parameters
    ----------
    db : sqlalchemy.orm.Session
        Database session injected by FastAPI as a dependency.
    token : str
        Access token encoding the username, injected by FastAPI. It is
        retrieved from a cookie by the oauth2_scheme defined in
        oauth2withcookies.py.
    """
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[JWT_ALGORITHM])
        username: str = payload.get('sub')
        if username is None:
            raise credentials_exception
        token_data = models.token.TokenData(username=username)
    except jwt.PyJWTError:
        logger.warn('PyJWTError')
        raise credentials_exception
    user = get_user(db, username=token_data.username)
    if user is None:
        raise credentials_exception
    return user

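# Usage sketch (not from the original code): how a FastAPI route might consume
# get_current_user through dependency injection. The router, path, and response
# shape are assumptions for illustration only.
from fastapi import APIRouter, Depends

router = APIRouter()


@router.get("/users/me")
async def read_current_user(user=Depends(get_current_user)):
    # FastAPI resolves the database session and the token cookie, decodes the
    # JWT via get_current_user, and hands the authenticated user in here.
    return {"username": user.username}
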
def do_it(name: str, content: str, public: bool):
    files = [f'/tmp/slack-message-tmp-{generate_rand_alphanumeric(9)}']
    with open(files[0], 'w') as tmp_file:
        tmp_file.write(content)

    gist_env = os.environ.copy()
    if not closure.you_get_my_jist:
        # Lazy load
        closure.you_get_my_jist = \
            requests.get(YOU_GET_MY_JIST_URL).text.strip()
    gist_env['YOU_GET_MY_JIST'] = closure.you_get_my_jist
    if os.path.dirname(sys.executable) not in os.environ['PATH']:
        gist_env['PATH'] = os.path.dirname(sys.executable) + ':' + gist_env['PATH']

    opts = '--public' if public else ''
    cmd = 'gist {opts} create {gist_name} {files}'
    filelist = ' '.join('"%s"' % f for f in files)
    cmd = cmd.format(gist_name=name, files=filelist, opts=opts)

    output, ret_code = run_command(cmd, env=gist_env, verbose=True)
    if ret_code != 0:
        log.warn('Could not upload gist. \n%s' % (output, ))
    url = output if ret_code == 0 else None
    os.remove(files[0])
    return url

def process_one_video(video_url, temp_video_file, wav_path):
    """Download video, extract wav, delete video."""
    ffmpeg_params = "ffmpeg -y -i '{}' -ac {} -ar {} -vn '{}'".format(
        temp_video_file,
        AUDIO_PARAMS['channels'],
        AUDIO_PARAMS['sampling_rate'],
        wav_path)
    if DEBUG:
        print(ffmpeg_params)

    download_progress(video_url, temp_video_file)
    stdout = subprocess.Popen(ffmpeg_params,
                              shell=True,
                              stdout=subprocess.PIPE).stdout.read()
    msg = 'Decoding url {} stdout \n {}'.format(video_url, str(stdout))
    logger.info(msg)
    if os.path.isfile(temp_video_file):
        os.remove(temp_video_file)
    else:
        logger.warn('File {} not found for deletion'.format(temp_video_file))
    return wav_path

def creature_add():
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403
    if not request.is_json:
        msg = 'Missing JSON in request'
        logger.warn(msg)
        return jsonify({"msg": msg, "success": False, "payload": None}), 400

    try:
        creature = fn_creature_add(None,
                                   request.json.get('raceid'),
                                   request.json.get('gender'),
                                   None,
                                   request.json.get('rarity'),
                                   request.json.get('x'),
                                   request.json.get('y'),
                                   request.json.get('instanceid'))
    except Exception as e:
        msg = f'Creature Query KO [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        msg = 'Creature creation OK'
        logger.debug(msg)
        return jsonify({"success": True, "msg": msg, "payload": creature}), 201

def _derive_coco_results(self, coco_eval, iou_type):
    """
    Derive the desired score numbers from a summarized COCOeval.

    Args:
        coco_eval (None or COCOEval): None represents no predictions from the model.
        iou_type (str):
        class_names (None or list[str]): if provided, will use it to predict per-category AP.

    Returns:
        a dict of {metric name: score}, together with a small formatted table
        of the results (when predictions exist)
    """
    metrics = ["AP", "mMR", "Recall"]

    if coco_eval is None:
        logger.warn("No predictions from the model! Set scores to -1")
        return {metric: -1 for metric in metrics}

    # the standard metrics
    results = {metric: coco_eval[idx] for idx, metric in enumerate(metrics)}
    small_table = create_small_table(results)
    logger.info("Evaluation results for {}: \n".format(iou_type) + small_table)
    # if class_names is None or len(class_names) <= 1:
    return results, small_table

def crack(self, user, pswd):
    # self.browser.execute_script("document.body.style.transform='scale(0.9)';")
    self.browser.set_window_size(1024, 768)
    self.send_infos(user, pswd)
    button = self.get_verify_button()
    button.click()
    # Start recognizing the verification code
    image = self.get_verify_image()
    bytes_array = io.BytesIO()
    image.save(bytes_array, "PNG")
    sleep(10)
    result = self.chaojiying.PostPic(bytes_array.getvalue(), CHAOJIYING_KIND)
    pprint(result)
    locations = self.get_points(result)
    self.touch_click_words(locations)
    self.touch_click_verify()
    sleep(3)
    try:
        success = self.driver_wait.until(
            EC.text_to_be_present_in_element(
                (By.CLASS_NAME, "bilifont bili-icon_dingdao_zhuzhan"),
                "主站"))
        pprint(success)
    except Exception as e:
        logger.warn("Login failed.", e)
    finally:
        self.browser.quit()

def strip_espeak(fpath_in, lang, fpath_unknown_phones):
    assert locale.getlocale()[1] == 'UTF-8', 'Should be using UTF-8 locale!'
    with open(fpath_in) as fh:
        text = fh.read()
    lst_word_n_prons = text.splitlines()

    newtext = []
    set_unknown_prons = set()
    cnt_words_with_unknown_prons = 0
    for line in lst_word_n_prons:
        line = ptn_punct.sub('', line)
        line = line.replace('_', ' ')
        line = ptn_brack.sub('', line)
        line = re.sub(r"\([a-z-]+\)", "", line)
        line = line.split()
        word, phones = line[0], line[1:]

        tmp = []
        for c in phones:
            if lang == 'de':
                if c == 'ɾ':
                    c = 'r'
            else:
                if c == 'ɾ':
                    c = 't'
            c = espeak_mappings.get(c, c)
            cs = compound_splitting.get(c, [c])
            for c in cs:
                tmp.append(c)

        phones = []
        for c in tmp:
            c = toascii_mappings.get(c, c)
            c = final_mappings.get(c, c)
            if c is not None:
                phones.append(c)

        for c in phones:
            if c not in valid_prons:
                logger.info(f'Unknown pron {c}')
                if c not in eng_model_prons:
                    if fpath_unknown_phones is None:
                        logger.warn(f'New unknown pron {c}, not using word: {word}')
                        break
                    else:
                        cnt_words_with_unknown_prons += 1
                        if c not in set_unknown_prons:
                            set_unknown_prons.add(c)
        else:
            line = word + ' ' + ' '.join(phones)
            if word == '<unk>':
                line = word + ' jnk'
                newtext.append(line)
            else:
                newtext.append(line)

    logger.info(f'Num words with unknown prons / Num words total: '
                f'{cnt_words_with_unknown_prons} / {len(lst_word_n_prons)}')
    logger.info(f'Num unknown prons found: {len(set_unknown_prons)}')
    newtext = list(set(newtext))
    return newtext, set_unknown_prons

def creature_effect_add(creatureid, effectmetaid):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403
    if not request.is_json:
        msg = 'Missing JSON in request'
        logger.warn(msg)
        return jsonify({"msg": msg, "success": False, "payload": None}), 400

    duration = request.json.get('duration')
    sourceid = request.json.get('sourceid')

    if not isinstance(duration, int):
        return jsonify(
            {"success": False,
             "msg": f'Duration should be an INT (duration:{duration})',
             "payload": None}), 200
    if not isinstance(sourceid, int):
        return jsonify(
            {"success": False,
             "msg": f'Source ID should be an INT (sourceid:{sourceid})',
             "payload": None}), 200

    # Pre-flight checks
    creature = fn_creature_get(None, creatureid)[3]
    if creature is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (creatureid:{creatureid})',
             "payload": None}), 200
    source = fn_creature_get(None, sourceid)[3]
    if source is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (sourceid:{sourceid})',
             "payload": None}), 200

    # Effect add
    try:
        creature_effect = add_effect(creature, duration, effectmetaid, source)
        creature_effects = get_effects(creature)
    except Exception as e:
        msg = f'Effect Query KO [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        if creature_effect and creature_effects:
            msg = f'Effect add OK (creatureid:{creature.id})'
            logger.debug(msg)
            return jsonify(
                {"success": True,
                 "msg": msg,
                 "payload": {"effects": creature_effects,
                             "creature": creature}}), 200
        else:
            msg = f'Effect add KO (creatureid:{creature.id})'
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200

def test(cls, cfg, model, evaluators=None, output_folder=None):
    """
    Args:
        cfg (config dict):
        model (nn.Module):
        evaluators (list[DatasetEvaluator] or None): if None, will call
            :meth:`build_evaluator`. Otherwise, must have the same length as
            ``cfg.DATASETS.TEST``.

    Returns:
        dict: a dict of result metrics
    """
    if isinstance(evaluators, DatasetEvaluator):
        evaluators = [evaluators]
    if evaluators is not None:
        assert len(cfg.DATASETS.TEST) == len(evaluators), "{} != {}".format(
            len(cfg.DATASETS.TEST), len(evaluators))

    results = OrderedDict()
    for idx, dataset_name in enumerate(cfg.DATASETS.TEST):
        data_loader = cls.build_test_loader(cfg)
        # When evaluators are passed in as arguments,
        # implicitly assume that evaluators can be created before data_loader.
        if evaluators is not None:
            evaluator = evaluators[idx]
        else:
            try:
                evaluator = cls.build_evaluator(
                    cfg, dataset_name, data_loader.dataset,
                    output_folder=output_folder)
            except NotImplementedError:
                logger.warn(
                    "No evaluator found. Use `DefaultRunner.test(evaluators=)`, "
                    "or implement its `build_evaluator` method.")
                results[dataset_name] = {}
                continue

        if cfg.TEST.ON_FILES:
            results_i = inference_on_files(evaluator)
        else:
            results_i = inference_on_dataset(model, data_loader, evaluator)
        results[dataset_name] = results_i
        if comm.is_main_process():
            assert isinstance(results_i, dict), (
                "Evaluator must return a dict on the main process. "
                "Got {} instead.".format(results_i))
            logger.info("Evaluation results for {} in csv format:".format(dataset_name))
            print_csv_format(results_i)

    if len(results) == 1:
        results = list(results.values())[0]
    return results

def run(self):
    for doc_id in self.doc_ids:
        is_done = self._save_disclosure_file(doc_id)
        if is_done:
            logger.info(f"[save ok] {doc_id}")
        else:
            logger.warn(f"[save ng] {doc_id}")

def _buildTeachers(self):
    _reader = CsvParser(self.path + "/" + TEACHER_ID_NAME)
    _teacherid = {}
    for line in _reader.readLine():
        if line is not None and len(line) == 2:
            _teacherid[int(line[0])] = line[1]
        else:
            log.warn("Incorrect format: {}".format(line))
    self.timeTable.teacherIDMap = _teacherid

def _buildCourses(self):
    _reader = CsvParser(self.path + "/" + COURSES_ID_NAME)
    _courseid = {}
    for line in _reader.readLine():
        if line is not None and len(line) == 2:
            _courseid[int(line[0])] = line[1]
        else:
            log.warn("Incorrect format: {}".format(line))
    self.timeTable.courseIDMap = _courseid

def creature_stats(creatureid):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403

    # Pre-flight checks
    try:
        creature = fn_creature_get(None, creatureid)[3]
    except Exception as e:
        msg = f'Creature Query KO (creatureid:{creatureid}) [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        if not creature:
            msg = f'Creature Query KO - Not Found (creatureid:{creatureid})'
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200

    try:
        # We check if we have the data in Redis
        cached_stats = RedisStats(creature).as_dict()
        if cached_stats:
            # Data was in Redis, so we return it
            creature_stats = cached_stats
        else:
            # Data was not in Redis, so we compute it
            generated_stats = RedisStats(creature).refresh().dict
            if generated_stats:
                # Data was computed, so we return it
                creature_stats = generated_stats
            else:
                msg = f'Stats computation KO (creatureid:{creature.id})'
                logger.error(msg)
                return jsonify({"success": False, "msg": msg, "payload": None}), 200
    except Exception as e:
        msg = f'Stats Query KO (creatureid:{creature.id}) [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        return jsonify(
            {"success": True,
             "msg": f'Stats Query OK (creatureid:{creature.id})',
             "payload": {"stats": creature_stats, "creature": creature}}), 200

def delete_index(index_name: str):
    """Delete the terms index."""
    if not index_name:
        logger.warn("No index name given to delete")
        return None
    result = es.indices.delete(index=index_name, ignore_unavailable=True)
    return result

def creature_pa_consume(creatureid, redpa, bluepa):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403

    # Pre-flight checks
    creature = fn_creature_get(None, creatureid)[3]
    if creature is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (creatureid:{creatureid})',
             "payload": None}), 200
    if redpa > 16 or bluepa > 8:
        msg = (f'Cannot consume more than max PA '
               f'(creatureid:{creature.id},redpa:{redpa},bluepa:{bluepa})')
        logger.warning(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    if redpa < 0 or bluepa < 0:
        msg = (f'Cannot consume PA < 0 '
               f'(creatureid:{creature.id},redpa:{redpa},bluepa:{bluepa})')
        logger.warning(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    if redpa > RedisPa(creature).get()['red']['pa'] \
            or bluepa > RedisPa(creature).get()['blue']['pa']:
        msg = (f'Cannot consume that amount of PA '
               f'(creatureid:{creature.id},redpa:{redpa},bluepa:{bluepa})')
        logger.warning(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200

    try:
        ret = RedisPa(creature).set(redpa, bluepa)
        creature_pa = RedisPa(creature).get()
    except Exception as e:
        msg = (f'PA Query KO - Failed '
               f'(creatureid:{creatureid},redpa:{redpa},bluepa:{bluepa}) [{e}]')
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        if ret:
            msg = f'PA Query OK (creatureid:{creatureid},redpa:{redpa},bluepa:{bluepa})'
            logger.debug(msg)
            return jsonify(
                {"success": True,
                 "msg": msg,
                 "payload": {"pa": creature_pa, "creature": creature}}), 200
        else:
            msg = f'PA Query KO (creatureid:{creatureid},redpa:{redpa},bluepa:{bluepa})'
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200

def convertResultToCommentList(result, reddit):
    resultData = []
    try:
        submission = reddit.submission(url=result.link)
        submission.comments.replace_more(REPLACE_MORE_LIMIT)
        for comment in submission.comments.list():
            if filterCommentForRelevancy(comment):
                resultData.extend(buildRowFromComment(comment))
    except praw.exceptions.ClientException:
        logger.warn("Google search returned non submission:" + result.link)
    return resultData

def speed(self):
    speed = self.obj.get("speed", 5)
    if speed < 1:
        logger.warn("Codio speed < 1, setting it to 1")
        speed = 1
    elif speed > 10:
        logger.warn("Codio speed > 10, setting it to 10")
        speed = 10
    return 11 - speed

def show_collection():
    """Show collection info."""
    if COLLECTION_HOME is not None:
        logger.info(f'Collection "{COLLECTION_TITLE}" info:')
        logger.info(f"  directory: {COLLECTION_HOME}")
        if METADATA_HOME is not None:
            logger.info(f'  metadata repository: "{METADATA_HOME}"')
        else:
            logger.warn("  metadata repository is not set.")
    else:
        logger.error("No collection found.")
        sys.exit(1)

def load_feed(self, cam_input):
    if "video" in self._input_type:
        self.cap = cv2.VideoCapture(self.input_feed)
    elif "image" in self._input_type:
        self.cap = cv2.imread(self.input_feed)
    elif "cam" in self.input_feed.lower():
        self._input_type = self.input_feed
        self.cap = cv2.VideoCapture(cam_input)
    else:
        msg = f"Source: {self.input_feed} not supported!"
        logger.warn(msg)
        raise FormatNotSupported(msg)
    logger.info(f"Loaded input source type: {self._input_type}")

def creature_status_del(creatureid, statusmetaid):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403

    # Pre-flight checks
    creature = fn_creature_get(None, creatureid)[3]
    if creature is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (creatureid:{creatureid})',
             "payload": None}), 200

    # Status del
    try:
        creature_status = del_status(creature, statusmetaid)
        creature_statuses = get_statuses(creature)
    except Exception as e:
        msg = f'Status Query KO [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        if creature_status > 0:
            msg = f'Status del OK (creatureid:{creature.id},statusmetaid:{statusmetaid})'
            logger.debug(msg)
            return jsonify(
                {"success": True,
                 "msg": msg,
                 "payload": {"statuses": creature_statuses,
                             "creature": creature}}), 200
        elif creature_status == 0:
            msg = (f'Status del KO - Status Not Found '
                   f'(creatureid:{creature.id},statusmetaid:{statusmetaid})')
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200
        else:
            msg = (f'Status del KO - Failed '
                   f'(creatureid:{creature.id},statusmetaid:{statusmetaid})')
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200

def start(self):
    ormb_file_path = os.path.join(self.model_root_path, self._serving_name,
                                  'ormbfile.yaml')
    if not os.path.exists(ormb_file_path):
        logger.warn(f'{ormb_file_path} does not exist')
        return
    old_model_path = os.path.join(self.model_root_path, self._serving_name,
                                  'model')
    if not os.path.isdir(old_model_path):
        logger.warn(f'{old_model_path} does not exist')
        return

    # Phase 1: Extract model_format and yaml
    format = ""
    yaml_data = {}
    if self.using_ormbfile:
        yaml_data = self._extract_yaml()
        if 'format' not in yaml_data:
            logger.error('model format missing')
            return
        format = yaml_data["format"]
    else:
        inputs = os.getenv(INPUTS_ENV, "[]")
        outputs = os.getenv(OUTPUTS_ENV, "[]")
        format = os.getenv(FORMAT_ENV, "")
        yaml_data["format"] = format
        yaml_data["signature"] = {}
        yaml_data["signature"]["inputs"] = json.loads(inputs)
        yaml_data["signature"]["outputs"] = json.loads(outputs)

    # Phase 2: Generate 'config.pbtxt' for triton models
    if isTritonModel(format):
        self._generate_config_pbtxt(yaml_data)

    # Phase 3: Generate 'model setting' for mlserver models
    if isMLServerModel(format):
        # set env for mlserver
        os.putenv('MODEL_FORMAT', format)
        # get version from ormbfile
        if 'version' in yaml_data:
            version = yaml_data['version']
        else:
            version = 'v1.0.0'
        self._generate_model_setting(format, version)

    # Phase 4: Re-organize directory format
    os.rename(old_model_path, self.model_path)
    self._format_model(format)

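# Illustrative only: the shape of the data start() expects back from
# self._extract_yaml() (i.e. the parsed ormbfile.yaml). Keys mirror what the
# method reads; the values are placeholders, not taken from a real package.
example_yaml_data = {
    "format": "SavedModel",   # drives the Triton / MLServer branches above
    "version": "v1.0.0",      # optional; start() falls back to 'v1.0.0'
    "signature": {
        "inputs": [{"name": "input_1", "dtype": "float32"}],
        "outputs": [{"name": "output_1", "dtype": "float32"}],
    },
}
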
def creature_cd_get_one(creatureid, skillmetaid):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403

    # Pre-flight checks
    creature = fn_creature_get(None, creatureid)[3]
    if creature is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (creatureid:{creatureid})',
             "payload": None}), 200

    # CD get
    try:
        creature_cd = get_cd(creature, skillmetaid)
    except Exception as e:
        msg = f'CD Query KO [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        if creature_cd is False:
            msg = (f'CD get KO - CD Not Found '
                   f'(creatureid:{creature.id},skillmetaid:{skillmetaid})')
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200
        elif creature_cd:
            msg = f'CD get OK (creatureid:{creature.id},skillmetaid:{skillmetaid})'
            logger.debug(msg)
            return jsonify(
                {"success": True,
                 "msg": msg,
                 "payload": {"cd": creature_cd, "creature": creature}}), 200
        else:
            msg = (f'CD get KO - Failed '
                   f'(creatureid:{creature.id},skillmetaid:{skillmetaid})')
            logger.warning(msg)
            return jsonify({"success": False, "msg": msg, "payload": None}), 200

def cli(warnings_as_errors, **kwargs):
    """bionorm -- normalize, verify, and index bioinformatic data types.

    For more information, see the homepage at
    https://github.com/legumeinfo/bionorm

    Originally written by Connor Cameron <*****@*****.**>.
    Maintained by Joel Berendzen <*****@*****.**>,
    Copyright (C) 2020. National Center for Genome Resources.
    All rights reserved. License: BSD-3-Clause
    """
    if warnings_as_errors:
        logger.warn("Runtime warnings (e.g., from pandas) will cause exceptions")
        warnings.filterwarnings("error")

def _get_status(soup):
    c_statuses = []
    tweets = soup.find_all('table', {"class": "tweet"})
    # print("Tweets found: %d" % len(tweets))
    for cur_tweet in tweets:
        if 'tombstone-tweet' in cur_tweet['class']:
            # Dead twitter account reference
            continue
        # soup.find_all('div', {"class": "tweet-text"}):
        cur_tweet_data = cur_tweet.find('div', {"class": "tweet-text"})
        try:
            cur_tweet_text = cur_tweet_data.find('div', {"class": "dir-ltr"})
            if cur_tweet_text is None:
                cur_tweet_text = cur_tweet_data.get_text()
            else:
                cur_tweet_text = cur_tweet_text.get_text()
            cur_tweet_date = cur_tweet.find('td', {"class": "timestamp"}).find('a').get_text()
            if "h" in cur_tweet_date and len(cur_tweet_date) < 4:
                hours = int(re.findall(r"([0-9]{0,2})\s?h", cur_tweet_date)[0])
                cur_tweet_date = arrow.get().shift(hours=-hours).format("YYYY-MM-DD")
            elif "m" in cur_tweet_date and len(cur_tweet_date) < 4:
                hours = int(re.findall(r"([0-9]{0,2})\s?m", cur_tweet_date)[0])
                cur_tweet_date = arrow.get().shift(hours=-hours).format("YYYY-MM-DD")
            elif "s" in cur_tweet_date and len(cur_tweet_date) < 4:
                hours = int(re.findall(r"([0-9]{0,2})\s?s", cur_tweet_date)[0])
                cur_tweet_date = arrow.get().shift(hours=-hours).format("YYYY-MM-DD")
            elif len(cur_tweet_date) < 9:
                # Current-year tweets don't show the year in the text
                cur_tweet_date += arrow.get().format(" YY")
                cur_tweet_date = arrow.get(cur_tweet_date, "MMM D YY").format("YYYY-MM-DD")
            else:
                cur_tweet_date = arrow.get(cur_tweet_date, "D MMM YY").format("YYYY-MM-DD")
            c_statuses += [(cur_tweet_data['data-id'], cur_tweet['href'],
                            cur_tweet_date, cur_tweet_text)]
        except Exception:
            logger.warn("Not processing: \n %s" % cur_tweet)
    return c_statuses

def creature_equipment(creatureid):
    if request.headers.get('Authorization') != f'Bearer {API_INTERNAL_TOKEN}':
        msg = 'Token not authorized'
        logger.warn(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 403

    # Pre-flight checks
    creature = fn_creature_get(None, creatureid)[3]
    if creature is None:
        return jsonify(
            {"success": False,
             "msg": f'Creature unknown (creatureid:{creatureid})',
             "payload": None}), 200

    try:
        slots = fn_slots_get_one(creature)
    except Exception as e:
        msg = f'Slots Query KO (creatureid:{creature.id}) [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200

    try:
        equipment = {
            "feet": fn_item_get_one(slots.feet),
            "hands": fn_item_get_one(slots.hands),
            "head": fn_item_get_one(slots.head),
            "holster": fn_item_get_one(slots.holster),
            "lefthand": fn_item_get_one(slots.lefthand),
            "righthand": fn_item_get_one(slots.righthand),
            "shoulders": fn_item_get_one(slots.shoulders),
            "torso": fn_item_get_one(slots.torso),
            "legs": fn_item_get_one(slots.legs),
        }
    except Exception as e:
        msg = f'Equipment Query KO (creatureid:{creature.id}) [{e}]'
        logger.error(msg)
        return jsonify({"success": False, "msg": msg, "payload": None}), 200
    else:
        msg = f'Equipment Query OK (creatureid:{creature.id})'
        logger.debug(msg)
        return jsonify(
            {"success": True,
             "msg": msg,
             "payload": {"equipment": equipment, "creature": creature}}), 200

def get_processor_name():
    import subprocess
    import json

    result = subprocess.run(["lscpu", "-J"], check=False, capture_output=True)
    if result.returncode != 0:
        logger.warn(
            "Failed to get processor name: {} returned error code {}: {}",
            result.args,
            result.returncode,
            result.stderr,
        )
        return None
    for obj in json.loads(result.stdout)["lscpu"]:
        if obj["field"].startswith("Model name"):
            return obj["data"]

def _get_model(self):
    """Helper function for reading the network."""
    try:
        try:
            model = self._ie_core.read_network(model=self.model_structure,
                                               weights=self.model_weights)
        except AttributeError:
            logger.warn("Using an old version of OpenVINO, consider updating it!")
            model = IENetwork(model=self.model_structure,
                              weights=self.model_weights)
    except Exception:
        raise ValueError("Could not Initialise the network. "
                         "Have you entered the correct model path?")
    else:
        return model