def analyze():
    """Run the timeline analysis for the video path stashed in the session.

    Expects ``path``/``title``/``youtube_id`` to have been stored by the
    index view; always consumes ``path`` from the session.
    """
    path = session.get("path")
    title = session.get("title")
    youtube_id = session.get("youtube_id")
    session.pop("path", None)

    # Only a GET that follows a successful search (path present) is valid.
    if request.method != "GET" or path is None:
        return redirect("/")

    # Analyze the downloaded movie and persist the outcome to the cache.
    time_line, time_data, total_damage, debuff_value, status = an.analyze_movie(path)
    cm.save_cache(youtube_id, title, time_line, False,
                  total_damage, debuff_value, status)

    if status % 100 // 10 == 0:
        # Success: stash the results for the result page.
        session["time_line"] = time_line
        session["time_data"] = time_data
        session["total_damage"] = total_damage
        session["debuff_value"] = debuff_value
    else:
        # Failure: leave an error marker for the index view to report.
        session["path"] = el.ERR_BAD_RESOLUTION
    return render_template("analyze.html")
def do_analyze():
    """Ask the app server to analyze the video named on the command line.

    Reads the target URL from ``sys.argv[1]``, skips work when a usable
    cache entry already exists, otherwise calls the ``/rest/analyze``
    endpoint and caches the result (or the error status). Queue/pending
    markers are cleared on every exit path that reached them.
    """
    # Exit when the URL argument is missing (the original indexed argv
    # unconditionally and would have raised IndexError before its guard).
    if len(sys.argv) < 2 or not sys.argv[1]:
        return
    youtube_url = sys.argv[1]
    # Extract the video ID portion of the URL.
    youtube_id = al.get_youtube_id(youtube_url)
    if not youtube_id:
        return
    queue_path = ap.queue_dir + str(youtube_id)
    pending_path = ap.pending_dir + str(youtube_id)
    cached = cm.cache_check(youtube_id)
    # Do not re-analyze unless the cache is a stale (5 min) 3xx entry.
    if cached:
        cm.clear_path(queue_path)
        cm.clear_path(pending_path)
        return
    # Defaults keep the save_cache call below well-defined even when the
    # request fails before the response body is parsed (the original left
    # these unbound on e.g. a timeout, raising NameError at save time).
    title = ""
    time_line = False
    time_line_enemy = False
    total_damage = False
    debuff_value = False
    # NOTE(review): youtube_url is interpolated unescaped into the query
    # string — confirm the app server tolerates '&'-containing URLs.
    url = "http://" + is_server() + "/rest/analyze?Url=" + youtube_url
    req = urllib.request.Request(url)
    try:
        with urllib.request.urlopen(req) as res:
            body = json.load(res)
            title = body.get("result").get("title")
            time_line = body.get("result").get("timeline")
            time_line_enemy = body.get("result").get("timeline_enemy")
            total_damage = body.get("result").get("total_damage")
            debuff_value = body.get("result").get("debuff_value")
            url_result = body.get("status")
    except HTTPError:
        url_result = state.ERR_APP_SERVER_HTTP
    except URLError:
        url_result = state.ERR_APP_SERVER_URL
    except TimeoutError:
        url_result = state.ERR_ANALYZE_TIMEOUT

    if url_result % 100 // 10 == 2:
        # Search/validation-level failure: cache only the status.
        cm.save_cache(youtube_id, title, False, False, False,
                      False, False, url_result)
    else:
        # Cache the analysis payload together with its status.
        cm.save_cache(youtube_id, title, time_line, time_line_enemy, False,
                      total_damage, debuff_value, url_result)
    cm.clear_path(queue_path)
    cm.clear_path(pending_path)
def do_analyze():
    """Search and analyze the video named by ``sys.argv[1]``, caching the result.

    Skips work when a usable cache entry already exists; clears the
    queue/pending markers on every exit path that reached them.
    """
    # Exit when the URL argument is missing (plain ``sys.argv[1]`` would
    # raise IndexError before the original guard could run).
    if len(sys.argv) < 2 or not sys.argv[1]:
        return
    youtube_url = sys.argv[1]
    # Extract the video ID portion of the URL.
    youtube_id = al.get_youtube_id(youtube_url)
    if not youtube_id:
        return
    queue_path = ap.queue_dir + str(youtube_id)
    pending_path = ap.pending_dir + str(youtube_id)
    cached = cm.cache_check(youtube_id)
    # Do not re-analyze unless the cache is a stale (5 min) 3xx entry.
    if cached:
        cm.clear_path(queue_path)
        cm.clear_path(pending_path)
        return
    # Look the video up on YouTube and validate it.
    path, title, length, thumbnail, url_result = al.search(youtube_id)
    if url_result % 100 // 10 == 2:
        # Search failed: cache only the error status.
        # NOTE(review): this call passes one argument fewer than the call
        # below — confirm against cm.save_cache's signature.
        cm.save_cache(youtube_id, title, False, False, False,
                      False, False, url_result)
    else:
        # Timeline analysis of the downloaded movie.
        (time_line, time_line_enemy, time_data, total_damage,
         debuff_value, damages, analyze_result) = al.analyze_movie(path)
        # Cache the analysis payload together with its status.
        cm.save_cache(youtube_id, title, time_line, time_line_enemy, False,
                      total_damage, debuff_value, damages, analyze_result)
    cm.clear_path(queue_path)
    cm.clear_path(pending_path)
def do_analyze(url):
    """Analyze the video behind *url* and store the outcome in the cache.

    Does nothing when the URL has no extractable video ID or when a
    usable cache entry already exists.
    """
    # Pull the video ID out of the URL.
    video_id = al.get_youtube_id(url)
    if not video_id:
        return
    # Skip re-analysis unless the cached entry is a stale (5 min) 3xx one.
    if cm.cache_check(video_id):
        return
    # Locate and validate the video on YouTube.
    path, title, length, thumbnail, url_result = al.search(video_id)
    if url_result % 100 // 10 == 2:
        # Search/validation failed: record only the error status.
        cm.save_cache(video_id, title, False, False, False, False, url_result)
        return
    # Run the timeline analysis and cache the full result.
    time_line, time_data, total_damage, debuff_value, analyze_result = \
        al.analyze_movie(path)
    cm.save_cache(video_id, title, time_line, False,
                  total_damage, debuff_value, analyze_result)
create_date='2018-10-17T19:13:41Z', ) generate_dev( dirpath='out/connectors/dev', author='Danilo B.', name='Generic Pin Header 2.54mm', name_lower='generic male pin header', kind=KIND_HEADER, cmpcat='4a4e3c72-94fb-45f9-a6d8-122d2af16fb1', keywords='pin header, male header, tht, generic', min_pads=1, max_pads=40, pad_drills=[0.9, 1.0, 1.1], create_date='2018-10-17T19:13:41Z', ) generate_dev( dirpath='out/connectors/dev', author='Danilo B.', name='Soldered Wire Connector 2.54mm', name_lower='generic soldered wire connector', kind=KIND_WIRE_CONNECTOR, cmpcat='d0618c29-0436-42da-a388-fdadf7b23892', keywords='connector, soldering, generic', min_pads=1, max_pads=40, pad_drills=[1.0], create_date='2018-10-17T19:13:41Z', ) # TODO: Generate sym, cmp and dev for soldered wire connector save_cache(uuid_cache_file, uuid_cache)
def index():
    """Top page.

    POST: look the submitted URL up (cache first), then hand off to the
    analyze view. GET with ``?v=YoutubeID``: serve the cached result, or
    analyze when no usable cache exists. Plain GET: clear leftover
    session state and show the form.
    """
    if request.method == "POST":
        url = (request.form["Url"])
        # Extract the video ID portion of the URL.
        youtube_id = an.get_youtube_id(url)
        if youtube_id is False:
            error = el.get_error_message(el.ERR_BAD_URL)
            return render_template("index.html", error=error)
        cache = cm.cache_check(youtube_id)
        if cache is not False:
            title, time_line, time_data, total_damage, debuff_value, past_status = cache
            if past_status // 100 == 2:
                # Completed analysis cached: render the result directly.
                debuff_dict, data_txt, data_url, total_damage = get_web_txt(
                    youtube_id, title, time_line, debuff_value, total_damage)
                return render_template("result.html", title=title,
                                       timeLine=time_line, timeData=time_data,
                                       totalDamage=total_damage,
                                       debuffDict=debuff_dict,
                                       data_txt=data_txt, data_url=data_url)
            elif past_status // 100 == 3:
                # Stale 3xx entry: fall through and re-analyze.
                pass
            else:
                error = el.get_error_message(past_status)
                return render_template("index.html", error=error)
        path, title, length, thumbnail, url_result = an.search(youtube_id)
        if url_result // 100 == 4:
            error = el.get_error_message(url_result)
            cm.save_cache(youtube_id, title, False, False, False, False,
                          url_result)
            return render_template("index.html", error=error)
        elif url_result == el.ERR_CANT_GET_HD:
            error = el.get_error_message(url_result)
            return render_template("index.html", error=error)
        # Stash the download for the analyze view.
        session["path"] = path
        session["title"] = title
        session["youtube_id"] = youtube_id
        # Rough progress estimate shown while analyzing.
        length = int(int(length) / 8) + 3
        return render_template("analyze.html", title=title, length=length,
                               thumbnail=thumbnail)
    elif request.method == "GET":
        if "v" in request.args:
            # A GET of the form ?v=YoutubeID returns the result page.
            youtube_id = request.args.get("v")
            if re.fullmatch(r"^([a-zA-Z0-9_-]{11})$", youtube_id):
                cache = cm.cache_check(youtube_id)
                if cache is not False:
                    title, time_line, time_data, total_damage, debuff_value, past_status = cache
                    if past_status // 100 == 2:
                        debuff_dict, data_txt, data_url, total_damage = get_web_txt(
                            youtube_id, title, time_line, debuff_value,
                            total_damage)
                        return render_template("result.html", title=title,
                                               timeLine=time_line,
                                               timeData=time_data,
                                               totalDamage=total_damage,
                                               debuffDict=debuff_dict,
                                               data_txt=data_txt,
                                               data_url=data_url)
                    elif past_status // 100 == 3:
                        # BUGFIX: the original trapped the re-analysis code
                        # in the cache-miss else, so a stale 3xx cache hit
                        # fell out of the view and returned None. Fall
                        # through to re-analyze, mirroring the POST branch.
                        pass
                    else:
                        error = el.get_error_message(past_status)
                        return render_template("index.html", error=error)
                # No usable cache: analyze now (mirrors the POST branch).
                path, title, length, thumbnail, url_result = an.search(
                    youtube_id)
                if url_result // 100 == 4:
                    error = el.get_error_message(url_result)
                    cm.save_cache(youtube_id, title, False, False, False,
                                  False, url_result)
                    return render_template("index.html", error=error)
                elif url_result == el.ERR_CANT_GET_HD:
                    error = el.get_error_message(url_result)
                    return render_template("index.html", error=error)
                session["path"] = path
                session["title"] = title
                session["youtube_id"] = youtube_id
                length = int(int(length) / 8) + 3
                return render_template("analyze.html", title=title,
                                       length=length, thumbnail=thumbnail)
            else:
                # Request does not match prilog.jp/(YoutubeID).
                error = "不正なリクエストです"
                return render_template("index.html", error=error)
        else:
            # Plain GET: clear any leftover session state from a prior run.
            path = session.get("path")
            session.pop("path", None)
            session.pop("title", None)
            session.pop("youtube_id", None)
            error = None
            # BUGFIX: compare by value — the marker is an int constant and
            # `is` on ints relies on interning, which is unreliable.
            if path == el.ERR_BAD_RESOLUTION:
                error = el.get_error_message(path)
            elif path is not None:
                cm.clear_path(path)
            return render_template("index.html", error=error)
def rest_analyze():
    """REST endpoint: analyze the video named by the ``Url`` parameter.

    Returns JSON ``{"result", "msg", "status"}``. Serves from cache when
    possible, waits for an in-flight analysis of the same video, and
    otherwise queues up and performs the analysis itself.
    """
    status = el.DONE
    rest_result = {}
    ret = {}
    url = ""
    # Pull the Url parameter from either the form (POST) or query (GET).
    if request.method == "POST":
        if "Url" not in request.form:
            status = el.ERR_BAD_REQ
            ret["result"] = rest_result
            ret["msg"] = el.get_error_message(status)
            ret["status"] = status
            return jsonify(ret)
        else:
            url = request.form["Url"]
    elif request.method == "GET":
        if "Url" not in request.args:
            status = el.ERR_BAD_REQ
            ret["result"] = rest_result
            ret["msg"] = el.get_error_message(status)
            ret["status"] = status
            return jsonify(ret)
        else:
            url = request.args.get("Url")
    # Cache check.
    youtube_id = an.get_youtube_id(url)
    if youtube_id is False:
        # Malformed URL.
        status = el.ERR_BAD_URL
    else:
        # Valid URL: return the cache when one exists.
        cache = cm.cache_check(youtube_id)
        if cache is not False:
            title, time_line, time_data, total_damage, debuff_value, past_status = cache
            if past_status // 100 == 2:
                rest_result = get_rest_result(title, time_line, time_data,
                                              total_damage, debuff_value)
                ret["result"] = rest_result
                ret["msg"] = el.get_error_message(past_status)
                ret["status"] = past_status
                return jsonify(ret)
            elif (past_status // 100) == 3:
                # Stale 3xx entry: fall through and re-analyze.
                pass
            else:
                ret["result"] = rest_result
                ret["msg"] = el.get_error_message(past_status)
                ret["status"] = past_status
                return jsonify(ret)
        # start analyze
        # Check whether this video is already queued.
        queue_path = queue_dir + str(youtube_id)
        pending_path = pending_dir + str(youtube_id)
        queued = os.path.exists(queue_path)
        if queued:
            # Another request is analyzing this video: poll until its queue
            # entry disappears, then serve the cache it produced.
            while True:
                # Interim safeguard: delete queue entries left behind for
                # more than 30 minutes.
                try:
                    # BUGFIX: the original used datetime.date (day
                    # resolution) and timedelta.seconds (always 0 for
                    # whole-day deltas), so this cleanup could never fire.
                    now = datetime.datetime.now()
                    timestamp = datetime.datetime.fromtimestamp(
                        int(os.path.getmtime(queue_path)))
                    if (now - timestamp).total_seconds() >= 30 * 60:
                        cm.clear_path(queue_path)
                        cm.clear_path(pending_path)
                except FileNotFoundError:
                    # Entry vanished between checks; re-tested below.
                    pass
                queued = os.path.exists(queue_path)
                if queued:
                    tm.sleep(1)
                    continue
                else:
                    # The other analysis finished: return its cached JSON.
                    cache = cm.cache_check(youtube_id)
                    if cache is not False:
                        title, time_line, time_data, total_damage, debuff_value, past_status = cache
                        if past_status // 100 == 2 or past_status // 100 == 3:
                            rest_result = get_rest_result(
                                title, time_line, time_data, total_damage,
                                debuff_value)
                        status = past_status
                        break
                    else:
                        # Cache missing: should be impossible, because the
                        # cache is written before the queue entry is removed.
                        status = el.ERR_UNEXPECTED
                        break
        else:
            # Not queued yet: register ourselves in the analysis queue.
            cm.queue_append(queue_path)
            # Wait for our turn, then analyze.
            while True:
                if not cm.is_pending_exists() and cm.is_queue_current(queue_path):
                    # Mark the analysis as in progress.
                    pending_path = pending_dir + str(youtube_id)
                    cm.pending_append(pending_path)
                    # Look the video up on YouTube and validate it.
                    path, title, length, thumbnail, url_result = an.search(youtube_id)
                    status = url_result
                    if url_result // 100 == 4:
                        cm.save_cache(youtube_id, title, False, False, False,
                                      False, url_result)
                    elif url_result == el.ERR_CANT_GET_HD:
                        pass
                    else:
                        # Timeline analysis.
                        time_line, time_data, total_damage, debuff_value, analyze_result = an.analyze_movie(path)
                        status = analyze_result
                        # Persist the outcome.
                        cm.save_cache(youtube_id, title, time_line, False,
                                      total_damage, debuff_value, status)
                        # BUGFIX: compare by value; `is` on an int constant
                        # relies on interning and is unreliable.
                        if analyze_result == el.DONE:
                            # Successful analysis: include the payload.
                            rest_result = get_rest_result(
                                title, time_line, time_data, total_damage,
                                debuff_value)
                    cm.clear_path(queue_path)
                    cm.clear_path(pending_path)
                    break
                tm.sleep(1)
    ret["result"] = rest_result
    ret["msg"] = el.get_error_message(status)
    ret["status"] = status
    return jsonify(ret)
def index():
    """Top page of the newer UI.

    POST: acquire a download slot, fetch/validate the video, and hand off
    to the analyze view. GET with ``?v=YoutubeID``: serve the cached
    result, or analyze when no cache exists. Plain GET: clear leftover
    session state and show the form.
    """
    if request.method == "POST":
        url = (request.form["Url"])
        # Extract the video ID portion of the URL.
        youtube_id = al.get_youtube_id(url)
        if youtube_id is False:
            error = state.get_error_message(state.ERR_BAD_URL)
            return render_template("index.html", error=error)
        cache = cm.cache_check(youtube_id)
        if cache is not False:
            title, time_line, time_line_enemy, time_data, total_damage, debuff_value, past_status = cache
            # An x0x status marks a successfully completed analysis.
            if past_status % 100 // 10 == 0:
                debuff_dict, data_txt, data_url, total_damage = get_web_txt(
                    youtube_id, title, time_line, debuff_value, total_damage)
                return render_template("result.html", title=title,
                                       timeLine=time_line,
                                       timeLineEnemy=time_line_enemy,
                                       timeData=time_data,
                                       totalDamage=total_damage,
                                       debuffDict=debuff_dict,
                                       data_txt=data_txt,
                                       data_url=data_url)
            else:
                error = state.get_error_message(past_status)
                return render_template("index.html", error=error)
        if SERVER_ERROR_STATE:
            error = state.get_error_message(state.ERR_SERVICE_UNAVAILABLE)
            return render_template("index.html", error=error)
        # start download
        dl_queue_path = dl_queue_dir + str(youtube_id)
        dl_ongoing_path = dl_ongoing_dir + str(youtube_id)
        # Check whether this video is already queued for download.
        queued = os.path.exists(dl_queue_path)
        if not queued:
            # Not waiting yet: register in the download wait queue.
            cm.queue_append(dl_queue_path)
            # Poll until our turn comes up, then start the download.
            while True:
                if not cm.is_path_exists(dl_ongoing_path) and cm.is_path_due(
                        dl_queue_path):
                    if cm.is_pending_download(15):  # check pending download
                        break
                timeout = cm.watchdog_download(youtube_id, 300)  # watch for a 5-minute timeout
                if timeout:
                    cm.clear_path(dl_queue_path)
                    error = "動画の解析待ちでタイムアウトが発生しました。再実行をお願いします。"
                    return render_template("index.html", error=error)
                tm.sleep(1)
        else:
            # Already waiting for a download of this video: report an error.
            cm.clear_path(dl_queue_path)
            error = "同一の動画が解析中です。時間を置いて再実行をお願いします。"
            return render_template("index.html", error=error)
        path, title, length, thumbnail, url_result = al.search(youtube_id)
        cm.clear_path(dl_queue_path)
        # An x2x status marks a failed search/validation.
        if url_result % 100 // 10 == 2:
            error = state.get_error_message(url_result)
            cm.save_cache(youtube_id, title,
                          False, False, False, False, False, url_result)
            return render_template("index.html", error=error)
        # Stash the download for the analyze view.
        session["path"] = path
        session["title"] = title
        session["youtube_id"] = youtube_id
        # Rough progress estimate shown while analyzing.
        length = int(int(length) / 8) + 3
        return render_template("analyze.html", title=title, length=length,
                               thumbnail=thumbnail)
    elif request.method == "GET":
        if "v" in request.args:
            # A GET of the form ?v=YoutubeID returns the result page.
            youtube_id = request.args.get("v")
            if re.fullmatch(r"^([a-zA-Z0-9_-]{11})$", youtube_id):
                cache = cm.cache_check(youtube_id)
                if cache is not False:
                    title, time_line, time_line_enemy, time_data, total_damage, debuff_value, past_status = cache
                    if past_status % 100 // 10 == 0:
                        debuff_dict, data_txt, data_url, total_damage = get_web_txt(
                            youtube_id, title, time_line, debuff_value,
                            total_damage)
                        return render_template("result.html", title=title,
                                               timeLine=time_line,
                                               timeLineEnemy=time_line_enemy,
                                               timeData=time_data,
                                               totalDamage=total_damage,
                                               debuffDict=debuff_dict,
                                               data_txt=data_txt,
                                               data_url=data_url)
                    else:
                        error = state.get_error_message(past_status)
                        return render_template("index.html", error=error)
                else:
                    # No cache entry: analyze now.
                    if SERVER_ERROR_STATE:
                        error = state.get_error_message(
                            state.ERR_SERVICE_UNAVAILABLE)
                        return render_template("index.html", error=error)
                    # start download
                    dl_queue_path = dl_queue_dir + str(youtube_id)
                    dl_ongoing_path = dl_ongoing_dir + str(youtube_id)
                    # Check whether this video is already queued for download.
                    queued = os.path.exists(dl_queue_path)
                    if not queued:
                        # Not waiting yet: register in the download wait queue.
                        cm.queue_append(dl_queue_path)
                        # Poll until our turn comes up, then download.
                        while True:
                            if not cm.is_path_exists(
                                    dl_ongoing_path) and cm.is_path_due(
                                    dl_queue_path):
                                if cm.is_pending_download(
                                        15):  # check pending download
                                    break
                            timeout = cm.watchdog_download(youtube_id,
                                                           300)  # watch for a 5-minute timeout
                            if timeout:
                                cm.clear_path(dl_queue_path)
                                error = "動画の解析待ちでタイムアウトが発生しました。再実行をお願いします。"
                                return render_template("index.html",
                                                       error=error)
                            tm.sleep(1)
                    else:
                        # Already waiting for a download of this video.
                        cm.clear_path(dl_queue_path)
                        error = "同一の動画が解析中です。時間を置いて再実行をお願いします。"
                        return render_template("index.html", error=error)
                    path, title, length, thumbnail, url_result = al.search(
                        youtube_id)
                    cm.clear_path(dl_queue_path)
                    # An x2x status marks a failed search/validation.
                    if url_result % 100 // 10 == 2:
                        error = state.get_error_message(url_result)
                        cm.save_cache(youtube_id, title, False, False, False,
                                      False, False, url_result)
                        return render_template("index.html", error=error)
                    # Stash the download for the analyze view.
                    session["path"] = path
                    session["title"] = title
                    session["youtube_id"] = youtube_id
                    # Rough progress estimate shown while analyzing.
                    length = int(int(length) / 8) + 3
                    return render_template("analyze.html", title=title,
                                           length=length, thumbnail=thumbnail)
            else:
                # Request does not match prilog.jp/(YoutubeID).
                error = "不正なリクエストです"
                return render_template("index.html", error=error)
        else:
            # Plain GET: clear any leftover session state from a prior run.
            path = session.get("path")
            session.pop("path", None)
            session.pop("title", None)
            session.pop("youtube_id", None)
            error = None
            # A purely numeric "path" is an error status left by the
            # analyze view rather than a real filesystem path.
            if str(path).isdecimal():
                error = state.get_error_message(path)
            elif path is not None:
                cm.clear_path(path)
            return render_template("index.html", error=error)
def execute(args):
    '''
    Create a report like this:
        1. The lines from the Dockerfile
        2. What the tool is doing (either getting package information from
           the cache or the command library)
        3. The list of packages
        4. Any issues that were detected or suggestions to change the
           Dockerfile or Command Library
    For summary, print
        1. The lines from the Dockerfile
        2. The packages that came from that line
    '''
    report = ''
    logger = logging.getLogger('ternlog')
    if args.dockerfile:
        # parse the dockerfile
        common.load_docker_commands(args.dockerfile)
    # master list of package names so far
    master_list = []
    # ----------------------------------------------------
    # Step 1: Get the packages installed in the base image
    # ----------------------------------------------------
    if not args.summary:
        report = report + common.print_dockerfile_base()
    base_image_msg = common.get_dockerfile_base()
    # get a list of packages that are installed in the base image
    # the list may contain some layers with no packages in it because
    # there may be no record of them in the cache
    base_obj_list = common.get_base_obj(base_image_msg[0])
    for layer_obj in base_obj_list:
        report = print_image_base(report, base_image_msg, layer_obj,
                                  master_list, args.summary, logger)
    # ----------------------------------------------------
    # Step 2: Get the packages installed in the given image
    # ----------------------------------------------------
    # get a list of packages that may be installed from the dockerfile
    build, msg = common.is_build()
    if build:
        # get the shell that we will use for all the commands
        shell = common.get_image_shell(base_image_msg[0])
        # start a container with the built image
        image_tag_string = common.get_dockerfile_image_tag()
        start_container(image_tag_string)
        report = print_dockerfile_run(report, shell, len(base_obj_list),
                                      master_list, args.summary, logger)
        # remove container when done
        remove_container()
        remove_image(image_tag_string)
    else:
        # Dockerfile does not build, so get packages from dockerfile
        # parsing only
        pkg_dict = common.get_dockerfile_packages()
        report = report + env_dep_dockerfile.format(build_fail_msg=msg)
        report = report + checking_against_snippets
        report = report + 'Packages from parsing Dockerfile RUN commands:\n'
        for pkg in pkg_dict['recognized']:
            report = report + ' ' + pkg
        # BUGFIX: the heading previously read "Unregonized" in the
        # generated report.
        report = report + '\nUnrecognized RUN commands in Dockerfile:\n'
        for cmd in pkg_dict['unrecognized']:
            report = report + cmd + '\n'
    common.save_cache()
    write_report(report)
def __setattr__(self, key, value):
    """Store the attribute, then persist this object to its cache file.

    Assigning ``cache_file`` itself is exempt from persistence so the
    cache location can be configured without triggering a save.
    """
    self.__dict__[key] = value
    if key == 'cache_file':
        return
    cm.save_cache(self, self.cache_file)
def build_porb():
    """Compute the HMM probability tables and cache each one to disk.

    NOTE(review): "porb" looks like a typo for "prob"; the name is kept
    for caller compatibility.
    """
    # cal_prob() yields the start, transition and emission tables in order.
    tables = cal_prob()
    data_dir = config.get_data_path()
    for table, filename in zip(tables, ('start_p.p', 'trans_p.p', 'emit_p.p')):
        save_cache(table, os.path.join(data_dir, filename))