def run(metric="SSIM", thre=0.05): out_dir = os.path.join(working_dir, "Phase1", metric) db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')}) sql = 'select hash, title, playlist from videos where hash = ?' with open("verified_videos.txt") as fin: for line in fin.readlines(): video_hash = line.strip() video_hash, video_title, video_playlist = db.queryone( sql, video_hash) # print video_title, video_hash video = video_title.strip() + '_' + video_hash video_file = video + ".mp4" video_path = os.path.join(video_dir, video_playlist, video_file) if (not os.path.exists(video_path)): video_file = video_title.strip() + ".mp4" video_path = os.path.join(video_dir, video_playlist, video_file) # print video_path out_folder = os.path.join(out_dir, video) if os.path.exists(out_folder): # os.rmdir(out_folder) continue else: os.mkdir(out_folder) extract_frames(video_path, out_folder=out_folder) diff_frames(out_folder, thre=thre, metric=metric)
def compare():
    out_dir1 = os.path.join(working_dir, "Phase1", "SSIM")
    out_dir2 = os.path.join(working_dir, "Phase1", "NRMSE")
    db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')})
    sql = 'select hash, title, playlist from videos where hash = ?'
    with open("verified_videos.txt") as fin:
        for line in fin.readlines():
            video_hash = line.strip()
            video_hash, video_title, video_playlist = db.queryone(sql, video_hash)
            # print video_title, video_hash

            video = video_title.strip() + '_' + video_hash
            frame_folder = os.path.join(out_dir1, video)
            with open(os.path.join(frame_folder, 'frames.txt')) as fin2:
                frames1 = fin2.readlines()[0].split()

            frame_folder = os.path.join(out_dir2, video)
            with open(os.path.join(frame_folder, 'frames.txt')) as fin2:
                frames2 = fin2.readlines()[0].split()

            # frames kept only by SSIM vs. frames kept only by NRMSE
            print(len(set(frames1) - set(frames2)), len(set(frames2) - set(frames1)))
def main():
    import os, sys
    from dbimpl import DBImpl

    db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')})
    sql = 'select title from videos where hash = ?'

    video_hash = 'jJjg4JweJZU'
    frame = 143
    # video_hash = 'o4Or0PMI_aI'
    # frame = 378
    # video_hash = '6HydEu75iQI'
    # frame = 229
    # video_hash = '6TIeyVWPvDY'
    # frame = 225
    # video_hash = 'VKTEjBQzkgs'
    # frame = 37
    # video_hash = 'KUdro0G1BV4'
    # frame = 81

    video_title = db.queryone(sql, video_hash)[0].strip()
    print(video_title, video_hash)

    video_folder = video_title + '_' + video_hash
    completed_path = os.path.join(images_dir, video_folder, '%d.png' % frame)
    img = cv2.imread(completed_path)

    cimg = CImage(img, name=video_folder)
    cimg.preprocess()
    # cimg.show()
    # cimg.cluster_lines()

    # draw the largest bounding box (by area) found in the frame
    rects = cimg.find_contours(show=False)
    rects = sorted(rects, key=lambda x: x[2] * x[3], reverse=True)
    x, y, w, h = rects[0]
    cv2.rectangle(cimg.img, (x, y), (x + w, y + h), (0, 0, 255), 2)

    cv2.imshow('image', cimg.img)
    if cv2.waitKey(0) & 0xff == 27:
        cv2.destroyAllWindows()
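# CImage.find_contours is not shown in this file. For orientation only, a typical
# OpenCV contour-to-bounding-box routine (a sketch, assuming a preprocessed binary
# image and the OpenCV 4.x return signature; not necessarily what CImage does):
#
#   contours, _ = cv2.findContours(binary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
#   rects = [cv2.boundingRect(c) for c in contours]   # each rect is (x, y, w, h)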
def stat_valid(video_hash):
    db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')})
    sql = 'select title, playlist from videos where hash = ?'
    res = db.queryone(sql, video_hash)
    video_name = res[0].strip()

    # image_folder = '../public/Images/%s_%s' % (video_name, video_hash)
    image_folder = images_dir + '/%s_%s' % (video_name, video_hash)
    with open(os.path.join(image_folder, 'predict.json')) as fin:
        predict_info = json.load(fin)

    # count frames labeled valid vs. invalid
    valid_count, invalid_count = 0, 0
    for f in predict_info:
        if predict_info[f]['label'] == 'valid':
            valid_count += 1
        else:
            invalid_count += 1
    print(valid_count, invalid_count)
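# For reference, predict.json is read here as a mapping from frame id to an object
# with at least a 'label' field (and, as used by the IoU script below, a 'predict'
# field). The exact schema is inferred from how the file is read, so the example is
# an assumption:
#
#   {
#     "143": {"label": "valid",   "predict": "valid"},
#     "225": {"label": "invalid", "predict": "valid"}
#   }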
def stat(metric="SSIM"): out_dir = os.path.join(working_dir, "Phase1", metric) db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')}) sql = 'select hash, title, playlist from videos where hash = ?' total = 0 with open("verified_videos.txt") as fin: for line in fin.readlines(): video_hash = line.strip() video_hash, video_title, video_playlist = db.queryone( sql, video_hash) # print video_title, video_hash video = video_title.strip() + '_' + video_hash frame_folder = os.path.join(out_dir, video) with open(os.path.join(frame_folder, 'frames.txt')) as fin2: line = fin2.readlines()[0] print(len(line.split())) total += len(line.split()) print total
class APIDBImpl:
    def __init__(self):
        self.dbimpl = DBImpl({
            "type": "mysql",
            "url": "127.0.0.1",
            "username": "******",
            "password": "******",
            "database": "link_api"
        })

    def query_records(self, entity):
        # strip a trailing parameter list, e.g. "foo(int, int)" -> "foo"
        idx = entity.find('(')
        if idx > 0:
            entity = entity[0:idx].strip()
        sql = 'select * from link_api_record where name = %s'
        return self.dbimpl.querymany(sql, entity)

    def query_web_cache(self, link):
        sql = 'select * from web_cache where url = %s'
        return self.dbimpl.queryone(sql, link)

    def insert_or_update_cache(self, result):
        try:
            if not result[3]:
                sql = 'update web_cache set content=%s, access_time=%s where url=%s'
                self.dbimpl.updateone(sql, result[1], datetime.now(), result[2])
            else:
                sql = 'insert into web_cache(url, content) values(%s, %s)'
                self.dbimpl.updateone(sql, result[2], result[1])
        except Exception as e:
            print(e)

    def close(self):
        self.dbimpl.close()
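# Illustrative usage of APIDBImpl. The credentials above are masked placeholders,
# and the entity name and URL below are invented examples, not values from this repo:
#
#   api_db = APIDBImpl()
#   records = api_db.query_records('substring(int, int)')    # parameter list is stripped
#   cached = api_db.query_web_cache('https://example.com/docs')
#   api_db.close()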
import diff_match_patch as dmp
import ocr, re, sys, numpy, json
import os
import cv2
sys.path.append('../../python')
from dbimpl import DBImpl
from setting import *

MIN_INTERVAL = 30

video_hash = sys.argv[1]

db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')})
sql = 'select title, playlist from videos where hash = ?'
res = db.queryone(sql, video_hash)
video_name = res[0].strip()
video_playlist = res[1].strip()

# leftover debug settings from an earlier version of this script:
# vnum, fnum, fnumf = int(sys.argv[1]), 1, 1. #4321
# fps = [15.002999, 29.970030, 30, 23.976150, 30, 29.970030, 30.001780, 30, 29.970030, 29.970030, 30, 15, 23.976024, 30, 15, 30, 29.873960, 30, 15, 25.000918, 30][vnum-1] #...
# print('starting with frame', fnum, '\n')

video_file = video_name + "_" + video_hash + ".mp4"
video_path = os.path.join(video_dir, video_playlist, video_file)
if not os.path.exists(video_path):
    video_file = video_name + ".mp4"
    video_path = os.path.join(video_dir, video_playlist, video_file)

video = cv2.VideoCapture(video_path)
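# The code that consumes `video` is not shown here. For orientation, a typical
# OpenCV read loop looks like the sketch below; the use of MIN_INTERVAL as a
# sampling stride is an assumption, not something confirmed by this script:
#
#   frame_idx = 0
#   while True:
#       ok, frame = video.read()
#       if not ok:
#           break                      # end of stream
#       if frame_idx % MIN_INTERVAL == 0:
#           gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
#           # ... OCR / diffing on the sampled frame would go here
#       frame_idx += 1
#   video.release()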
def extract_frames(video_hash):
    db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')})
    sql = 'select title, playlist from videos where hash = ?'
    res = db.queryone(sql, video_hash)
    video_name = res[0].strip()
    video_playlist = res[1].strip()

    video_file = video_name + "_" + video_hash + ".mp4"
    video_path = os.path.join(video_dir, video_playlist, video_file)
    if not os.path.exists(video_path):
        video_file = video_name + ".mp4"
        video_path = os.path.join(video_dir, video_playlist, video_file)

    video = cv2.VideoCapture(video_path)
    fps = video.get(cv2.CAP_PROP_FPS)
    # fps = math.ceil(fps)
    # fps = 30
    frame_count = video.get(cv2.CAP_PROP_FRAME_COUNT)
    print('video fps/frame count:', fps, "/", frame_count)

    extract_folder = '../public/Images/%s_%s' % (video_name, video_hash)
    main_folder = '../public/extracts/%s_%s/main' % (video_name, video_hash)

    second = 1
    filter_frames = []
    frame_seg = {}
    seg_frame = {}
    while True:
        # frame sampled at each full second of the video
        frame_num = math.ceil(second * fps) + 1
        for seg in range(1, 4):
            file_path = os.path.join(main_folder, 'frame%d-segment%d.txt' % (frame_num, seg))
            print(file_path)
            if os.path.exists(file_path):
                # print 'found', frame_num
                if frame_num not in filter_frames:
                    filter_frames.append(frame_num)
                    frame_seg[frame_num] = [seg]
                else:
                    frame_seg[frame_num].append(seg)

                if seg in seg_frame:
                    seg_frame[seg].append(frame_num)
                else:
                    seg_frame[seg] = [frame_num]
        second += 1
        if frame_num > frame_count:
            break

    # print filter_frames
    # print [int(math.floor((f)/fps)) for f in filter_frames]

    # frames that belong to more than one segment
    for f in frame_seg:
        if len(frame_seg[f]) > 1:
            print(f)

    # number of sampled frames per segment
    for s in seg_frame:
        print(s, len(seg_frame[s]))
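# Worked example of the second-to-frame mapping above: at fps = 29.97, second 1
# maps to frame math.ceil(1 * 29.97) + 1 = 31 and second 2 to frame 61, so each
# frame%d-segment%d.txt name is probed roughly once per second of video.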
if(iou<0): iou=0 # return the intersection over union value return iou db = DBImpl({'url': os.path.join(playlists_dir, 'videos.db')}) with open("verified_videos.txt") as fin, open("iou_results.csv", "w") as fout: sql = 'select hash, title from videos where hash = ?' pre_iou_results = [] for idx, line in enumerate(fin.readlines()): video_hash = line.strip() video_hash, video_title = db.queryone(sql, video_hash) video = video_title.strip() + '_' + video_hash.strip() print video iou_results = [] with open(os.path.join(images_dir, video, 'predict.json')) as fin2: predict_results = json.load(fin2) for frame in predict_results: if predict_results[frame]['label'] == 'valid' and predict_results[frame]['predict'] == 'invalid': iou_results.append((frame, 0, 'FN')) elif predict_results[frame]['label'] == 'invalid' and predict_results[frame]['predict'] == 'invalid': iou_results.append((frame, 1, 'TN')) elif predict_results[frame]['label'] == 'invalid' and predict_results[frame]['predict'] == 'valid': iou_results.append((frame, 0, 'FP'))