Example #1
0
def compress(videofolder, producedvideofolder, videoname, metadata):
    """Compress every raw video chunk in *videofolder* with ffmpeg.

    The compressed output is written to *producedvideofolder* as
    ``<videoname>.mp4``.  Progress flags are persisted to a
    ``metadata.json`` side file so a watching worker can tell whether
    compression is in progress (0), done (1) or failed (99).  On success
    the raw directory is removed and the DB row is pointed at the
    compressed file; on ffmpeg failure the DB row keeps the raw chunk
    path instead.

    Args:
        videofolder: directory holding the raw chunk files.
        producedvideofolder: directory receiving the compressed mp4 and
            the ``metadata.json`` status file.
        videoname: base name (without extension) for the output file.
        metadata: mutable dict; must contain ``'video_id'``.  Mutated in
            place with ``'infoExtracted'`` / ``'isCompressingDone'``.
    """
    import subprocess  # local import: shell-free replacement for os.system
    try:
        logging.debug('Compressing Started => ' + videoname)
        # BUG FIX: the original reused one `save_path` variable for both the
        # mp4 output and metadata.json, so for every chunk after the first
        # ffmpeg was told to write its output into metadata.json.  Keep the
        # two paths in separate variables computed once.
        output_path = os.path.join(producedvideofolder,
                                   secure_filename(videoname + '.mp4'))
        meta_path = os.path.join(producedvideofolder, 'metadata.json')

        for chunk in sorted(os.listdir(videofolder)):
            if '.json' in chunk:
                continue  # skip the metadata side file itself
            raw_video = os.path.join(videofolder, chunk)
            # Argument list with shell=False semantics: file names with
            # spaces or shell metacharacters can no longer break or inject
            # into the command line.
            cmd = ['ffmpeg', '-i', raw_video,
                   '-s', config.VID_RESOLUTION,
                   '-strict', '-2',
                   '-filter:v', 'fps=fps=' + str(config.FPS),
                   '-threads', str(config.NUMBER_OF_CORES),
                   output_path]
            logging.debug(' '.join(cmd))

            # Mark compression as "in progress" before running ffmpeg so the
            # worker polling metadata.json does not upload a half-written file.
            metadata['infoExtracted'] = 0
            metadata['isCompressingDone'] = 0
            with open(meta_path, 'w') as f:
                # str(dict) on purpose: the reader normalises single quotes
                # before json.loads (see search_and_extract).
                f.write(str(metadata))

            manager = DB()
            if subprocess.run(cmd).returncode == 0:  # success
                manager.update_video({
                    'status': 1,
                    'path': output_path,
                    'tags': "null",  # 'male, person'
                    'id': metadata['video_id'],
                })
                metadata['isCompressingDone'] = 1
                with open(meta_path, 'w') as f:
                    f.write(str(metadata))

                shutil.rmtree(videofolder)  # removing whole raw dir
                logging.debug('Compressing Complete => ' + producedvideofolder)
                # upload(producedvideofolder, bucket, producedvideofolder.replace(config.produced_data_dir, ''))
            else:
                # ffmpeg failed: fall back to recording the raw chunk path.
                manager.update_video({
                    'status': 1,
                    'path': raw_video,
                    'tags': "null",  # 'male, person'
                    'id': metadata['video_id'],
                })
                logging.debug(
                    'Error in ffmpeg compressing so uploading raw video ' +
                    videofolder)
                # upload(videofolder, bucket, videofolder.replace(config.data_dir, ''))
                metadata['isCompressingDone'] = 99  # failure marker
                with open(meta_path, 'w') as f:
                    f.write(str(metadata))
    except Exception as err:
        # Broad catch keeps the original best-effort contract, but log the
        # full traceback instead of just the message.
        logging.exception(err)
Example #2
0
def search_and_extract(manager):
    """Scan every produced-video folder, run tag extraction once per mp4,
    and push the result to the DB.

    For each folder under ``config.produced_data_dir`` the sibling
    ``metadata.json`` is read; if ``infoExtracted`` is still 0, tags are
    extracted from each mp4 and the flag is flipped to 1 on disk.  The
    DB row for the video is updated either way.

    Args:
        manager: DB handle; if ``None`` a fresh ``DB()`` is created.
            (The original unconditionally replaced the argument, making
            it dead — callers that pass a manager now get it used.)
    """
    if manager is None:
        manager = DB()

    try:
        for videoname in os.listdir(config.produced_data_dir):
            # videoname doubles as the invite id.
            videofolder = os.path.join(config.produced_data_dir, videoname)
            videofiles = [name for name in os.listdir(videofolder)
                          if '.mp4' in name]

            metadata_path = os.path.join(videofolder, 'metadata.json')
            with open(metadata_path, 'r') as jF:
                # metadata.json is written via str(dict) elsewhere, so the
                # single quotes must be normalised before json.loads.
                metadata = json.loads(jF.read().replace("'", '"'))
                print(metadata)

            for video in videofiles:
                videopath = os.path.join(videofolder, video)
                tags = 'null'
                try:
                    if int(metadata['infoExtracted']) == 0:
                        tags = extract_info(videopath)
                        metadata['infoExtracted'] = 1
                        with open(metadata_path, 'w') as jF:
                            jF.write(str(metadata))

                except Exception as err:
                    # Best-effort recovery: persist infoExtracted=1 first so
                    # a permanently failing video is not retried forever,
                    # then give extraction one more attempt.
                    print(err)
                    metadata['infoExtracted'] = 1
                    with open(metadata_path, 'w') as jF:
                        jF.write(str(metadata))
                    tags = extract_info(videopath)

                manager.update_video({
                    'status': 1,
                    'path': videopath,
                    'tags': tags,  # 'male, person'
                    'id': metadata['video_id'],
                })

    except Exception as err:
        print(err)

    return