def init_db():
    # Import all modules here that might define models so that they are
    # registered properly on the metadata. Otherwise you would have to
    # import them manually before calling init_db().
    import userapiapp.models
    Base.metadata.drop_all(bind=engine)
    get_logger().info('Database dropped')
    Base.metadata.create_all(bind=engine)
    get_logger().info('Database created')
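# Usage sketch (the module path below is an assumption; only the function
# body is shown in this section). Note that init_db() is destructive: it
# drops every table before recreating the schema, so it is only suitable
# for development or test setup:
#
#     from userapiapp.database import init_db
#     init_db()  # wipes and recreates all tables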
def get_clip(stream_id):
    stream = Stream.query.get(stream_id)
    if stream:
        if not stream.start_date:
            return jsonify(error_message='Stream not started'), 500
        else:
            if request.method == 'POST':
                start = request.form.get('start')
                stop = request.form.get('stop')
            else:
                start = request.args.get('start')
                stop = request.args.get('stop')
            start = convert_to_datetime(int(start)) if start else stream.start_date
            if stop:
                stop = convert_to_datetime(int(stop))
            else:
                if stream.stop_date is not None:
                    stop = stream.stop_date
                else:
                    stop = datetime.now()
            clip_request = ClipRequest.query.filter(
                ClipRequest.stream_id == stream_id,
                ClipRequest.start == start,
                ClipRequest.stop == stop).first()
            if not clip_request:
                clip_request = ClipRequest(stream_id, start, stop)
                db_session.add(clip_request)
                db_session.commit()
                get_logger().info(
                    "Clip request %d for stream %d (%s - %s) created",
                    clip_request.id, stream_id, start, stop)
                pop_params = ['python', 'manage.py', 'process_request',
                              '-r', str(clip_request.id)]
                if current_app.config['TESTING']:
                    pop_params += ['-t', 'True']
                Popen(pop_params)
            if not clip_request.start_processing_date:
                result = {'status': 'pending'}
            elif not clip_request.done_date:
                result = {'status': 'processing'}
            else:
                clips = [{'link': link.url} for link in clip_request.links]
                result = {'status': 'done', 'clips': clips}
            return jsonify(**result)
    else:
        return jsonify(error_message='Stream not found'), 404
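# Example (hypothetical): polling get_clip from a client until the clip is
# ready. The '/streams/<id>/clip' route is an assumption -- the decorator
# binding this view is not shown in this section.
def example_wait_for_clip(base_url, stream_id, start=None, stop=None):
    import time
    import requests
    params = {}
    if start is not None:
        params['start'] = start  # unix timestamp, as expected by convert_to_datetime
    if stop is not None:
        params['stop'] = stop
    while True:
        url = '{0}/streams/{1}/clip'.format(base_url, stream_id)
        data = requests.get(url, params=params).json()
        if data.get('status') == 'done':
            return [clip['link'] for clip in data['clips']]
        time.sleep(2)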
def get_clips_info(self, stream_id, start_stamp, stop_stamp):
    if self.use_mock:
        storage_mock.init(self)
    params = {
        'stream_id': stream_id,
        'start_time': start_stamp,
        'stop_time': stop_stamp
    }
    get_logger().info("Get meta info %d %s - %s from %s", stream_id,
                      start_stamp, stop_stamp, self.info_url)
    response = requests.get(self.info_url, params=params)
    return response.json()
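# The storage service is expected to return JSON shaped roughly like
#     {"clips": [ ...fragment metadata... ]}
# -- only the 'clips' key is relied upon in this section (see compile_clip
# below); the exact fragment fields are not shown here.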
def compile_clip(self, stream_id, start, stop, request_id):
    start_stamp = convert_from_datetime(start)
    stop_stamp = convert_from_datetime(stop)
    info = self.storage_proxy.get_clips_info(stream_id, start_stamp, stop_stamp)
    sorted_clips = Seamstress.sort_clips(request_id, info['clips'])
    get_logger().debug("%d fragments for request %d found. %d groups",
                       len(info['clips']), request_id, len(sorted_clips))
    output_files = []
    for group in sorted_clips:
        output_file = Seamstress.get_output_name(
            stream_id,
            group.start_stamp() + group.start_offset(start_stamp),
            group.length(start_stamp, stop_stamp))
        full_output_file = os.path.join(self.result_folder, output_file)
        if not os.path.isfile(full_output_file):
            group.download_fragments(self.storage_proxy, self.temp_folder)
            self.join_clip(group.fragments, full_output_file,
                           group.start_offset(start_stamp),
                           group.length(start_stamp, stop_stamp))
            get_logger().info("Clip %s created", full_output_file)
        else:
            get_logger().info("Clip %s already exists", full_output_file)
        output_files.append(full_output_file)
        group.remove_downloaded()
    get_logger().debug("All fragments for request %d removed", request_id)
    return output_files
def start(stream_id):
    stream = Stream.query.get(stream_id)
    if stream:
        if stream.start_date:
            return jsonify(error_message='Stream already started'), 500
        else:
            if segmentor_proxy.start(stream.id, stream.url):
                stream.start_date = datetime.now()
                db_session.commit()
                get_logger().info("Stream %d started", stream.id)
                return jsonify(result='ok')
            else:
                get_logger().error("Cannot start stream %d", stream.id)
                return jsonify(error_message='Cannot start stream'), 500
    else:
        return jsonify(error_message='Stream not found'), 404
def create():
    if request.json:
        url = request.json.get('url')
        description = request.json.get('description')
    else:
        url = request.form.get('url')
        description = request.form.get('description')
    if not url:
        return jsonify(error_message='URL cannot be empty'), 500
    else:
        stream = Stream(url, description)
        db_session.add(stream)
        db_session.commit()
        get_logger().info("Stream %d (url: %s , description: %s) created",
                          stream.id, stream.url, stream.description)
        return jsonify(stream_id=stream.id)
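# Example (hypothetical): creating a stream with a JSON POST. The '/streams'
# route is an assumption -- only the view body is shown in this section.
def example_create_stream(base_url, stream_url, description=''):
    import requests
    payload = {'url': stream_url, 'description': description}
    response = requests.post('{0}/streams'.format(base_url), json=payload)
    return response.json()['stream_id']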
def clear_old_requests():
    """ Clear all old requests """
    requests_count = 0
    files_count = 0
    for clip_request in ClipRequest.query.all():
        for link in clip_request.links:
            if os.path.isfile(link.file_path):
                os.remove(link.file_path)
                files_count += 1
            db_session.delete(link)
        db_session.delete(clip_request)
        requests_count += 1
    db_session.commit()
    get_logger().info("Old requests cleared. Deleted: %d requests, %d files",
                      requests_count, files_count)
    print "Deleted: {0} requests, {1} files".format(requests_count, files_count)
def process_request(request_id, test):
    """ Process a clip request. Params: -r <request_id> """
    if test:
        print '!!! TEST REQUEST PROCESSING !!!'
        current_app.config.from_object('tests.test_settings')
        init_app(current_app)
        print current_app.config['DATABASE_URI']
    r = ClipRequest.query.get(request_id)
    if r is None:
        get_logger().error("Request %s not found", request_id)
    else:
        r.start_processing_date = datetime.now()
        db_session.commit()
        get_logger().info("Start to process request %s", request_id)
        s = Seamstress(StorageProxy(current_app.config),
                       current_app.config['TEMP_FOLDER'],
                       current_app.config['RESULT_CLIPS_FOLDER'],
                       current_app.config['FFMPEG_BIN'])
        output_files = s.compile_clip(r.stream_id, r.start, r.stop, r.id)
        r.done_date = datetime.now()
        for o in output_files:
            link = ClipLink(
                os.path.join(current_app.config['DOWNLOAD_LINK_PREFIX'],
                             o.split('/')[-1]),
                os.path.join(current_app.config['RESULT_CLIPS_FOLDER'], o))
            r.links.append(link)
        db_session.commit()
        get_logger().info("Finished processing request %s", request_id)
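# Invoked out-of-process by get_clip() above, roughly as:
#     python manage.py process_request -r <request_id> [-t True]
# (the -t flag switches to the test configuration, handled by `test` here)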
def get_clip(self, file_name, clip_id):
    if self.use_mock:
        storage_mock.init(self)
    clip_url = self.clip_url.format(clip_id)
    get_logger().info("Download clip %d from %s", clip_id, clip_url)
    response = requests.get(clip_url, stream=True)
    with open(file_name, 'wb') as f:
        file_size = int(response.headers['content-length'])
        get_logger().info("Download to %s Bytes: %s", file_name, file_size)
        file_size_dl = 0
        for chunk in response.iter_content(chunk_size=8024):
            if chunk:  # filter out keep-alive chunks
                file_size_dl += len(chunk)
                f.write(chunk)
                f.flush()
    get_logger().debug("Clip %s downloaded", file_name)
def get_status(self, stream_id):
    if self.use_mock:
        segmentor_mock.init(self)
    full_url = self.status_url.format(stream_id)
    try:
        get_logger().info("Get status of stream %d, call [GET] %s",
                          stream_id, full_url)
        response = requests.get(full_url)
    except Exception:
        get_logger().exception("Cannot call status function for stream %d",
                               stream_id)
        return None
    else:
        if response.status_code == 200:
            return response.json()
        else:
            get_logger().error("Error while calling status function for %d: %s",
                               stream_id, response.text)
            return None
def stop(self, stream_id):
    if self.use_mock:
        segmentor_mock.init(self)
    full_url = self.stop_url.format(stream_id)
    try:
        get_logger().info("Stop record for stream %d, call [POST] %s",
                          stream_id, full_url)
        response = requests.post(full_url)
    except Exception:
        get_logger().exception("Cannot call stop function for stream %d",
                               stream_id)
        return False
    else:
        if response.status_code == 200:
            return True
        else:
            get_logger().error("Error while calling stop function for %d: %s",
                               stream_id, response.text)
            return False
def join_clip(self, fragments_to_join, output_file, cut_start=0,
              cut_length=None):
    converted = []
    for f in fragments_to_join:
        fragment_path = f[FragmentsGroup.DOWNLOAD_PATH_FIELD]
        new_filename = fragment_path + '.mpg'
        get_logger().debug("Convert %s to %s", fragment_path, new_filename)
        # Remux each fragment to MPEG-TS so the streams can be concatenated
        # without re-encoding.
        command = [self.ffmpeg, '-i', fragment_path,
                   '-acodec', 'copy', '-vcodec', 'copy',
                   '-f', 'mpegts', '-vbsf', 'h264_mp4toannexb',
                   '-y', new_filename]
        Popen(command, bufsize=10**8).wait()
        converted.append(new_filename)
    all_clips = '|'.join(converted)
    get_logger().debug("Concat %d fragments to %s", len(converted), output_file)
    # Concatenate the remuxed fragments and cut out the requested interval.
    command = [self.ffmpeg, '-i', 'concat:' + all_clips,
               '-acodec', 'copy', '-vcodec', 'copy', '-ss', str(cut_start)]
    if cut_length:
        command += ['-t', str(cut_length)]
    command += ['-absf', 'aac_adtstoasc', '-y', output_file]
    get_logger().debug("ffmpeg command: %s", ' '.join(command))
    Popen(command, bufsize=10**8).wait()
    for f in converted:
        get_logger().debug("Remove %s", f)
        os.remove(f)
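# For reference, the two ffmpeg invocations above correspond roughly to:
#     ffmpeg -i frag.mp4 -acodec copy -vcodec copy -f mpegts \
#            -vbsf h264_mp4toannexb -y frag.mp4.mpg
#     ffmpeg -i "concat:a.mpg|b.mpg" -acodec copy -vcodec copy \
#            -ss <cut_start> -t <cut_length> -absf aac_adtstoasc -y out.mp4
# (frag.mp4, a.mpg, b.mpg and out.mp4 are placeholder names.)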
def hello():
    get_logger().info('Hello!')
    return 'Hello! I\'m the UserAPI application!'