def main():
  description = __doc__.format(version=VERSION)

  parser = argparse.ArgumentParser(description=description,
                                   formatter_class=CustomArgParseFormatter)
  parser.add_argument('-i', '--input_config',
                      required=True,
                      help='The path to the input config file (required).')
  parser.add_argument('-p', '--pipeline_config',
                      required=True,
                      help='The path to the pipeline config file (required).')
  parser.add_argument('-c', '--cloud_url',
                      default=None,
                      help='The Google Cloud Storage URL to upload to.')
  parser.add_argument('-o', '--output',
                      default='output_files',
                      help='The output folder to write files to. ' +
                           'Used even if uploading to cloud storage.')
  args = parser.parse_args()

  # If the directory for outputted Packager files already exists, delete it
  # and remake it so that each run starts clean.
  if os.path.exists(args.output):
    shutil.rmtree(args.output)
  os.mkdir(args.output)

  controller = ControllerNode()

  # Use safe_load here: plain yaml.load requires an explicit Loader in modern
  # PyYAML and can construct arbitrary objects from untrusted input.
  with open(args.input_config) as f:
    input_config_dict = yaml.safe_load(f)
  with open(args.pipeline_config) as f:
    pipeline_config_dict = yaml.safe_load(f)

  if args.cloud_url:
    if not args.cloud_url.startswith('gs://'):
      parser.error(
          'Invalid cloud URL!  Only gs:// URLs are supported currently.')

  try:
    controller.start(args.output, input_config_dict, pipeline_config_dict,
                     args.cloud_url)
  except:
    # If the controller throws an exception during startup, call stop() to
    # shut down any external processes that have already been started, then
    # re-raise the exception.
    controller.stop()
    raise

  # Sleep as long as the pipeline is still running.
  while controller.is_running():
    try:
      time.sleep(1)
    except KeyboardInterrupt:
      # ffmpeg/packager can take a while to be killed, so on SIGINT this
      # handler stops both running processes explicitly before breaking out.
      controller.stop()
      break
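
# A hypothetical invocation of this CLI (the script name, config file names,
# and bucket URL below are illustrative, not from the source):
#
#   python3 main.py -i input_config.yaml -p pipeline_config.yaml \
#       -c gs://my-bucket/folder/ -o my_output_files
#
# Standard entry-point guard so the module can be run as a script:
if __name__ == '__main__':
  main()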

def start():
  global controller
  if controller is not None:
    return createCrossOriginResponse(
        status=403, body='Instance already running!')

  cleanup()

  # Receives configs from the tests to start Shaka Streamer.
  try:
    configs = json.loads(flask.request.data)
  except Exception as e:
    return createCrossOriginResponse(status=400, body=str(e))

  # Enforce quiet mode without needing it specified in every test.
  configs['pipeline_config']['quiet'] = True

  controller = ControllerNode()
  try:
    controller.start(configs['output_location'],
                     configs['input_config'],
                     configs['pipeline_config'],
                     configs['bitrate_config'],
                     check_deps=False,
                     use_hermetic=not use_system_binaries)
  except Exception as e:
    # If the controller throws an exception during startup, we want to call
    # stop() to shut down any external processes that have already been
    # started.
    controller.stop()
    controller = None

    # Then, fail the request with a message that indicates what the error
    # was.
    if isinstance(e, ConfigError):
      body = json.dumps({
          'error_type': type(e).__name__,
          'class_name': e.class_name,
          'field_name': e.field_name,
          'field_type': e.field.get_type_name(),
          'message': str(e),
      })
      return createCrossOriginResponse(
          status=418, mimetype='application/json', body=body)
    elif isinstance(e, RuntimeError):
      body = json.dumps({
          'error_type': 'RuntimeError',
          'message': str(e),
      })
      return createCrossOriginResponse(
          status=418, mimetype='application/json', body=body)
    else:
      print('EXCEPTION', repr(e), traceback.format_exc(), flush=True)
      return createCrossOriginResponse(status=500, body=str(e))

  return createCrossOriginResponse()
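
# A hypothetical client for the handler above, showing what a test might
# POST.  The base URL, port, and '/start' route are assumptions, and the
# config bodies are placeholders; only the payload keys come from the
# handler itself.
import json
import requests  # third-party HTTP client, assumed to be installed

def start_streamer(base_url: str = 'http://localhost:8000') -> None:
  configs = {
      'output_location': 'output_files',             # where output is written
      'input_config': {'inputs': []},                # placeholder input config
      'pipeline_config': {'streaming_mode': 'vod'},  # placeholder pipeline config
      'bitrate_config': {},                          # placeholder bitrate ladder
  }
  # The handler parses flask.request.data directly, so send a raw JSON body.
  response = requests.post(base_url + '/start', data=json.dumps(configs))
  # Status 418 carries a structured JSON error body for config/runtime errors.
  if response.status_code == 418:
    print('Streamer error:', response.json()['message'])
  else:
    response.raise_for_status()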

def main():
  description = __doc__.format(version=VERSION)

  parser = argparse.ArgumentParser(description=description,
                                   formatter_class=CustomArgParseFormatter)
  parser.add_argument('-i', '--input_config',
                      required=True,
                      help='The path to the input config file (required).')
  parser.add_argument('-p', '--pipeline_config',
                      required=True,
                      help='The path to the pipeline config file (required).')
  parser.add_argument('-c', '--cloud_url',
                      default=None,
                      help='The Google Cloud Storage or Amazon S3 URL to ' +
                           'upload to.  (Starts with gs:// or s3://)')
  parser.add_argument('-o', '--output',
                      default='output_files',
                      help='The output folder to write files to. ' +
                           'Used even if uploading to cloud storage.')
  args = parser.parse_args()

  # If the directory for outputted Packager files already exists, delete it
  # and remake it so that each run starts clean.
  if os.path.exists(args.output):
    shutil.rmtree(args.output)
  os.mkdir(args.output)

  controller = ControllerNode()

  # Use safe_load here: plain yaml.load requires an explicit Loader in modern
  # PyYAML and can construct arbitrary objects from untrusted input.
  with open(args.input_config) as f:
    input_config_dict = yaml.safe_load(f)
  with open(args.pipeline_config) as f:
    pipeline_config_dict = yaml.safe_load(f)

  if args.cloud_url:
    if (not args.cloud_url.startswith('gs://') and
        not args.cloud_url.startswith('s3://')):
      parser.error(
          'Invalid cloud URL!  Only gs:// and s3:// URLs are supported')

  with controller.start(args.output, input_config_dict, pipeline_config_dict,
                        args.cloud_url):
    # Poll the controller until the pipeline stops running, then map the
    # final status to a process exit code.
    while True:
      status = controller.check_status()
      if status != node_base.ProcessStatus.Running:
        return 0 if status == node_base.ProcessStatus.Finished else 1

      time.sleep(1)
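
# Unlike the earlier version, this main() returns an exit code (0 when the
# pipeline finished cleanly, 1 otherwise).  A sketch of an entry-point guard
# that propagates it to the shell:
import sys

if __name__ == '__main__':
  sys.exit(main())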

def start():
  global controller
  if controller is not None:
    return createCrossOriginResponse(
        status=403, body='Instance already running!')

  cleanupFiles()

  # Receives configs from the tests to start Shaka Streamer.
  configs = json.loads(flask.request.data)
  controller = ControllerNode()
  try:
    controller.start(OUTPUT_DIR,
                     configs['input_config'],
                     configs['pipeline_config'])
  except:
    # If the controller throws an exception during startup, we want to call
    # stop() to shut down any external processes that have already been
    # started.  Then, re-raise the exception.
    controller.stop()
    raise

  return createCrossOriginResponse()
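
# createCrossOriginResponse() is not shown in this excerpt.  A minimal
# sketch of one plausible implementation, assuming Flask and that the tests
# are served from a different origin than this server:
import flask

def createCrossOriginResponse(body=None, status=200, mimetype='text/plain'):
  # Build a normal Flask response, then allow any origin so that
  # cross-origin test pages are permitted to read the result.
  response = flask.Response(response=body, status=status, mimetype=mimetype)
  response.headers.add('Access-Control-Allow-Origin', '*')
  return response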