    def configure_args(self):
        """Define arguments for this script. Called from
        :py:meth:`__init__()`.

        Re-define to define custom command-line arguments or pass
        through existing ones::

            def configure_args(self):
                super(MRYourJob, self).configure_args()

                self.add_passthru_arg(...)
                self.add_file_arg(...)
                self.pass_arg_through(...)

                ...
        """
        # if script path isn't set, expect it on the command line
        if self._FIRST_ARG_IS_SCRIPT_PATH:
            self.arg_parser.add_argument(
                dest='script_path',
                help='path of script to launch')

        self.arg_parser.add_argument(
            dest='args', nargs='*',
            help=('input paths to read (or stdin if not set). If --spark'
                  ' is set, the input and output path for the spark job.'))

        _add_basic_args(self.arg_parser)
        _add_job_args(self.arg_parser)
        _add_runner_args(self.arg_parser)
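    # A minimal sketch of the override pattern described in the docstring
    # above. The job class and option names below are hypothetical; the
    # pattern is: call super() first, then register custom options with
    # add_passthru_arg(), add_file_arg(), and pass_arg_through().
    #
    #     from mrjob.job import MRJob
    #
    #     class MRWordFreqCount(MRJob):
    #
    #         def configure_args(self):
    #             super(MRWordFreqCount, self).configure_args()
    #
    #             # passed through to the job on every node
    #             self.add_passthru_arg(
    #                 '--min-count', type=int, default=1,
    #                 help='ignore words that appear fewer times than this')
    #
    #             # uploaded to each node; the job sees a local path
    #             self.add_file_arg(
    #                 '--stop-words-file',
    #                 help='file with one stop word per line')
    #
    #             # also expose an existing runner option to the job
    #             self.pass_arg_through('--runner')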