def run_command(prog, argv, url=None, key=None):
    description = '''Command line client for DOcplexcloud.'''
    epilog = '''Command details:
  info           Get and display information for the jobs whose ids are
                 specified as ARG.
  download       Download the attachments to the current directory.
  rm             Delete the jobs whose ids are specified as ARG.
  rm all         Delete all jobs.
  logs           Download and display the logs for the jobs whose ids are
                 specified.
  ls             List the jobs.
'''
    epilog_cli = '''  execute        Submit a job and wait for end of execution. Each ARG that
                 is a file is uploaded as the job input.
                 Example:
                    python run.py execute model.py model.data -v
                 executes a job whose input files are model.py and model.data,
                 in verbose mode.
'''
    filter_help = '''
Within filters, the following variables are defined:
    now: current date and time as timestamp in millisec
    minute: 60 sec in millisec
    hour: 60 minutes in millisec
    day: 24 hours in millisec
    job: the current job being filtered

Example filter usage:
    Delete all jobs older than 3 hours:
        python -m docplex.cli --filter "now - job['startedAt'] > 3*hour" rm
'''
    if url is None:
        epilog += epilog_cli
    epilog += filter_help
    parser = argparse.ArgumentParser(prog=prog,
                                     description=description,
                                     epilog=epilog,
                                     formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('command', metavar='COMMAND',
                        help='DOcplexcloud command')
    parser.add_argument('arguments', metavar='ARG', nargs='*',
                        help='Arguments for the command')
    parser.add_argument('--no-delete', action='store_true', default=False,
                        dest='nodelete',
                        help='If specified, jobs are not deleted after execution')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Verbose mode')
    parser.add_argument('--as', nargs=1, metavar='HOST',
                        dest='host_config', default=None,
                        help="'as host' - use the cplex_config_<HOST>.py configuration file found in PYTHONPATH")
    parser.add_argument('--url', nargs=1, metavar='URL',
                        dest='url', default=None,
                        help='The DOcplexcloud connection URL. If not specified, will use those found in docplex config files')
    parser.add_argument('--key', nargs=1, metavar='API_KEY',
                        dest='key', default=None,
                        help='The DOcplexcloud connection key. If not specified, will use those found in docplex config files')
    parser.add_argument('--details', action='store_true', default=False,
                        help='Display solve details as they are available')
    parser.add_argument('--filter', metavar='FILTER', default=None,
                        help="Filter on jobs. Example: --filter \"now - job['createdAt'] > 3600\"")
    parser.add_argument('--quiet', '-q', action='store_true', default=False,
                        help='Only show numeric IDs as output')
    args = parser.parse_args(argv)

    program_result = ProgramResults()

    # Get the context here so that we have some credentials at hand
    context = Context.make_default_context()

    if args.host_config is not None:
        config_name = "cplex_config_%s.py" % args.host_config[0]
        config_file = list(filter(os.path.isfile,
                                  [os.path.join(x, config_name) for x in sys.path]))
        if len(config_file) == 0:
            print("Could not find config file for host: %s" % args.host_config[0])
            program_result.return_code = -1
            return(program_result)
        if args.verbose:
            print("Overriding host config with: %s" % config_file[0])
        context.read_settings(config_file[0])

    # use credentials in context unless they are given to this function
    client_url = context.solver.docloud.url if url is None else url
    client_key = context.solver.docloud.key if key is None else key
    # but if there are some credentials in arguments (--url, --key), use them
    if args.url:
        client_url = args.url[0]
    if args.key:
        client_key = args.key[0]
    if args.verbose:
        print('**** Connecting to %s with key %s' % (client_url, client_key))
        print('Will send command %s' % args.command)
        print('Arguments:')
        for i in args.arguments:
            print('  -> %s' % i)
        print('verbose = %s' % args.verbose)

    client = JobClient(client_url, client_key)

    target_jobs = []
    if args.filter:
        jobs = client.get_all_jobs()
        now = (datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds() * 1000.0
        minute = 60 * 1000
        hour = 60 * minute
        day = 24 * hour
        context = {'now': now,
                   'minute': minute,
                   'hour': hour,
                   'day': day,
                   }
        for j in jobs:
            context['job'] = j
            keep = False
            try:
                keep = eval(args.filter, globals(), context)
            except KeyError:
                # if a key was not found, just assume the expression is false
                keep = False
            if keep:
                target_jobs.append(j)
        if target_jobs:
            for i in target_jobs:
                print('applying to %s' % i['_id'])

    if args.command == 'ls':
        ls_jobs(client, program_result, quiet=args.quiet, selected_jobs=target_jobs)
    elif args.command == 'info':
        if target_jobs:
            args.arguments = [x["_id"] for x in target_jobs]
        elif len(args.arguments) == 1 and args.arguments[0] == 'all':
            args.arguments = [x["_id"] for x in client.get_all_jobs()]
        for jid in args.arguments:
            info_text = "NOT FOUND"
            try:
                job = client.get_job(jid)
                info_text = json.dumps(job, indent=3)
            except:
                pass
            print("%s:\n%s" % (jid, info_text))
    elif args.command == 'rm':
        if target_jobs:
            joblist = [x["_id"] for x in target_jobs]
        elif args.arguments:
            joblist = args.arguments
        else:
            joblist = shlex.split(sys.stdin.read())
        rm_job(client, joblist, verbose=args.verbose)
    elif args.command == 'logs':
        if target_jobs:
            if len(target_jobs) != 1:
                print('Logs can only be retrieved when the filter selects one job (actual selection count = %s)' % len(target_jobs))
                program_result.return_code = -1
                return(program_result)
            args.arguments = [x["_id"] for x in target_jobs]
        if not args.arguments:
            print('Please specify a job list in arguments or using a filter.')
            program_result.return_code = -1
            return(program_result)
        for jid in args.arguments:
            log_items = client.get_log_items(jid)
            for log in log_items:
                for record in log["records"]:
                    print(record["message"])
    elif args.command == 'download':
        if target_jobs:
            if len(target_jobs) != 1:
                print('Jobs can only be downloaded when the filter selects one job (actual selection count = %s)' % len(target_jobs))
                program_result.return_code = -1
                return(program_result)
            args.arguments = [x["_id"] for x in target_jobs]
        for jid in args.arguments:
            job = client.get_job(jid)
            for attachment in job['attachments']:
                print('downloading %s' % attachment['name'])
                with open(attachment['name'], 'wb') as f:
                    f.write(client.download_job_attachment(jid, attachment['name']))
    elif args.command == 'execute':
        if target_jobs:
            print('The execute command does not support job filtering')
            program_result.return_code = -1
            return(program_result)
        inputs = [{'name': basename(a), 'filename': a} for a in args.arguments]
        if args.verbose:
            for i in inputs:
                print("Uploading %s as attachment name %s" % (i['filename'], i['name']))
        execute_job(client, inputs, args.verbose, args.details, args.nodelete)
    else:
        print("Unknown command: %s" % args.command)
        program_result.return_code = -1
        return(program_result)
    return(program_result)
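
# Usage sketch (illustrative): run_command is what a command-line entry point
# would call. The prog name 'docplex.cli' is borrowed from the filter help
# text above and is an assumption here; exit handling simply reuses
# ProgramResults.return_code as set by the command handlers.
#
#     result = run_command('docplex.cli', sys.argv[1:])
#     sys.exit(result.return_code)
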
def submit_model_data(self, attachments=None, gzip=False,
                      info_callback=None, info_to_monitor=None):
    """Submits a job to the cloud service.

    Args:
        attachments: A list of attachments. Each attachment is a dict with
            the following keys:

            - 'name' : the name of the attachment
            - 'data' : the data for the attachment
        gzip: If ``True``, data is gzipped before being sent over the network.
        info_callback: A callback invoked when some info is available. It
            takes one parameter: a dict containing the info as it becomes
            available.
        info_to_monitor: A set of information to monitor with info_callback.
            Currently, can be ``jobid`` and ``progress``.
    """
    self.__vars = None
    self.timed_out = False
    self.results.clear()
    if not info_to_monitor:
        info_to_monitor = {}
    # check that the url is valid
    parts = urlparse(self.docloud_context.url)
    if not parts.scheme:
        raise DOcloudConnectorException(
            "Malformed URL: '%s': No schema supplied." % self.docloud_context.url)

    proxies = self.docloud_context.proxies
    try:
        client = JobClient(self.docloud_context.url,
                           self.docloud_context.key,
                           proxies=proxies)
    except TypeError:
        # docloud clients <= 1.0.172 do not have the proxies parameter
        warnings.warn(
            "Using a docloud client that does not support proxies in init()",
            UserWarning)
        client = JobClient(self.docloud_context.url,
                           self.docloud_context.key)
    self.log("client created")
    if proxies:
        self.log("proxies = %s" % proxies)

    # prepare client
    if self.docloud_context.log_requests:
        client.rest_callback = \
            lambda m, u, *a, **kw: self._rest_callback(m, u, *a, **kw)
    client.verify = self.docloud_context.verify
    client.timeout = self.docloud_context.get('timeout', None)

    try:
        try:
            # Extract the list of attachment names
            att_names = [a['name'] for a in attachments]

            # create job
            jobid = client.create_job(
                attachments=att_names,
                parameters=self.docloud_context.job_parameters)
            self.log("job creation submitted, id is: {0!s}".format(jobid))
            if info_callback and 'jobid' in info_to_monitor:
                info_callback({'jobid': jobid})
        except ConnectionError as c_e:
            raise DOcloudConnectorException(
                "Cannot connect to {0}, error: {1}".format(
                    self.docloud_context.url, str(c_e)))

        try:
            # now upload data
            for a in attachments:
                pos = 0
                if 'data' in a:
                    att_data = {'data': a['data']}
                elif 'file' in a:
                    att_data = {'file': a['file']}
                    pos = a['file'].tell()
                elif 'filename' in a:
                    att_data = {'filename': a['filename']}

                client.upload_job_attachment(jobid,
                                             attid=a['name'],
                                             **att_data)
                self.log("Attachment: %s has been uploaded" % a['name'])
                if self.docloud_context.debug_dump_dir:
                    target_dir = self.docloud_context.debug_dump_dir
                    if not os.path.exists(target_dir):
                        os.makedirs(target_dir)
                    self.log("Dumping input attachment %s to dir %s" % (a['name'], target_dir))
                    with open(os.path.join(target_dir, a['name']), "wb") as f:
                        if 'data' in a:
                            if isinstance(a['data'], bytes):
                                f.write(a['data'])
                            else:
                                f.write(a['data'].encode('utf-8'))
                        else:
                            a['file'].seek(pos)
                            f.write(a['file'].read())

            # execute job
            client.execute_job(jobid)
            self.log("DOcplexcloud execute has been submitted")

            # get job execution status until it's processed or failed
            timedout = False
            try:
                self._executionStatus = self.wait_for_completion(
                    client, jobid,
                    info_callback=info_callback,
                    info_to_monitor=info_to_monitor)
            except DOcloudInterruptedException:
                timedout = True
            self.log("docloud execution has finished")

            # get job status. Do this before any timeout handling
            self.jobInfo = client.get_job(jobid)

            if self.docloud_context.fire_last_progress and info_callback:
                progress_data = self.map_job_info_to_progress_data(self.jobInfo)
                info_callback({'progress': progress_data})

            if timedout:
                self.timed_out = True
                self.log("Solve timed out after {waittime} sec".format(
                    waittime=self.docloud_context.waittime))
                return

            # get solution => download all attachments
            try:
                for a in client.get_job_attachments(jobid):
                    if a['type'] == 'OUTPUT_ATTACHMENT':
                        name = a['name']
                        self.log("Downloading attachment '%s'" % name)
                        attachment_as_string = self._as_string(
                            client.download_job_attachment(jobid, attid=name))
                        self.results[name] = attachment_as_string
                        if self.docloud_context.debug_dump_dir:
                            target_dir = self.docloud_context.debug_dump_dir
                            if not os.path.exists(target_dir):
                                os.makedirs(target_dir)
                            self.log("Dumping attachment %s to dir %s" % (name, target_dir))
                            with open(os.path.join(target_dir, name), "wb") as f:
                                f.write(attachment_as_string.encode('utf-8'))
            except DOcloudNotFoundError:
                self.log("no solution in attachment")
            self.log("docloud results have been received")

            # on_solve_finished_cb
            if self.docloud_context.on_solve_finished_cb:
                self.docloud_context.on_solve_finished_cb(jobid=jobid,
                                                          client=client,
                                                          connector=self)
            return
        finally:
            if self.docloud_context.delete_job:
                deleted = client.delete_job(jobid)
                self.log("delete status for job: {0!s} = {1!s}".format(
                    jobid, deleted))
    finally:
        client.close()
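
# Usage sketch (illustrative): submitting an in-memory model as a single
# 'data' attachment and monitoring the job id and progress. The 'connector'
# instance, the 'model.lp' attachment name, and 'lp_text' are assumptions made
# for the example; only the attachments/info_callback/info_to_monitor contract
# comes from the docstring above.
#
#     def on_info(info):
#         if 'jobid' in info:
#             print("job id:", info['jobid'])
#         if 'progress' in info:
#             print("progress:", info['progress'])
#
#     connector.submit_model_data(
#         attachments=[{'name': 'model.lp',
#                       'data': lp_text.encode('utf-8')}],
#         info_callback=on_info,
#         info_to_monitor={'jobid', 'progress'})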