def job_set_results(self, params):
    """Fetch the finished job's captured stderr and stdout.

    Downloads the two stream files from the remote queue into a fresh
    pool-managed temporary directory and publishes them on the
    'stderr' and 'stdout' output ports as PathObjects.
    """
    super(SubmitShellJob, self).job_set_results(params)

    out_dir = self.interpreter.filePool.create_directory(
        prefix='vt_tmp_shelljobout_').name
    queue = QueueCache.get(params['destination'], params['queue'])
    queue.download(params['job_id'], ['_stderr', '_stdout'],
                   directory=out_dir)

    # The queue writes the streams as '_stderr' and '_stdout' files.
    for stream in ('stderr', 'stdout'):
        self.set_output(stream,
                        PathObject(os.path.join(out_dir, '_' + stream)))
def compute(self):
    """ compute() -> None
    Use BatchDisplayCellEvent to display a series of SVG files.

    The input File lists one 'svg_filename|sheet_name' pair per line;
    each pair becomes a DisplayCellEvent on the named sheet.
    """
    if self.has_input("File"):
        fileValue = self.get_input("File")
    else:
        fileValue = None
    if fileValue:
        batchDisplayEvent = BatchDisplayCellEvent()
        # FIXME: Will this work? there should be no
        # self.currentVersion in the module (there is a
        # self.version)
        batchDisplayEvent.vistrail = (self.vistrailName,
                                      self.currentVersion)
        from os.path import abspath, basename, dirname, join
        # SVG filenames in the listing are resolved relative to the
        # listing file's own directory; hoist it out of the loop.
        base_dir = dirname(abspath(fileValue.name))
        # 'with' guarantees the handle is closed even if parsing
        # raises (the original leaked it on error).
        with open(fileValue.name, 'r') as f:
            for line in f.read().split('\n'):
                comps = line.split('|')
                if len(comps) == 2:
                    e = DisplayCellEvent()
                    e.sheetReference = StandardSingleCellSheetReference()
                    e.sheetReference.sheetName = comps[1]
                    e.cellType = SVGCellWidget
                    F = PathObject(join(base_dir, basename(comps[0])))
                    e.inputPorts = (F, )
                    batchDisplayEvent.displayEvents.append(e)
        spreadsheetController.postEventToSpreadsheet(batchDisplayEvent)
def compute(self):
    """Download the directory at 'url' and publish it on the outputs.

    'local_path' receives the raw path string, 'directory' the same
    path wrapped in a PathObject.
    """
    self.check_input('url')
    source_url = self.get_input('url')
    allow_insecure = self.get_input('insecure')

    downloaded = self.download(source_url, allow_insecure)
    self.set_output('local_path', downloaded)
    self.set_output('directory', PathObject(downloaded))
def compute(self):
    """Download the file at 'url' and publish it on the outputs.

    'local_filename' receives the raw path string, 'file' the same
    path wrapped in a PathObject.
    """
    self.check_input('url')
    source_url = self.get_input('url')
    allow_insecure = self.get_input('insecure')

    downloaded = self.download(source_url, allow_insecure)
    self.set_output('local_filename', downloaded)
    self.set_output('file', PathObject(downloaded))
def compute(self):
    """Recursively download a remote directory from the job's queue.

    The files land under a fresh pool-managed temporary directory;
    the 'directory' output port receives the downloaded tree's path
    wrapped in a PathObject.
    """
    job = self.get_input('job')
    assert isinstance(job, RemoteJob)

    tmp_root = self.interpreter.filePool.create_directory(
        prefix='vt_tmp_shelljobout_').name
    dest = os.path.join(tmp_root, 'dir')
    # Suppress the server library's console chatter during transfer.
    with ServerLogger.hide_output():
        job.queue.download(job.job_id, self.get_input('pathname'),
                           destination=dest, recursive=True)
    self.set_output('directory', PathObject(dest))
def compute(self):
    """Synchronize a dataset file with the web repository.

    Server side: resolve the dataset's on-disk path from its checksum
    via the repository HTTP API and publish it on the 'file' port.
    Client side: hash the local file, sync it with the repository, and
    record the checksum as a module parameter; when the local file is
    missing, fall back to downloading by checksum.
    """
    if self.is_server:
        # Server: look up the dataset path by checksum.
        self.check_input('checksum')
        self.checksum = self.get_input("checksum")
        path_url = "%s/datasets/path/%s/" % (self.base_url, self.checksum)
        dataset_path_request = urllib2.urlopen(url=path_url)
        dataset_path = dataset_path_request.read()
        if os.path.isfile(dataset_path):
            self.set_output("file", PathObject(dataset_path))
    else:
        # Client side.
        self.check_input('file')
        self.in_file = self.get_input("file")
        if os.path.isfile(self.in_file.name):
            # Refuse to sync files larger than 25MB (26214400 bytes).
            size = os.path.getsize(self.in_file.name)
            if size > 26214400:
                show_warning("File is too large",
                             "file is larger than 25MB, "
                             "unable to sync with web repository")
                self.set_output("file", self.in_file)
            else:
                # Hash the file in chunks.  Binary mode ('rb', not 'r')
                # prevents newline translation from corrupting the
                # digest on Windows, and 'with' guarantees the handle
                # is closed even if reading fails.  The chunk size only
                # affects speed, never the resulting digest.
                digest = hashlib.sha1()
                with open(self.in_file.name, 'rb') as f:
                    block = f.read(8192)
                    while block:
                        digest.update(block)
                        block = f.read(8192)
                self.checksum = digest.hexdigest()
                # upload/download file
                self.data_sync()
                # Persist the checksum on the module so future runs
                # (and the server) can locate the file again.
                if not self.has_input('checksum'):
                    self.change_parameter('checksum', [self.checksum])
        else:
            # Local file not present; download by checksum if we have one.
            if self.has_input('checksum'):
                self.checksum = self.get_input("checksum")
                # download file
                self.data_sync()
def run_figure(self, code_str, graphics_dev, width, height,
               excluded_inputs=None):
    """Run R code that draws a figure and capture it to a temp file.

    Opens the requested R graphics device (e.g. 'png', 'pdf') on a
    pool-managed temporary file, executes ``code_str``, closes the
    device, and publishes the image on the 'imageFile' output port.

    ``excluded_inputs`` defaults to ``{'source'}``; a None sentinel is
    used instead of a mutable default argument so the set is not
    shared across calls.
    """
    if excluded_inputs is None:
        excluded_inputs = set(['source'])
    out_name = self.interpreter.filePool.create_file(
        prefix='vtr', suffix='.' + graphics_dev)
    # Track the file so the R temp-file cleanup can remove it later.
    r_temp_files.append(out_name)
    robjects.r[graphics_dev](file=out_name, width=width, height=height)
    self.run_code(code_str, use_input=True,
                  excluded_inputs=excluded_inputs)
    robjects.r['dev.off']()
    self.set_output('imageFile', PathObject(out_name))
def compute(self):
    """Copy a remote HDFS path to a local file/directory, with caching.

    The transfer is keyed on the module signature through the job
    monitor, so a previously completed download is reused.  Publishes
    the local path on 'Local File' and the machine on 'Machine'.

    Raises ModuleError when the local target already exists and
    'Override' is not set.
    """
    machine = self.get_machine()
    jm = self.job_monitor()
    job_id = self.signature  # renamed: 'id' shadowed the builtin
    job = jm.getCache(job_id)
    if not job:
        remote = self.get_input('Remote Location')
        local = self.get_input('Local File')
        override = self.force_get_input('Override', False)
        if '://' not in remote:
            remote = self.add_prefix(remote, machine)
        if os.path.exists(local.name):
            if not override:
                raise ModuleError(self, 'Output already exists')
            # Remove the stale target before re-downloading.
            if os.path.isdir(local.name):
                shutil.rmtree(local.name)
            else:
                os.unlink(local.name)
        # Stage the HDFS data in a remote scratch directory, then sync
        # it down with tar — per-file transfer is too slow with many
        # files.  (renamed: 'tempfile' shadowed the stdlib module.)
        remote_tmp = machine.remote.send_command('mktemp -d -u').strip()
        self.call_hdfs('dfs -get %s %s' % (remote, remote_tmp), machine)
        machine.local.send_command('mkdir %s' % local.name)
        machine.sync(local.name, remote_tmp,
                     mode=machine.MODE_REMOTE_LOCAL, use_tar=True)
        machine.remote.rm(remote_tmp, force=True, recursively=True)
        d = {'remote': remote, 'local': local.name}
        self.set_job_machine(d, machine)
        jm.setCache(job_id, d, self.job_name())
        job = jm.getCache(job_id)
    self.set_output('Local File', PathObject(job.parameters['local']))
    self.set_output('Machine', machine)
# file on repository mirrors local file, so use local file if self.up_to_date and os.path.isfile(self.in_file.name): self.set_output("file", self.in_file) else: # local file not present or out of date, download or use cache self.url = "%s/datasets/download/%s" % (self.base_url, self.checksum) local_filename = os.path.join(package_directory, cache_filename(self.url)) if not self._file_is_in_local_cache(local_filename): # file not in cache, download. try: urllib.urlretrieve(self.url, local_filename) except IOError, e: raise ModuleError(self, ("Invalid URL: %s" % e)) out_file = PathObject(local_filename) debug.warning('RepoSync is using repository data') self.set_output("file", out_file) def compute(self): # if server, grab local file using checksum id if self.is_server: self.check_input('checksum') self.checksum = self.get_input("checksum") # get file path path_url = "%s/datasets/path/%s/"%(self.base_url, self.checksum) dataset_path_request = urllib2.urlopen(url=path_url) dataset_path = dataset_path_request.read() if os.path.isfile(dataset_path):
def _set_result(self, path):
    """Publish *path* on the 'path' output port, wrapped in a PathObject."""
    result = PathObject(path)
    self.set_output('path', result)
def set_result(self, path):
    """Wrap *path* in a PathObject and publish it on the 'value' port."""
    self.set_output("value", PathObject(path))
def remap(old_func, new_module):
    """Upgrade remap: carry the old function's path value to new_module.

    Returns the action list expected by the upgrade machinery: add a
    function (named by the enclosing scope's `name` — presumably the
    port being remapped; verify against the surrounding remap table)
    holding the old function's first parameter as a PathObject.
    """
    controller = _get_controller()  # kept: call may have side effects
    path_value = PathObject(old_func.params[0].strValue)
    added = create_function(new_module, name, [path_value])
    return [('add', added, 'module', new_module.id)]
def _set_result(self, results, latest):
    """Publish the matched entries on the output ports.

    'most_recent' gets the newest entry's file, 'results' a PathObject
    per entry, and 'count' the number of matches.
    """
    self.set_output('count', len(results))
    self.set_output('results',
                    [PathObject(entry.filename) for entry in results])
    self.set_output('most_recent', PathObject(latest.filename))
def convert_output_param(value, _type):
    """Wrap a raw output value in the runtime object for its port type.

    Path subclasses become PathObject, Color subclasses become an
    InstanceObject carrying the value as its 'tuple' attribute; any
    other type passes through unchanged.  (Path is checked first,
    matching the original precedence.)
    """
    if issubclass(_type, Path):
        result = PathObject(value)
    elif issubclass(_type, Color):
        result = InstanceObject(tuple=value)
    else:
        result = value
    return result
def _set_result(self, entry):
    """Publish the entry's file on the 'path' port as a PathObject."""
    result = PathObject(entry.filename)
    self.set_output('path', result)