def _build(self, parsed_args): """Build container """ if self.build: try: self._dockerpy_test() tag = self._repo_tag() dockerfile = self._dockerfile() print_stderr('Building {}'.format(tag)) start_time = milliseconds() # TODO - build_args, labels, nocache, quiet, forcerm result = self.dockerpy.images.build(path=self.working_dir, tag=tag, dockerfile=dockerfile, pull=self.pull, rm=True) print_stderr('Finished ({} msec)'.format(milliseconds() - start_time)) for log_line in result[1]: txt = log_line.get('stream', '').strip() if txt is not None and txt != '': self.messages.append( ('build', log_line.get('stream', None))) return True except Exception as err: if self.ignore_errors: self.messages.append(('push', str(err))) return False else: raise
def _push(self, parsed_args): """Push built container """ if self.push: try: self._dockerpy_test() tag = self._repo_tag() print_stderr('Pushing {}'.format(tag)) start_time = milliseconds() # TODO - auth_config for log_line in self.dockerpy.images.push(tag, stream=True, decode=True): text_line = log_line.get('status', '').strip() if text_line not in ('Preparing', 'Waiting', 'Layer already exists', 'Pushing', 'Pushed', 'denied'): self.messages.append(('push', text_line)) print_stderr('Finished ({} msec)'.format(milliseconds() - start_time)) return True except Exception as err: if self.ignore_errors: self.messages.append(('push', str(err))) return False else: raise
def show_curl(response_object, verify=True):
    """Print the curl command equivalent to a response's originating request.

    Output is gated on the TAPIS_CLI_SHOW_CURL setting; a rendering
    failure is reported in place of the command rather than raised.
    """
    if not TAPIS_CLI_SHOW_CURL:
        return
    try:
        rendered = curlify.to_curl(response_object.request, verify)
    except Exception as err:
        rendered = 'Failed to render curl command: {0}'.format(err)
    print_stderr(rendered)
def take_action(self, parsed_args):
    """Delete a remote path and return a one-row summary of the outcome.

    Returns (headers, data) where data holds the full lists for
    json/yaml formatters and counts for tabular output.
    """
    parsed_args = self.preprocess_args(parsed_args)
    self.requests_client.setup(API_NAME, SERVICE_VERSION)
    self.update_payload(parsed_args)
    storage_system, file_path = self.parse_url(parsed_args.agave_uri)
    headers = ('deleted', 'skipped', 'warnings', 'elapsed_msec')
    removed = []
    passed_over = []
    problems = []
    elapsed_msec = 0
    try:
        started = milliseconds()
        delete(file_path, storage_system, agave=self.tapis_client)
        removed.append(file_path)
        elapsed_msec = milliseconds() - started
        if parsed_args.progress:
            print_stderr('Deleted {0}'.format(parsed_args.agave_uri))
    except Exception as exc:
        # A failed deletion is reported as skipped; the error is kept
        passed_over.append(file_path)
        problems.append(exc)
    if parsed_args.formatter in ('json', 'yaml'):
        row = (removed, passed_over, [str(p) for p in problems],
               elapsed_msec)
    else:
        row = (len(removed), len(passed_over), len(problems), elapsed_msec)
    return (headers, row)
def _backup(self, parsed_args): """Backup existing deployed assets """ if self.backup: dep_sys = self.document['deploymentSystem'] dep_path = self.document['deploymentPath'] backup_dep_path = dep_path + '.' + str(seconds()) print_stderr('Backing up agave://{}/{}'.format(dep_sys, dep_path)) start_time = milliseconds() self.messages.append( ('backup', 'src: agave://{}/{}'.format(dep_sys, dep_path))) self.messages.append( ('backup', 'dst: agave://{}/{}'.format(dep_sys, backup_dep_path))) try: manage.move(dep_path, system_id=dep_sys, destination=backup_dep_path, agave=self.tapis_client) print_stderr('Finished ({} msec)'.format(milliseconds() - start_time)) return True except Exception as exc: self.messages.append(('backup', str(exc))) print_stderr('Failed ({} msec)'.format(milliseconds() - start_time)) return False return True
def _upload(self, parsed_args): """Upload asset bundle """ if self.upload: dep_sys = self.document['deploymentSystem'] dep_path = self.document['deploymentPath'] dep_path_parent = os.path.dirname(dep_path) dep_path_temp = os.path.join(dep_path_parent, self._bundle()) print_stderr('Uploading app bundle to agave://{}/{}'.format( dep_sys, dep_path)) start_time = milliseconds() try: # TODO - incorporate working directory manage.makedirs(dep_path_parent, system_id=dep_sys, permissive=True, agave=self.tapis_client) manage.delete(dep_path, system_id=dep_sys, permissive=True, agave=self.tapis_client) uploaded, skipped, errors, ul_bytes, ec_download = upload.upload( self._bundle(), system_id=dep_sys, destination=dep_path_parent, progress=True, agave=self.tapis_client) manage.move(dep_path_temp, system_id=dep_sys, destination=dep_path, agave=self.tapis_client) # Rename dep_path_parent/bundle to dep_path print_stderr('Finished ({} msec)'.format(milliseconds() - start_time)) for u in uploaded: self.messages.append(('upload', u)) for e in errors: self.messages.append(('upload', e)) return True except Exception as exc: self.messages.append(('upload', str(exc))) print_stderr('Failed ({} msec)'.format(milliseconds() - start_time)) return False return True
def upload(local_file_path,
           system_id,
           destination='/',
           excludes=None,
           includes=None,
           force=False,
           sync=False,
           atomic=False,
           progress=True,
           agave=None):
    """Upload a local file or directory tree to a Tapis storage system.

    Walks `local_file_path`, pre-creates the needed remote directories
    under `destination` on `system_id`, then uploads each file via
    _upload. Returns a tuple
    (uploaded, skipped, errors, bytes_uploaded, elapsed_seconds).

    NOTE(review): `atomic` is accepted but never referenced in this
    body — confirm whether it should be forwarded to _upload.
    """
    # Accumulators: uploaded/skipped file paths, exceptions, byte count
    (uploaded, skipped, errors, ul_bytes, runtime) = ([], [], [], 0, None)
    if excludes is None:
        excludes = []
    # Compile files to be uploaded
    if progress:
        print_stderr('Finding file(s) to upload...')
    start_time = seconds()
    # Each record is assumed to be [path, size, timestamp]; a fourth
    # element (remote destination) is appended below — TODO confirm
    # against _local_walk
    upload_files = _local_walk(local_file_path)
    elapsed_walk = seconds() - start_time
    msg = 'Found {0} file(s) in {1}s'.format(len(upload_files), elapsed_walk)
    logger.debug(msg)
    if progress:
        print_stderr(msg)
    # Filter out excludes
    # TODO - make sure abs and relpaths are supported
    # TODO - support some kind of wildcard match
    # upload_files = [f for f in upload_files_all if f[0] not in excludes]
    # if progress:
    #     print_stderr('Excluding {0} file(s)'.format(
    #         len(upload_files_all) - len(upload_files)))
    # Compute which, if any, remote directories might need to be created.
    # Note that these directory names will be relative to the destination
    # path
    if os.path.isfile(local_file_path):
        # Single file: no subtree, so the relative base is '.'
        dir_parent = os.path.dirname(local_file_path)
        dir_basename = '.'
    else:
        dir_parent = os.path.dirname(local_file_path)
        dir_basename = os.path.basename(local_file_path)
    # Directory of each file, relative to the parent of the source path
    local_dirs = [
        relpath(os.path.dirname(f[0]).replace(dir_parent, ''))
        for f in upload_files
    ]
    # Before adding the grandparent to set of created dirs, add the
    # destinations to each upload record (becomes element [3], read in
    # the upload loop below)
    for idx, uf in enumerate(upload_files):
        upload_files[idx].append(os.path.join(destination, local_dirs[idx]))
    # Remove duplicates as each member of create_dirs represents
    # at least one API call
    local_dirs.insert(0, dir_basename)
    create_dirs = []
    for d in local_dirs:
        if d not in create_dirs and d not in ('.', ''):
            create_dirs.append(d)
    # Create the remote directories
    # Do this ahead of time (and manually) to avoid relying on Tapis' files
    # since that service's path handling behavior can be tricky
    for rdir in create_dirs:
        if progress:
            print_stderr('Creating remote directory "{0}"...'.format(rdir))
        makedirs(relpath(rdir),
                 system_id=system_id,
                 destination=abspath(destination),
                 agave=agave)
    # Do the actual uploads
    start_time_all = seconds()
    for ufile in upload_files:
        if progress:
            print_stderr('Uploading {0}...'.format(ufile[0]))
        try:
            _upload(ufile[0],
                    system_id,
                    destination=ufile[3],
                    size=ufile[1],
                    timestamp=ufile[2],
                    includes=includes,
                    excludes=excludes,
                    force=force,
                    sync=sync,
                    agave=agave)
            # Track uploaded files
            uploaded.append(ufile[0])
            # Track cumulative data size
            ul_bytes = ul_bytes + ufile[1]
        # TODO - implement a separate exception for FileExcluded
        except FileExcludedError as fexc:
            errors.append(fexc)
            skipped.append(ufile[0])
        except FileExistsError as fxerr:
            # Pre-existing remote files are errors unless sync/force
            # mode is on, in which case they are merely skipped
            if sync or force:
                skipped.append(ufile[0])
            else:
                errors.append(fxerr)
        except Exception as exc:
            errors.append(exc)
    elapsed_download = seconds() - start_time_all
    msg = 'Uploaded {0} files in {1}s'.format(len(uploaded), elapsed_download)
    logger.debug(msg)
    if progress:
        print_stderr(msg)
    return uploaded, skipped, errors, ul_bytes, elapsed_download
def _upload(self, parsed_args): """Upload asset bundle """ if self.upload: dep_sys = self.document['deploymentSystem'] dep_path = self.document['deploymentPath'] dep_path_parent = os.path.dirname(dep_path) # need the bundle basename for the upload/move workflow to work bundle_basename = os.path.basename(os.path.normpath( self._bundle())) # add date to make tmpdir unique from bundle and deploymentPath dep_path_temp = os.path.join(dep_path_parent, bundle_basename) \ + datetime.now().strftime("-%Y-%m-%d") print_stderr( 'Uploading app asset directory "{0}" to agave://{1}/{2}'. format(self._bundle(), dep_sys, dep_path)) start_time = milliseconds() try: # First, check existence of bundle. No point in taking other action # if it does not exist if not os.path.exists(self._bundle()): raise FileNotFoundError( 'Unable to locate asset directory "{}"'.format( self._bundle())) try: # need relative destination here because # agavepy permissions check will fail on '/' # for public systems manage.makedirs(os.path.basename(dep_path_temp), system_id=dep_sys, permissive=True, destination=dep_path_parent, agave=self.tapis_client) # clear out destination directory manage.delete(dep_path, system_id=dep_sys, permissive=True, agave=self.tapis_client) except Exception as err: self.messages.append(('upload', str(err))) # upload bundle to tmp dir uploaded, skipped, errors, ul_bytes, ec_download = upload.upload( self._bundle(), system_id=dep_sys, destination=dep_path_temp, progress=True, agave=self.tapis_client) # move tmp dir bundle to the destination dir manage.move(os.path.join(dep_path_temp, bundle_basename), system_id=dep_sys, destination=dep_path, agave=self.tapis_client) # delete tmp dir manage.delete(dep_path_temp, system_id=dep_sys, permissive=True, agave=self.tapis_client) print_stderr('Finished ({} msec)'.format(milliseconds() - start_time)) for u in uploaded: self.messages.append(('upload', u)) for e in errors: self.messages.append(('upload', e)) if len(errors) > 0: if 
self.ignore_errors is False: raise Exception('Upload failures: {}'.format( errors.join(';'))) return True except Exception as exc: if self.ignore_errors: self.messages.append(('upload', str(exc))) print_stderr('Failed ({} msec)'.format(milliseconds() - start_time)) return False else: raise return True
def download(source,
             system_id,
             destination='.',
             includes=None,
             excludes=None,
             force=False,
             sync=False,
             atomic=False,
             progress=False,
             agave=None):
    """Download a file or directory tree from a Tapis storage system.

    Walks the remote `source` on `system_id`, recreates its directory
    layout under `destination`, and downloads each file. Returns a
    tuple (downloaded, skipped, errors, bytes_downloaded,
    elapsed_seconds).

    NOTE(review): `atomic` is accepted but _download is always called
    with atomic=False — confirm whether that is intentional.
    """
    downloaded, skipped, errors, dl_bytes = [], [], [], 0
    if excludes is None:
        excludes = []
    if includes is None:
        includes = []
    if progress:
        print_stderr('Walking remote resource...')
    start_time = seconds()
    all_targets = walk(source,
                       system_id=system_id,
                       dotfiles=True,
                       recurse=True,
                       agave=agave)
    elapsed_walk = seconds() - start_time
    msg = 'Found {0} file(s) in {1}s'.format(len(all_targets), elapsed_walk)
    logger.debug(msg)
    if progress:
        print_stderr(msg)
    # Remote absolute paths, sizes, and modification timestamps
    abs_names = [f['path'] for f in all_targets]
    sizes = [f['length'] for f in all_targets]
    mods = [datestring_to_epoch(f['lastModified']) for f in all_targets]
    # Create local destination paths
    dirs = [os.path.dirname(p) for p in abs_names]
    if not isfile(source, system_id=system_id, agave=agave):
        # Directory download: strip the remote prefix so the tree is
        # rooted at `destination`
        if source.endswith('/'):
            sub_root = source
        else:
            sub_root = os.path.dirname(source)
            sub_root = re.sub('([/]+)$', '', sub_root)
        # NOTE(review): sub_root is used as a regex pattern; paths
        # containing regex metacharacters may not be stripped correctly
        dirs = [re.sub(sub_root, '', d) for d in dirs]
        dest_names = [
            os.path.join(destination, relpath(re.sub(sub_root, '', f)))
            for f in abs_names
        ]
        dirs = [d for d in dirs if d != sub_root]
        make_dirs = [os.path.join(destination, relpath(p)) for p in dirs]
        # Create destinations (avoid shadowing the builtin `dir`)
        for local_dir in make_dirs:
            makedirs(local_dir, exist_ok=True)
    else:
        # Single-file download lands directly in `destination`
        sub_root = os.path.dirname(source)
        dest_names = [os.path.join(destination, os.path.basename(source))]
    # Do the downloads
    downloads = [list(a) for a in zip(abs_names, sizes, mods, dest_names)]
    start_time_all = seconds()
    for src, size, mod, dest in downloads:
        if progress:
            print_stderr('Downloading {0}...'.format(os.path.basename(src)))
        try:
            _download(src,
                      system_id,
                      size=size,
                      timestamp=mod,
                      dest=dest,
                      includes=includes,
                      excludes=excludes,
                      force=force,
                      sync=sync,
                      atomic=False,
                      agave=agave)
            downloaded.append(src)
            # Track cumulative data size
            dl_bytes = dl_bytes + size
        except (FileExistsError, FileExcludedError) as fxerr:
            # Existing/excluded files are always retained in errors;
            # with sync/force they are additionally recorded as skipped
            if sync or force:
                skipped.append(src)
                errors.append(fxerr)
            else:
                errors.append(fxerr)
        except Exception as exc:
            errors.append(exc)
    elapsed_download = seconds() - start_time_all
    # Fixed: report the number actually downloaded, not all targets
    msg = 'Downloaded {0} files in {1}s'.format(len(downloaded),
                                                elapsed_download)
    logger.debug(msg)
    if progress:
        print_stderr(msg)
    return downloaded, skipped, errors, dl_bytes, \
        elapsed_walk + elapsed_download
def download(source, job_uuid, destination=None, excludes=None, includes=None, force=False, sync=False, atomic=False, progress=False, agave=None): downloaded, skipped, errors, runtime = ([], [], [], None) if destination is None: dest_dir = str(job_uuid) else: dest_dir = destination # else: # includes = [os.path.join('/', i) for i in includes] if progress: print_stderr('Walking remote resource...') start_time = seconds() # Try to avoid timeouts since walk is already pretty slow agave.refresh() all_targets = walk(source, job_uuid=job_uuid, dotfiles=True, recurse=True, agave=agave) elapsed_walk = seconds() - start_time msg = 'Found {0} file(s) in {1}s'.format(len(all_targets), elapsed_walk) logger.debug(msg) if progress: print_stderr(msg) # Extract absolute names # Under jobs, paths all begin with / paths = [f['path'] for f in all_targets] # Tapis Jobs returns a spurious "null/" at the start of # each file's path. This is a temporary workaround. paths = [re.sub('null/', '/', p) for p in paths] sizes = [f['length'] for f in all_targets] mods = [datestring_to_epoch(f['lastModified']) for f in all_targets] # Create local destination paths dirs = [os.path.dirname(p) for p in paths] make_dirs = [ os.path.join(dest_dir, relpath(p)) for p in dirs if p not in ('/', './') ] # Create destinations for dir in make_dirs: makedirs(dir, exist_ok=True) # Local filenames including destination directory rel_paths = [os.path.join(dest_dir, relpath(p)) for p in paths] downloads = [list(a) for a in zip(paths, sizes, mods, rel_paths)] start_time_all = seconds() # Try to avoid timeouts since walk is already pretty slow agave.refresh() for src, size, mod, dest in downloads: if progress: print_stderr('Downloading {0}...'.format(os.path.basename(src))) try: # TODO - refresh token is size > threshold _download(src, job_uuid, size=size, timestamp=mod, dest=dest, includes=includes, excludes=excludes, atomic=atomic, force=force, sync=sync, agave=agave) downloaded.append(src) except 
FileExcludedError as fexc: errors.append(fexc) skipped.append(src) except OutputFileExistsError as ofe: if sync or force: skipped.append(src) errors.append(ofe) except Exception as exc: errors.append(exc) elapsed_download = seconds() - start_time_all msg = 'Downloaded {0} files in {1}s'.format(len(paths), elapsed_download) logger.debug(msg) if progress: print_stderr(msg) return downloaded, skipped, errors, elapsed_walk + elapsed_download