def _download(full_path, local_folder_path, dry_run=False):
    """
    Given a folder or file, download all the files contained
    within it (not recursive).

    :param full_path: remote path/glob identifying the file(s) to fetch
    :param local_folder_path: local destination folder (``~`` is expanded)
    :param dry_run: if True, report what would happen but download nothing
    """
    if dry_run:
        print('Running in dry run mode. Not downloading any files.')

    local_folder_path = os.path.expanduser(local_folder_path)
    if not os.path.exists(local_folder_path):
        print("Creating local download folder {}".format(local_folder_path))
        # Fix: the original repeated the os.path.exists() check here even
        # though the enclosing branch already established the folder is
        # missing; the duplicate check was redundant and has been removed.
        if not dry_run:
            os.makedirs(local_folder_path)

    # API will determine depth based on number of "/" in the glob
    # Add */** to match in any vault (recursive)
    files = Object.all(glob=full_path, limit=1000, object_type='file')
    if not files:
        print("No file(s) found at --full-path {}\nIf attempting to download "
              "multiple files, try using a glob 'vault:/path/folder/*'"
              .format(full_path))

    for file_ in files:
        if not dry_run:
            file_.download(local_folder_path)
        # NOTE(review): this message is also printed in dry-run mode, when
        # nothing was actually downloaded; preserved for compatibility.
        print('Downloaded: {} to {}/{}'.format(
            file_.full_path, local_folder_path, file_.filename))
def _object_list_helper(self, **params):
    """List the Objects belonging to this vault, forwarding any extra
    filter parameters to ``Object.all``."""
    from solvebio import Object

    # Build the query with this vault's id pinned, then fetch.
    query = dict(params, vault_id=self.id)
    return Object.all(client=self._client, **query)
def _object_list_helper(self, **params):
    """Query Objects scoped to this vault.

    Any keyword arguments are passed straight through to ``Object.all``
    as additional filters.
    """
    from solvebio import Object

    params['vault_id'] = self.id
    results = Object.all(client=self._client, **params)
    return results
def tag(args):
    """Tags a list of paths with provided tags"""
    # Collect all writable objects matching each requested glob.
    # API will determine depth based on number of "/" in the glob
    # Add */** to match in any vault (recursive)
    matched = []
    for path in args.full_path:
        matched.extend(list(Object.all(
            glob=path, permission='write', limit=1000)))

    exclusions = args.exclude or []
    vaults_touched = set()
    candidates = []

    # Runs through all objects to get tagging candidates
    # taking exclusions and object_type filters into account.
    for obj in matched:
        if should_exclude(obj.full_path, exclusions, dry_run=args.dry_run):
            continue
        if should_tag_by_object_type(args, obj):
            candidates.append(obj)
            vaults_touched.add(obj.vault_id)

    if not candidates:
        print('No taggable objects found at provided locations.')
        return

    # If args.no_input, changes will be applied immediately.
    # Otherwise, prints the objects and if tags will be applied or not
    for obj in candidates:
        obj.tag(args.tag,
                remove=args.remove,
                dry_run=args.dry_run,
                apply_save=args.no_input)

    # Without --no-input, the pass above was preview-only: confirm, then
    # re-run with apply_save=True.
    if args.no_input:
        return

    print('')
    answer = raw_input(
        'Are you sure you want to apply the above changes to '
        '{} object(s) in {} vault(s)? [y/N] '
        .format(len(candidates), len(vaults_touched))
    )
    print('')

    if answer.strip().lower() != 'y':
        print('Not applying changes.')
        return

    for obj in candidates:
        obj.tag(args.tag,
                remove=args.remove,
                dry_run=args.dry_run,
                apply_save=True)
def import_file(args):
    """
    Given a dataset and a local path, upload and import the file(s).

    Command arguments (args):

        * create_dataset and its args
        * capacity
        * template_id
        * template_file
        * tag
        * metadata
        * metadata_json_file
        * create_vault
        * full_path
        * commit_mode
        * remote_source
        * dry_run
        * follow
        * file (list)
    """
    if args.dry_run:
        print("NOTE: Running import command in dry run mode")

    # Resolve/validate the dataset's full path up front.
    full_path, path_dict = Object.validate_full_path(args.full_path)

    files_list = []
    if args.remote_source:
        # Validate files: each --file value is treated as a remote glob
        # and expanded via the API.
        for file_fp in args.file:
            files_ = list(Object.all(glob=file_fp, limit=1000))
            if not files_:
                # NOTE(review): args.remote_source is always truthy in this
                # branch, so the conditional always yields 'remote '.
                print("Did not find any {}files at path {}".format(
                    'remote ' if args.remote_source else '', file_fp))
            else:
                for file_ in files_:
                    print("Found file: {}".format(file_.full_path))
                    files_list.append(file_)
    else:
        # Local files
        # Note: if these are globs or folders, then this will
        # create a multi-file manifest which is deprecated
        # and should be updated to one file per import.
        files_list = [fp for fp in args.file]

    # Nothing to import: report all the paths the user gave and abort.
    if not files_list:
        print("Exiting. No files were found at the following {}paths: {}"
              .format('remote ' if args.remote_source else '',
                      ', '.join(args.file)))
        sys.exit(1)

    # Resolve the import template: by id, from a file, or none at all.
    if args.template_id:
        try:
            template = DatasetTemplate.retrieve(args.template_id)
        except SolveError as e:
            # Re-raise anything that isn't a plain "not found".
            if e.status_code != 404:
                raise e
            print("No template with ID {0} found!".format(args.template_id))
            sys.exit(1)
    elif args.template_file:
        template = _create_template_from_file(args.template_file,
                                              args.dry_run)
    else:
        template = None

    # Ensure the dataset exists. Create if necessary.
    if args.create_dataset:
        dataset = create_dataset(args, template=template)
    else:
        try:
            dataset = Object.get_by_full_path(full_path,
                                              assert_type='dataset')
        except solvebio.errors.NotFoundError:
            print("Dataset not found: {0}".format(full_path))
            print("Tip: use the --create-dataset flag "
                  "to create one from a template")
            sys.exit(1)

    # Dry run: list what would be imported, then stop before any
    # imports are created.
    if args.dry_run:
        print("Importing the following files/paths into dataset: {}"
              .format(full_path))
        for file_ in files_list:
            if args.remote_source:
                print(file_.full_path)
            else:
                print(file_)
        return

    # Generate a manifest from the local files
    # (remote files are referenced by object_id instead).
    imports = []
    for file_ in files_list:
        if args.remote_source:
            kwargs = dict(object_id=file_.id)
        else:
            manifest = solvebio.Manifest()
            manifest.add(file_)
            kwargs = dict(manifest=manifest.manifest)

        # Add template params
        if template:
            kwargs.update(template.import_params)

        # Create the import
        import_ = DatasetImport.create(
            dataset_id=dataset.id,
            commit_mode=args.commit_mode,
            **kwargs
        )
        imports.append(import_)

    # Either tail the dataset's activity or point the user at the web UI.
    if args.follow:
        dataset.activity(follow=True)
    else:
        mesh_url = 'https://my.solvebio.com/activity/'
        print("Your import has been submitted, view details at: {0}"
              .format(mesh_url))

    return imports, dataset