def handle(self, *args, **options):
    """
    Link data files listed in a JSON/HJSON spec into a project.

    Selects the project by the 'id' or 'uid' option, reads the 'data'
    field of the spec file given by the 'json' option, and creates one
    Data object per entry via auth.create_data().
    """
    # Collect the parameters. Renamed to avoid shadowing builtins id/json.
    pk = options['id']
    uid = options['uid']
    json_fname = options['json']
    root = options['root']

    # One of the selectors must be present.
    # Fixes precedence bug: `not id or uid` parsed as `(not id) or uid`,
    # which rejected every uid-only invocation.
    if not (pk or uid):
        logger.error("Must specify 'id' or 'uid' parameters.")
        return

    # Select project by id or uid.
    if pk:
        query = Project.objects.filter(id=pk)
    else:
        query = Project.objects.filter(uid=uid)

    # Get the project.
    project = query.first()

    # Project must exist; report whichever selector was used.
    # Fixes copy-paste bug: both original checks tested `id`, so a uid
    # miss fell through silently with project=None.
    if not project:
        if pk:
            logger.error(f"Project with id={pk} not found!")
        else:
            logger.error(f"Project with uid={uid} not found!")
        return

    # The 'data' field of the spec has the files.
    # `with` ensures the spec file handle is closed promptly.
    with open(json_fname) as stream:
        json_data = hjson.load(stream)
    json_data = json_data.get('data', [])

    # The data field is empty.
    if not json_data:
        logger.error("JSON file does not have a valid data field")
        return

    # The datalist is built from the json.
    data_list = [Bunch(**row) for row in json_data]

    # Add each collected datatype. Reversed iteration preserved from the
    # original (presumably so the first spec entry ends up newest).
    for bunch in reversed(data_list):
        # This is the path to the data.
        path = bunch.value

        # Makes the path absolute (relative entries are joined onto root).
        path = path if path.startswith("/") else os.path.join(root, path)

        # Create the data.
        auth.create_data(project=project, path=path, type=bunch.type,
                         name=bunch.name, summary=bunch.summary, text=bunch.help)
def handle(self, *args, **options):
    """
    Link a file or directory into a project as a Data object, or refresh
    the table of contents of an existing Data object when 'update' is set.
    """
    # Pull the command-line options into locals.
    pid = options['pid']
    did = options['did']
    path = options['path']
    update_toc = options["update"]
    text = options['text']
    name = options['name']
    dtype = options['type']

    # Look for an existing data object with this uid.
    data = Data.objects.get_all(uid=did).first()

    # Existing data short-circuits creation; optionally rebuild its TOC.
    if data:
        if update_toc:
            data.make_toc()
            print(f"*** Data id : {did} table of contents updated.")
        return

    # Resolve the target project; it must exist.
    project = Project.objects.filter(uid=pid).first()
    if not project:
        logger.error(f"Project uid={pid} not found!")
        return

    # The path must point at either a file or a directory.
    is_file, is_dir = os.path.isfile(path), os.path.isdir(path)
    if not (is_file or is_dir):
        logger.error(f"Path is not a file nor a directory: {path}")
        return

    # Announce the target project, then derive a fallback name from the path.
    print(f"*** Project: {project.name} ({project.uid})")
    if is_dir:
        print(f"*** Linking directory: {path}")
        altname = os.path.split(path)[0].split(os.sep)[-1]
    else:
        print(f"*** Linking file: {path}")
        altname = os.path.split(path)[-1]

    # Explicit name wins over the derived one.
    name = name or altname
    print(f"*** Creating data: {name}")

    # Create the data.
    auth.create_data(project=project, path=path, type=dtype, name=name, text=text)
def handle(self, *args, **options):
    """
    Link a file or directory into a project selected by 'id' or 'uid'.

    Validates the selectors and the path, derives a fallback data name
    from the path, then creates the Data object via auth.create_data().
    """
    # Collect the parameters. Renamed to avoid shadowing builtins id/type.
    pk = options['id']
    uid = options['uid']
    path = options['path']
    text = options['text']
    name = options['name']
    dtype = options['type']

    # Project selection parameter must be set.
    if not (pk or uid):
        logger.error("Must specify 'id' or 'uid' parameters.")
        return

    # Select project by id or uid.
    if pk:
        query = Project.objects.get_all(id=pk)
    else:
        query = Project.objects.get_all(uid=uid)

    # Get the project.
    project = query.first()

    # Project must exist.
    if not project:
        logger.error(f"Project not found! id={pk} uid={uid}.")
        return

    # Slightly different course of action on files and directories.
    isfile = os.path.isfile(path)
    isdir = os.path.isdir(path)

    # The path must point at either a file or a directory.
    # Fixed garbled message ("not a file a directory") to match the
    # wording used by the sibling command.
    if not (isfile or isdir):
        logger.error(f"Path is not a file nor a directory: {path}")
        return

    # Generate alternate names based on input directory type.
    print(f"*** Project: {project.name} ({project.uid})")
    if isdir:
        print(f"*** Linking directory: {path}")
        altname = os.path.split(path)[0].split(os.sep)[-1]
    else:
        print(f"*** Linking file: {path}")
        altname = os.path.split(path)[-1]

    # Select the name (explicit name wins over the derived one).
    name = name or altname
    print(f"*** Creating data: {name}")

    # Create the data.
    auth.create_data(project=project, path=path, type=dtype, name=name, text=text)
def make_data_dir(self, root_project, path, name):
    """
    Create a new Data object (a "directory") inside the named project and
    respond to the FTP client with a 257 success line.

    Returns the virtual path that was created.
    """
    # Resolve the project instance once.
    # Fixes a bug: the original passed the raw QuerySet into the Data
    # filter below (`project=queryset`), which Django rejects for exact
    # lookups; only create_data() received the .first() instance.
    project = self.fs.projects.filter(name=root_project).first()
    user = self.fs.user["user"]

    # Create the data entry owned by the current FTP user.
    auth.create_data(project=project, user=user, name=name)

    # Refresh the /data tab with usable (ready or pending) data.
    self.fs.data = models.Data.objects.filter(
        project=project,
        state__in=(models.Data.READY, models.Data.PENDING))

    # RFC-959 requires embedded double-quotes in the 257 reply be doubled.
    line = self.fs.fs2ftp(path)
    self.respond('257 "%s" directory created.' % line.replace('"', '""'))
    logger.info(f"path={path}")
    return path
def ftp_STOR(self, file, mode='w'):
    """
    Handle an FTP upload (STOR) into the virtual file system.

    Only uploads directly into a project's /data tab (no tail path) are
    handled here; any other target falls through to a deliberate crash.

    NOTE(review): this looks like work-in-progress — the `testing` probe,
    the commented-out `fd` line, and the trailing `1 / 0` all read as
    debug residue. Compare with the fuller ftp_STOR variant elsewhere in
    the project before cleaning up.
    """
    # Split the virtual FTP path into its project/tab/name/tail parts.
    root_project, tab, name, tail = parse_virtual_path(ftppath=file)

    # Only a bare name directly under the data tab is handled.
    if tab == 'data' and name and not tail:
        # Look for an existing entry with this name in the tab.
        instance = query_tab(tab=tab, project=root_project, name=name, show_instance=True)
        if instance:
            # Refuse to overwrite existing data.
            self.respond('550 File already exists.')
            return
        else:
            # Create a fresh Data object in the target project.
            data = auth.create_data(
                project=self.fs.projects.filter(name=root_project).first(),
                name=name)
            # Make the new object visible in the cached data listing.
            self.fs.data = chain(self.fs.data, models.Data.objects.filter(pk=data.pk))
            # NOTE(review): result only logged — presumably a debug probe.
            testing = self.run_as_current_user(self.fs.open, file, mode + 'b')
            #fd = self.data_channel.file_obj
            logger.info(f"file={file}, testing={testing}")
            # Hand back the on-disk location for the upload target.
            return os.path.join(data.get_data_dir(), name)

    # Return the real file name here, taken from the name and stuff.
    # NOTE(review): intentional crash marker for the unimplemented paths?
    1 / 0
def test_add_data(self):
    """Adding a data directory to a project must link it on disk."""
    # Locate the fixture directory that ships next to this test module.
    fixture_dir = auth.join(__file__, "..", "data")

    # Link the directory into the test project.
    created = auth.create_data(project=self.project, path=fixture_dir)

    # The data directory must exist after linking.
    linked = os.path.exists(created.get_data_dir())
    self.assertTrue(linked, "Directory not being linked")
def ftp_STOR(self, file, mode='w'):
    """
    Handle an FTP upload (STOR) into the virtual file system.

    Maps the virtual FTP path onto a real on-disk location inside a
    project's data directory (creating Data objects and subdirectories
    as needed), then streams the upload through the data channel.
    Returns the real file path on success.
    """
    # Split the virtual FTP path into its project/tab/name/tail parts.
    root_project, tab, name, tail = parse_virtual_path(ftppath=file)

    if name:
        # Resolve the project and any existing entry with this name.
        project = self.fs.projects.filter(name=root_project).first()
        instance = query_tab(tab=tab, project=root_project, name=name, show_instance=True)

        # A direct hit with no tail means the target already exists.
        if instance and not tail:
            self.respond('550 File already exists.')
            return

        if instance and tail:
            # Upload lands inside an existing data directory.
            file = os.path.join(instance.get_data_dir(), *tail)
            # Ensure that the sub dirs in tail exist
            self.create_dirs(file=file, instance=instance, tail=tail)
        elif not instance and tab == "results":
            # Results are produced by jobs; uploads are not allowed there.
            self.respond('550 Can not upload to the results tab.')
            return
        elif not instance and tab == 'data':
            # First upload with this name: create the Data object.
            instance = auth.create_data(project=project, name=name)
            if tail:
                file = os.path.join(instance.get_data_dir(), *tail)
                # Ensure that the sub dirs in tail exist
                self.create_dirs(file=file, instance=instance, tail=tail)
            else:
                file = os.path.join(instance.get_data_dir(), name)

        # Refresh the data tab with usable (ready or pending) data.
        self.fs.data = models.Data.objects.filter(
            project=project,
            state__in=(models.Data.READY, models.Data.PENDING))

        # NOTE(review): linked-directory write guard disabled — confirm
        # whether it should be re-enabled before relying on this path.
        #if self.is_linked_dir(file=file, data_dir=instance.get_data_dir()):
        #    self.respond('550 Can not write to a linked directory.')
        #    return

        # Load the stream into the DTP (Data Transfer Protocol) channel.
        fd = self.run_as_current_user(self.fs.open, file, mode + 'b')
        self.load_dtp(file_object=fd)
        return file

    elif os.path.exists(file):
        # Nameless target that already exists on disk.
        self.respond('550 File already exists.')
        return
    else:
        # Can only upload to the /data tab for now.
        self.respond("550 Can not upload a file here.")
        return
def setUp(self):
    """Create an owner, a project, and one data object for the tests."""
    logger.setLevel(logging.WARNING)

    # Set up a generic owner with a randomized username.
    username = f"tested{util.get_uuid(10)}"
    owner = models.User.objects.create_user(username=username, email="*****@*****.**")
    owner.set_password("tested")
    self.owner = owner

    # Set up the project the tests will edit.
    self.project = auth.create_project(user=owner, name="tested", text="Text",
                                       summary="summary", uid="tested")
    self.project.save()

    # Set up generic data for editing; this test file doubles as payload.
    self.data = auth.create_data(project=self.project, path=__file__, name="tested")
def test_dynamic_field(self):
    """
    Test the data generator: a DROPDOWN spec bound to project data must
    produce a form field.
    """
    from biostar.engine import const

    # Create a data object to populate the dropdown choices.
    data = auth.create_data(self.project, path=__file__)
    display_type = const.DROPDOWN

    # Minimal field spec sourcing its values from the project.
    json_data = dict(display=display_type, value=data.name, source='PROJECT')

    # Generate the form field from the spec.
    field = factory.dynamic_field(json_data, project=self.project)

    # Fail explicitly when generation returns nothing.
    # (Originally `self.assertFalse(<non-empty message>)` — a truthy
    # string passed to assertFalse just to force a failure; self.fail
    # states the intent directly.)
    if not field:
        self.fail(f"field generator for display={display_type} failed")
def setUp(self):
    """Build an owner, project, data, analysis, and job plus the URL params."""
    logger.setLevel(logging.WARNING)

    # The project owner used throughout the tests.
    owner = models.User.objects.create(username="******", email="*****@*****.**")
    owner.set_password("testing")
    owner.save()
    self.owner = owner

    # A public project containing one data object, one analysis, one job.
    self.project = auth.create_project(user=owner, name="Test project",
                                       privacy=models.Project.PUBLIC, uid="testing")
    data = auth.create_data(project=self.project, path=__file__)
    analysis = auth.create_analysis(project=self.project, json_text='{}', template="")
    self.job = auth.create_job(analysis=analysis)

    # Keyword parameter dicts used when reversing view URLs.
    self.proj_params = dict(uid=self.project.uid)
    self.analysis_params = dict(uid=analysis.uid)
    self.data_params = dict(uid=data.uid)
    self.job_params = dict(uid=self.job.uid)
def ftp_MKD(self, path):
    """
    Handle FTP MKD (make directory) against the virtual file system.

    A directory at the root creates a new Project; a directory directly
    under a project's /data tab creates a new Data object. Deeper paths
    are unfinished (see the trailing crash marker).
    """
    # Copied from parent class
    # The 257 response is supposed to include the directory
    # name and in case it contains embedded double-quotes
    # they must be doubled (see RFC-959, chapter 7, appendix 2).
    root_project, tab, name, tail = parse_virtual_path(ftppath=path)
    projects = self.fs.projects
    user = self.fs.user["user"]

    # Creating a directory at the root dir -> a new Project.
    if root_project and not tab:
        if projects.filter(name=root_project):
            self.respond('550 Directory already exists.')
            return
        else:
            # Create a new project
            project = auth.create_project(user=user, name=root_project)
            # Make it visible in the cached project listing.
            self.fs.projects = chain(
                projects, models.Project.objects.filter(pk=project.pk))
            line = self.fs.fs2ftp(path)
            self.respond('257 "%s" directory created.' % line.replace('"', '""'))
            return path

    # Directory directly under /data -> a new Data object.
    if tab == "data" and name and not tail:
        instance = query_tab(tab=tab, project=root_project, name=name, show_instance=True)
        if instance:
            self.respond('550 Directory already exists.')
            return
        else:
            project = self.fs.projects.filter(name=root_project)
            data = auth.create_data(project=project.first(), user=user, name=name)
            # Make the new data visible in the cached data listing.
            self.fs.data = chain(self.fs.data, models.Data.objects.filter(pk=data.pk))
            line = self.fs.fs2ftp(path)
            self.respond('257 "%s" directory created.' % line.replace('"', '""'))
            logger.info(f"path={path}")
            return path

    # Add the data to the tail and update the toc_name.
    logger.info(
        f"new_dir={path}, path={path}, root_project={root_project}, project={projects.filter(name=root_project)}"
    )
    # NOTE(review): deliberate crash marker — deeper MKD paths appear
    # unimplemented; the return below is unreachable.
    1 / 0
    return path