def file_download(guid):
    """
    Stream a file to the client by its guid.

    :param guid: guid of file
    :return: Response obj — 200 with the file body, 403 if the file is
             outside the caller's scope, 404 if not found / no ready replica
    """
    try:
        file = files_.first(guid=guid)
    except Exception as e:
        # Log the caught exception instance (the old code referenced
        # Exception.message on the class, which does not exist)
        _logger.error(str(e))
        return make_response(jsonify({'error': 'File not found'}), 404)
    if file.scope != getScope(g.user.username):
        return make_response(jsonify({'error': 'File is not in your scope'}), 403)
    for replica in file.replicas:
        # Serve only a ready replica that lives on the default storage element
        if replica.se == current_app.config['DEFAULT_SE'] and replica.status == 'ready':
            fullpath = current_app.config['DATA_PATH'] + replica.lfn
            # Binary mode + context manager: payload is octet-stream and the
            # old code leaked the file handle
            with open(fullpath, 'rb') as fh:
                payload = fh.read()
            rr = Response(payload, status=200,
                          content_type='application/octet-stream')
            rr.headers['Content-Disposition'] = 'inline; filename="%s"' % file.lfn
            rr.headers['Content-MD5'] = file.md5sum
            # Track download count on the catalog record
            file.downloaded += 1
            files_.save(file)
            return rr
    # TODO: add support message
    return make_response(jsonify({'error': 'No ready replica'}), 404)
def new_replica(f, site):
    """
    Creates new replica of file on se
    :param f: File to be replicated
    :param site: Site with target SE
    :type f: File
    :type site: Site
    :return: Created replica
    :rtype: Replica
    """
    if not isinstance(f, File):
        raise Exception("Illegal file class: not File")
    if not isinstance(site, Site):
        # Fixed copy-paste error: this message previously claimed "not File"
        raise Exception("Illegal site class: not Site")
    r = Replica()
    r.original_id = f.id
    r.se = site.se
    # New replicas start in 'defined' state until the data is moved
    r.status = "defined"
    r.lfn = get_file_path(f)
    replicas_.save(r)
    # Add replica to file
    f.replicas.append(r)
    files_.save(f)
    return r
def file_fetch(container_guid, lfn):
    """
    GET: /pilot/file/<container_guid>/<lfn>/fetch
    Returns file in response
    :param container_guid: Guid of container
    :type container_guid: str
    :param lfn: Local FileName
    :type lfn: str
    :return: File
    :rtype: application/octet-stream
    """
    if ':' in container_guid:
        # Strip an optional "scope:" prefix from the guid
        container_guid = container_guid.split(':')[-1]
    container = conts_.first(guid=container_guid)
    for item in container.files:
        f = item.file
        if f.lfn == lfn:
            for replica in f.replicas:
                if replica.se == current_app.config['DEFAULT_SE']:
                    fullpath = current_app.config['DATA_PATH'] + replica.lfn
                    # BUG FIX: the old code rebound `f` to the open file
                    # handle, so the later f.lfn / f.md5sum / f.downloaded
                    # accesses hit the handle and raised AttributeError.
                    with open(fullpath, 'rb') as fh:
                        payload = fh.read()
                    rr = Response(payload, status=200,
                                  content_type='application/octet-stream')
                    rr.headers['Content-Disposition'] = 'inline; filename="%s"' % f.lfn
                    rr.headers['Content-MD5'] = f.md5sum
                    f.downloaded += 1
                    files_.save(f)
                    return rr
    raise WebpandaError('File not found')
def setFileMeta(fileid, lfn):
    """
    Compute and store size/checksum metadata for a file record.

    :param fileid: id of the File record to update
    :param lfn: path of the physical file used for the computations
    """
    f = files_.get(fileid)
    # Fill only the missing fields — never overwrite existing metadata
    if f.fsize is None:
        f.fsize = fsize(lfn)
    if f.md5sum is None:
        f.md5sum = md5sum(lfn)
    if f.checksum is None:
        f.checksum = adler32(lfn)
    f.modification_time = datetime.utcnow()
    files_.save(f)
def reg_file_in_cont_byname(user, lfn, c, t):
    """
    Registers file in catalog by filename
    :param user: File owner
    :param lfn: Local FileName
    :param c: Container to register in
    :param t: Type of file (input, output, log)
    :type user: User
    :type lfn: str
    :type c: Container
    :type t: str
    :return: True/False
    """
    # Validate ALL arguments before persisting anything: the old code saved
    # the File first and validated the container/type afterwards, leaving an
    # orphan catalog record when the later checks raised.
    if not (isinstance(lfn, str) or isinstance(lfn, unicode)):
        raise Exception("Illegal lfn class: not str")
    if len(lfn) == 0:
        raise Exception("Illegal lfn length: zero")
    if not isinstance(c, Container):
        raise Exception("Illegal catalog class: not Container")
    if not isinstance(t, str):
        raise Exception("Illegal type class: not str")
    if t not in ['input', 'output', 'log', 'intermediate']:
        raise Exception("Illegal type value: " + t)

    # Prepare File obj
    f = File()
    f.scope = getScope(user.username)
    f.attemptn = 0
    f.guid = getGUID(f.scope, None)
    f.lfn = lfn
    f.status = "defined"
    f.transfertask = None
    # fsize / md5sum / checksum are left unset here (presumably filled later
    # by setFileMeta — confirm with callers)
    f.modification_time = datetime.utcnow()
    f.downloaded = 0
    # Save to fc
    files_.save(f)

    catalog_item = Catalog()
    catalog_item.file = f
    catalog_item.cont = c
    catalog_item.type = t
    # TODO: Add registration time
    catalog_.save(catalog_item)
    return True
def cloneReplica(replicaid, se):
    """
    Copy the replica with id `replicaid` onto storage element `se`.

    Returns the id of an already-existing replica on `se` when one exists,
    otherwise moves the data via `movedata`, records a new ready Replica,
    links it into every container of the file, and returns its id.
    Raises Exception when `movedata` returns a non-zero code.
    """
    replica = replicas_.get(replicaid)
    file = replica.original
    replicas = file.replicas
    # Short-circuit: the file already has a replica on the target SE
    for r in replicas:
        if se == r.se:
            _logger.debug('Replica exists: id=%s' % r.id)
            # Update expired time
            return r.id
    # Define base replica
    from_se = sites_.first(se=replica.se)
    fromParams = {}
    # 'link' replicas store an indirect lfn; resolve it first
    if replica.status == 'link':
        lfn = getLinkLFN(file.scope, replica.lfn)
    else:
        lfn = replica.lfn
    # Define result replica params
    to_se = sites_.first(se=se)
    # Destination directory = lfn path without the file name component
    dest = '/'.join(lfn.split('/')[:-1])
    toParams = {'dest': dest}
    ec, filesinfo = movedata({}, [replica.lfn], from_se.plugin, fromParams,
                             to_se.plugin, toParams)
    if ec == 0:
        r = Replica()
        # Backfill file metadata from the transfer report when missing
        if file.fsize is None:
            file.fsize = filesinfo[replica.lfn]['fsize']
        if file.md5sum is None:
            file.md5sum = filesinfo[replica.lfn]['md5sum']
        if file.checksum is None:
            file.checksum = filesinfo[replica.lfn]['checksum']
        r.se = se
        r.status = 'ready'
        r.lfn = lfn
        replicas_.save(r)
        file.modification_time = datetime.utcnow()
        file.replicas.append(r)
        files_.save(file)
        # Expose the new replica inside each container's directory
        for cont in file.containers:
            linkReplica(
                r.id,
                '/%s/%s' % (client_config.DEFAULT_SCOPE, cont.cont.guid))
        return r.id
    raise Exception('movedata return code: %s' % ec)
def cloneReplica(replicaid, se):
    """
    Clone replica `replicaid` to storage element `se`.

    If a replica already exists on `se`, its id is returned unchanged.
    Otherwise the data is transferred with `movedata`; on success a new
    'ready' Replica is stored, linked into the file's containers, and its
    id returned. A non-zero `movedata` return code raises Exception.
    """
    replica = replicas_.get(replicaid)
    file = replica.original
    replicas = file.replicas
    for r in replicas:
        # Nothing to do when the target SE already holds a replica
        if se == r.se:
            _logger.debug('Replica exists: id=%s' % r.id)
            # Update expired time
            return r.id
    # Define base replica
    from_se = sites_.first(se=replica.se)
    fromParams = {}
    if replica.status == 'link':
        # Resolve the indirect lfn of a 'link' replica
        lfn = getLinkLFN(file.scope, replica.lfn)
    else:
        lfn = replica.lfn
    # Define result replica params
    to_se = sites_.first(se=se)
    dest = '/'.join(lfn.split('/')[:-1])  # strip the file-name component
    toParams = {'dest': dest}
    ec, filesinfo = movedata({}, [replica.lfn], from_se.plugin, fromParams,
                             to_se.plugin, toParams)
    if ec == 0:
        r = Replica()
        # Fill missing file metadata from the movedata report
        if file.fsize is None:
            file.fsize = filesinfo[replica.lfn]['fsize']
        if file.md5sum is None:
            file.md5sum = filesinfo[replica.lfn]['md5sum']
        if file.checksum is None:
            file.checksum = filesinfo[replica.lfn]['checksum']
        r.se = se
        r.status = 'ready'
        r.lfn = lfn
        replicas_.save(r)
        file.modification_time = datetime.utcnow()
        file.replicas.append(r)
        files_.save(file)
        for cont in file.containers:
            linkReplica(r.id,
                        '/%s/%s' % (client_config.DEFAULT_SCOPE,
                                    cont.cont.guid))
        return r.id
    raise Exception('movedata return code: %s' % ec)
def save(o):
    """
    Wrapper for .save methods of Service instances

    Dispatches *o* to the service matching its type.

    :param o: object to save
    :type o: File/Container/Replica
    :return: True when a matching service handled the object, else False
    """
    # Type → service dispatch table; first match wins
    dispatch = (
        (File, files_),
        (Container, conts_),
        (Replica, replicas_),
    )
    for cls, service in dispatch:
        if isinstance(o, cls):
            service.save(o)
            return True
    return False
def file_fetch(container_guid, lfn):
    """
    GET: /pilot/file/<container_guid>/<lfn>/fetch
    Returns file in response
    :param container_guid: Guid of container
    :type container_guid: str
    :param lfn: Local FileName
    :type lfn: str
    :return: File
    :rtype: application/octet-stream
    """
    if ':' in container_guid:
        # Accept guids prefixed with "scope:"
        container_guid = container_guid.split(':')[-1]
    container = conts_.first(guid=container_guid)
    for entry in container.files:
        f = entry.file
        if f.lfn == lfn:
            for replica in f.replicas:
                if replica.se == current_app.config['DEFAULT_SE']:
                    fullpath = current_app.config['DATA_PATH'] + replica.lfn
                    # BUG FIX: previously the open() handle was assigned to
                    # `f`, clobbering the File record whose lfn/md5sum/
                    # downloaded fields are used right below.
                    with open(fullpath, 'rb') as fh:
                        payload = fh.read()
                    rr = Response(payload, status=200,
                                  content_type='application/octet-stream')
                    rr.headers['Content-Disposition'] = 'inline; filename="%s"' % f.lfn
                    rr.headers['Content-MD5'] = f.md5sum
                    f.downloaded += 1
                    files_.save(f)
                    return rr
    raise WebpandaError('File not found')
def new_file(user, lfn):
    """
    Creates new file object
    :param user: File owner
    :param lfn: Local FileName
    :type user: User
    :type lfn: str
    :return: Created file
    :rtype: File
    """
    if not isinstance(user, User):
        raise Exception("Illegal user class: not User")
    # Accept both str and unicode, consistent with reg_file_in_cont_byname
    # (the old check rejected unicode filenames)
    if not (isinstance(lfn, str) or isinstance(lfn, unicode)):
        raise Exception("Illegal lfn class: not str")
    if len(lfn) == 0:
        raise Exception("Illegal lfn length: zero")
    # Prepare File obj
    f = File()
    f.scope = getScope(user.username)
    f.attemptn = 0
    f.guid = getGUID(f.scope, None)
    f.lfn = lfn
    f.status = "defined"
    f.transfertask = None
    # fsize / md5sum / checksum stay unset here (presumably filled later by
    # setFileMeta — confirm with callers)
    f.modification_time = datetime.utcnow()
    f.downloaded = 0
    # Save to fc
    files_.save(f)
    return f
def upload():
    """
    Handle a batch file upload (Ajax or plain form POST).

    Creates a fresh 'open' container, stores each uploaded file on the
    default SE, registers file + replica in the catalog, and returns either
    an ajax response or a redirect to the jobs page.
    """
    form = request.form
    # Create a unique container guid for this particular batch of uploads.
    cguid = 'job.' + commands.getoutput('uuidgen')
    # Is the upload using Ajax, or a direct POST by the form?
    is_ajax = form.get("__ajax", None) == "true"
    # Create new container
    container = Container()
    container.guid = cguid
    container.status = 'open'
    conts_.save(container)
    # Process files in request
    for upload in request.files.getlist("file"):
        # BUG FIX: rsplit("/")[0] picked the FIRST path component of the
        # client filename; the basename (last component) is the lfn.
        lfn = upload.filename.rsplit("/", 1)[-1]
        scope = getScope(g.user.username)
        guid = getGUID(scope, lfn)
        site = sites_.first(se=current_app.config['DEFAULT_SE'])
        # Target folder for these uploads.
        fdir = '/' + os.path.join('system', scope, guid)
        target = site.datadir + fdir
        try:
            os.makedirs(target)
        except OSError:
            msg = "Couldn't create upload directory: %s" % target
            return ajax_response(False, msg) if is_ajax else msg
        replfn = os.path.join(fdir, lfn)
        destination = os.path.join(target, lfn)
        upload.save(destination)
        if not os.path.isfile(destination):
            return ajax_response(False, "Couldn't save file: %s" % target)
        # Check file existence in catalog
        adler = adler32(destination)
        md5 = md5sum(destination)
        size = fsize(destination)
        file_id = ddm_checkifexists(lfn, size, adler, md5)
        if file_id:
            # If file exists
            file = files_.get(file_id)
        else:
            # Otherwise create new
            file = File()
            file.scope = scope
            file.guid = guid
            file.type = 'input'
            file.lfn = lfn
            file.token = ''
            file.status = 'defined'
            files_.save(file)
        setFileMeta(file.id, destination)
        # Record a ready replica at the freshly written location
        replica = Replica()
        replica.se = site.se
        replica.status = 'ready'
        replica.lfn = replfn
        replica.original = file
        replicas_.save(replica)
        # Register file in container
        fc.reg_file_in_cont(file, container, 'input')
    if is_ajax:
        return ajax_response(True, cguid)
    else:
        return redirect(url_for("jobs.jobs"))
def new_job():
    """
    Creates new job from a JSON request.

    Reads sw_id/script/cores (plus optional ftp_dir, guids, tags) from the
    request payload, builds a container with input/output files, schedules
    cloneReplica tasks and an async job submission.

    :return: dict with the new job id and the container guid
    """
    g.user = request.oauth.user
    scope = getScope(request.oauth.user.username)
    js = request.json
    data = js['data']
    distr_id = data['sw_id']
    params = data['script']
    corecount = data['cores']
    site = sites_.first(ce=current_app.config['DEFAULT_CE'])
    # BUG FIX: was distrs_.get(id) — the builtin `id`, not the requested
    # software id from the payload
    distr = distrs_.get(distr_id)
    container = Container()
    guid = 'job.' + commands.getoutput('uuidgen')
    container.guid = guid
    container.status = 'open'
    conts_.save(container)
    # Process ftp files
    if 'ftp_dir' in data.keys():
        ftp_dir = data['ftp_dir']
        register_ftp_files(ftp_dir, scope, container.guid)
    # Process guid list
    if 'guids' in data.keys():
        guids = data['guids']
        for f in guids:
            if f != '':
                file_ = files_.first(guid=f)
                if file_ is not None:
                    # Register file in catalog
                    fc.reg_file_in_cont(file_, container, 'input')
                else:
                    raise WebpandaError('File with guid %s not found' % f)
    ofiles = ['results.tgz']
    # Starts cloneReplica tasks
    ftasks = prepareInputFiles(container.id, site.se)
    # Saves output files meta
    for lfn in ofiles:
        file = File()
        file.scope = scope
        file.guid = getGUID(scope, lfn)
        file.lfn = lfn
        file.status = 'defined'
        files_.save(file)
        # Register file in catalog
        fc.reg_file_in_cont(file, container, 'output')
    # Counts files
    allfiles = container.files
    nifiles = 0
    nofiles = 0
    for f in allfiles:
        if f.type == 'input':
            nifiles += 1
        if f.type == 'output':
            nofiles += 1
    # Defines job meta
    job = Job()
    job.pandaid = None
    job.status = 'pending'
    job.owner = request.oauth.user
    job.params = params
    job.distr = distr
    job.container = container
    job.creation_time = datetime.utcnow()
    job.modification_time = datetime.utcnow()
    job.ninputfiles = nifiles
    job.noutputfiles = nofiles
    job.corecount = corecount
    job.tags = data['tags'] if 'tags' in data.keys() else ""
    jobs_.save(job)
    # Async sendjob: run all cloneReplica tasks, then submit the job
    chord(ftasks)(async_send_job.s(jobid=job.id, siteid=site.id))
    return {'id': job.id, 'container_id': guid}
def send_job(jobid, siteid):
    """
    Build a PanDA JobSpec for the stored job and submit it.

    Collects input/output files from the job's container, adds a fake input
    when none exist, attaches a log file spec, submits via submitJobs and
    stores the returned PandaID (or 'submit_error' status) on the job.

    :param jobid: id of the Job record
    :param siteid: id of the target Site record
    :return: 0
    """
    _logger.debug('Jobid: ' + str(jobid))
    site = sites_.get(siteid)
    job = jobs_.get(int(jobid))
    cont = job.container
    files_catalog = cont.files
    fscope = getScope(job.owner.username)
    datasetName = '{}:{}'.format(fscope, cont.guid)
    distributive = job.distr.name
    release = job.distr.release
    # Prepare runScript: substitute placeholders in the distributive command
    parameters = job.distr.command
    parameters = parameters.replace("$COMMAND$", job.params)
    parameters = parameters.replace("$USERNAME$", job.owner.username)
    parameters = parameters.replace("$WORKINGGROUP$", job.owner.working_group)
    # Prepare metadata
    metadata = dict(user=job.owner.username)
    # Prepare PanDA Object
    pandajob = JobSpec()
    pandajob.jobDefinitionID = int(time.time()) % 10000
    pandajob.jobName = cont.guid
    pandajob.transformation = client_config.DEFAULT_TRF
    pandajob.destinationDBlock = datasetName
    pandajob.destinationSE = site.se
    pandajob.currentPriority = 1000
    pandajob.prodSourceLabel = 'user'
    pandajob.computingSite = site.ce
    pandajob.cloud = 'RU'
    pandajob.VO = 'atlas'
    pandajob.prodDBlock = "%s:%s" % (fscope, pandajob.jobName)
    pandajob.coreCount = job.corecount
    pandajob.metadata = json.dumps(metadata)
    #pandajob.workingGroup = job.owner.working_group
    if site.encode_commands:
        # It requires script wrapper on cluster side
        pandajob.jobParameters = '%s %s %s "%s"' % (cont.guid, release,
                                                    distributive, parameters)
    else:
        pandajob.jobParameters = parameters

    has_input = False
    for fcc in files_catalog:
        if fcc.type == 'input':
            f = fcc.file
            guid = f.guid
            fileIT = FileSpec()
            fileIT.lfn = f.lfn
            fileIT.dataset = pandajob.prodDBlock
            fileIT.prodDBlock = pandajob.prodDBlock
            fileIT.type = 'input'
            fileIT.scope = fscope
            fileIT.status = 'ready'
            fileIT.GUID = guid
            pandajob.addFile(fileIT)
            has_input = True
        if fcc.type == 'output':
            f = fcc.file
            fileOT = FileSpec()
            fileOT.lfn = f.lfn
            fileOT.destinationDBlock = pandajob.prodDBlock
            fileOT.destinationSE = pandajob.destinationSE
            fileOT.dataset = pandajob.prodDBlock
            fileOT.type = 'output'
            fileOT.scope = fscope
            fileOT.GUID = f.guid
            pandajob.addFile(fileOT)
            # Save replica meta
            fc.new_replica(f, site)
    if not has_input:
        # Add fake input — PanDA requires at least one input file spec
        fileIT = FileSpec()
        fileIT.lfn = "fake.input"
        fileIT.dataset = pandajob.prodDBlock
        fileIT.prodDBlock = pandajob.prodDBlock
        fileIT.type = 'input'
        fileIT.scope = fscope
        fileIT.status = 'ready'
        fileIT.GUID = "fake.guid"
        pandajob.addFile(fileIT)
    # Prepare log file
    fileOL = FileSpec()
    fileOL.lfn = "%s.log.tgz" % pandajob.jobName
    fileOL.destinationDBlock = pandajob.destinationDBlock
    fileOL.destinationSE = pandajob.destinationSE
    fileOL.dataset = '{}:logs'.format(fscope)
    fileOL.type = 'log'
    fileOL.scope = 'panda'
    pandajob.addFile(fileOL)
    # Save log meta
    log = File()
    log.scope = fscope
    log.lfn = fileOL.lfn
    log.guid = getGUID(log.scope, log.lfn)
    log.type = 'log'
    log.status = 'defined'
    files_.save(log)
    # Save replica meta
    fc.new_replica(log, site)
    # Register file in container
    fc.reg_file_in_cont(log, cont, 'log')
    # Submit job
    o = submitJobs([pandajob])
    x = o[0]
    try:
        # update PandaID; narrowed from a bare `except:` so that
        # SystemExit/KeyboardInterrupt are not swallowed
        PandaID = int(x[0])
        job.pandaid = PandaID
        job.ce = site.ce
    except Exception:
        job.status = 'submit_error'
    jobs_.save(job)
    return 0
def file_save(container_guid, lfn):
    """
    POST: /pilot/file/<container_guid>/<lfn>/save
    Saves file from request, returns file guid
    :param container_guid: Guid of container
    :type container_guid: str
    :param lfn: Local FileName
    :type lfn: str
    :return: guid
    :rtype: json
    """
    site = sites_.first(se=current_app.config['DEFAULT_SE'])
    if ':' in container_guid:
        # Strip an optional "scope:" prefix
        container_guid = container_guid.split(':')[-1]
    container = conts_.first(guid=container_guid)
    if container.status != 'open':
        raise WebpandaError('Unable to upload: Container is not open')
    cc = container.files
    # Look up an existing File record in the container by lfn
    ff = None
    for c in cc:
        f = c.file
        if f.lfn == lfn:
            ff = f
    if not ff:
        # No catalog entry yet — create one and register it as input
        ff = File()
        ff.scope = getScope(g.user.username)
        ff.lfn = lfn
        ff.guid = getGUID(ff.scope, ff.lfn)
        ff.status = 'defined'
        files_.save(ff)
        # Register file in container
        fc.reg_file_in_cont(ff, container, 'input')
    path = os.path.join(site.datadir, getScope(g.user.username),
                        container.guid)
    replfn = '/' + os.path.join(getScope(g.user.username), container.guid,
                                ff.lfn)
    destination = os.path.join(path, ff.lfn)
    # Existing replica on the default SE decides how the upload proceeds
    for r in ff.replicas:
        if r.se == site.se:
            destination = site.datadir + r.lfn
            file_dir = '/'.join(destination.split('/')[:-1])
            if r.status == 'ready':
                if os.path.isfile(destination):
                    # Check fsize, md5 or adler
                    raise WebpandaError('Replica exists')
                else:
                    # 'ready' replica whose physical file is missing
                    r.status = 'broken'
                    replicas_.save(r)
                    raise WebpandaError('Broken replica')
            elif r.status == 'defined':
                # Replica reserved but not written yet: write the payload now
                try:
                    os.makedirs(file_dir)
                except(Exception):
                    pass
                f = open(destination, 'wb')
                f.write(request.data)
                f.close()
                # Update file info
                setFileMeta(ff.id, destination)
                r.status = 'ready'
                replicas_.save(r)
                return {'guid': ff.guid}
            else:
                raise WebpandaError('Replica status: %s' % r.status)
    # No replica on the default SE: write the file and create one
    replica = Replica()
    if os.path.isfile(destination):
        raise WebpandaError('Unable to upload: File exists')
    try:
        os.makedirs(path)
    except(Exception):
        _logger.debug('Path exists: %s' % path)
    f = open(destination, 'wb')
    f.write(request.data)
    f.close()
    # Update file info
    setFileMeta(ff.id, destination)
    # Create/change replica
    replica.se = site.se
    replica.status = 'ready'
    replica.lfn = replfn
    replica.token = ''
    replica.original = ff
    replicas_.save(replica)
    return {'guid': ff.guid}
def registerLocalFile(arg, dirname, names, scope):
    """Register files from local dir to container
    :param arg: Container guid
    :param dirname: Abs dir
    :param names: File name
    :param scope: Scope to upload files in
    :return:
    """
    site = sites_.first(se=client_config.DEFAULT_SE)
    _logger.debug(str(arg))
    cont = conts_.first(guid=arg)
    files = cont.files
    for name in names:
        fpath = os.path.join(dirname, name)
        fobj = None
        # Check in container: reuse the File already registered under this lfn
        for file in files:
            if file.lfn == name:
                fobj = file
        # Check in catalog by size + checksums
        if not fobj:
            destination = os.path.join(dirname, name)
            adler = adler32(destination)
            md5 = md5sum(destination)
            size = fsize(destination)
            file_id = ddm_checkifexists(name, size, adler, md5)
            if file_id:
                # If file exists
                fobj = files_.get(file_id)
        # Still unknown: create a new catalog entry and register it
        if not fobj:
            fobj = File()
            fobj.scope = scope
            fobj.lfn = name
            fobj.guid = getGUID(fobj.scope, fobj.lfn)
            fobj.type = 'input'
            fobj.status = 'defined'
            files_.save(fobj)
            setFileMeta(fobj.id, fpath)
            # Register file in catalog
            fc.reg_file_in_cont(fobj, cont, "input")
        # Ensure a ready replica exists on the default SE
        replicas = fobj.replicas
        replica = None
        for r in replicas:
            if r.se == site.se and r.status == 'ready':
                replica = r
        if not replica:
            # Copy the data into the system area and record the replica
            ldir = '/' + os.path.join('system', fobj.scope, fobj.guid)
            ddm_localmakedirs(ldir)
            ddm_localcp(fpath[len(site.datadir):], ldir)
            replica = Replica()
            replica.se = site.se
            replica.status = 'ready'
            replica.token = ''
            replica.lfn = os.path.join(ldir, fobj.lfn)
            replica.original = fobj
            replicas_.save(replica)
def job():
    """
    New job form view
    :return: Response obj
    """
    form = NewJobForm(request.form)
    if request.method == 'POST':
        site = sites_.get(int(form.site.data))
        distr_name, distr_release = form.distr.data.split(':')
        distr = distrs_.first(name=distr_name, release=int(distr_release))
        container_guid = form.container.data
        try:
            container = conts_.first(guid=container_guid)
        except Exception as e:
            # Log the caught instance (old code referenced the nonexistent
            # Exception.message class attribute)
            _logger.error(str(e))
            return make_response(jsonify({'error': 'Container not found'}), 404)
        if site.encode_commands:
            # By default frontend encodes with base64 job script parts separated by ";"
            # It requires script wrapper on cluster side
            jparams = form.params.data
        else:
            # Set site.encode_commands as False if you want to send command string without base64 encoding
            jparams = ';'.join([b64decode(command)
                                for command in form.params.data.split(';')])
        ifiles = request.form.getlist('ifiles[]')
        iguids = request.form.getlist('iguids[]')
        iconts = request.form.getlist('iconts[]')
        ofiles = ['{guid}.out.tgz'.format(guid=container.guid)]
        scope = getScope(g.user.username)
        # Process ftp files
        ftp_dir = form.ftpdir.data
        register_ftp_files(ftp_dir, scope, container.guid)
        # Process guid list
        for f in iguids:
            if f != '':
                file = files_.first(guid=f)
                if file is not None:
                    # Register files in container
                    fc.reg_file_in_cont(file, container, 'input')
                else:
                    return make_response(
                        jsonify({'error': "GUID {} not found".format(f)}))
        # Process containers
        for c in iconts:
            if c != '':
                try:
                    form_cont = conts_.first(guid=c)
                except Exception as e:
                    # Same fix as above: log the instance, not the class
                    _logger.error(str(e))
                    return make_response(
                        jsonify({'error': 'Container in form not found'}), 404)
                for f in form_cont.files:
                    # Register file in catalog
                    fc.reg_file_in_cont(f.file, container, 'input')
        # Processes urls
        for f in ifiles:
            if f != '':
                from_se, path, token = getUrlInfo(f)
                replfn = ':/'.join([from_se, path])
                # Check if used before
                file_id = ddm_checkexternalifexists('', replfn)
                if file_id:
                    file = files_.get(file_id)
                else:
                    lfn = path.split('/')[-1]
                    guid = getGUID(scope, lfn)
                    file = File()
                    file.scope = scope
                    file.guid = guid
                    file.type = 'input'
                    file.lfn = lfn
                    file.status = 'defined'
                    files_.save(file)
                    replica = Replica()
                    replica.se = from_se
                    replica.status = 'link'
                    # Separate url & token
                    replica.lfn = replfn
                    replica.token = token
                    replica.original = file
                    replicas_.save(replica)
                # Register file in container
                fc.reg_file_in_cont(file, container, 'input')
        # Starts cloneReplica tasks
        ftasks = prepareInputFiles(container.id, site.se)
        # Saves output files meta
        for lfn in ofiles:
            file = File()
            file.scope = scope
            file.guid = getGUID(scope, lfn)
            file.type = 'output'
            file.lfn = lfn
            file.status = 'defined'
            files_.save(file)
            # Register file in container
            fc.reg_file_in_cont(file, container, 'output')
        # Counts files
        allfiles = container.files
        nifiles = 0
        nofiles = 0
        for f in allfiles:
            if f.type == 'input':
                nifiles += 1
            if f.type == 'output':
                nofiles += 1
        # Defines job meta
        job = Job()
        job.pandaid = None
        job.status = 'pending'
        job.owner = g.user
        job.params = jparams
        job.distr = distr
        job.container = container
        job.creation_time = datetime.utcnow()
        job.modification_time = datetime.utcnow()
        job.ninputfiles = nifiles
        job.noutputfiles = nofiles
        job.corecount = form.corecount.data
        job.tags = form.tags.data if form.tags.data != "" else None
        jobs_.save(job)
        # Async sendjob: clone replicas first, then submit
        chord(ftasks)(async_send_job.s(jobid=job.id, siteid=site.id))
        return redirect(url_for('jobs.jobs'))
    form.distr.choices = [("%s:%s" % (distr.name, distr.release),
                           "%s: %s" % (distr.name, distr.version))
                          for distr in distrs_.find().order_by('name').order_by('version')]
    form.site.choices = [(site.id, "{ce}".format(ce=site.ce))
                         for site in sites_.find(active=1).order_by('ce')]
    return render_template("dashboard/jobs/new.html", form=form)
def upload():
    """
    Handle a batch file upload (Ajax or plain form POST).

    Creates a fresh 'open' container, stores each uploaded file on the
    default SE, registers file + replica in the catalog, and returns either
    an ajax response or a redirect to the jobs page.
    """
    form = request.form
    # Create a unique container guid for this particular batch of uploads.
    cguid = 'job.' + commands.getoutput('uuidgen')
    # Is the upload using Ajax, or a direct POST by the form?
    is_ajax = form.get("__ajax", None) == "true"
    # Create new container
    container = Container()
    container.guid = cguid
    container.status = 'open'
    conts_.save(container)
    # Process files in request
    for upload in request.files.getlist("file"):
        # BUG FIX: rsplit("/")[0] picked the FIRST path component of the
        # client filename; the basename (last component) is the lfn.
        lfn = upload.filename.rsplit("/", 1)[-1]
        scope = getScope(g.user.username)
        guid = getGUID(scope, lfn)
        site = sites_.first(se=current_app.config['DEFAULT_SE'])
        # Target folder for these uploads.
        fdir = '/' + os.path.join('system', scope, guid)
        target = site.datadir + fdir
        try:
            os.makedirs(target)
        except OSError:
            msg = "Couldn't create upload directory: %s" % target
            return ajax_response(False, msg) if is_ajax else msg
        replfn = os.path.join(fdir, lfn)
        destination = os.path.join(target, lfn)
        upload.save(destination)
        if not os.path.isfile(destination):
            return ajax_response(False, "Couldn't save file: %s" % target)
        # Check file existence in catalog
        adler = adler32(destination)
        md5 = md5sum(destination)
        size = fsize(destination)
        file_id = ddm_checkifexists(lfn, size, adler, md5)
        if file_id:
            # If file exists
            file = files_.get(file_id)
        else:
            # Otherwise create new
            file = File()
            file.scope = scope
            file.guid = guid
            file.type = 'input'
            file.lfn = lfn
            file.token = ''
            file.status = 'defined'
            files_.save(file)
        setFileMeta(file.id, destination)
        # Record a ready replica at the freshly written location
        replica = Replica()
        replica.se = site.se
        replica.status = 'ready'
        replica.lfn = replfn
        replica.original = file
        replicas_.save(replica)
        # Register file in container
        fc.reg_file_in_cont(file, container, 'input')
    if is_ajax:
        return ajax_response(True, cguid)
    else:
        return redirect(url_for("jobs.jobs"))
def file_save(container_guid, lfn):
    """
    POST: /pilot/file/<container_guid>/<lfn>/save
    Saves file from request, returns file guid
    :param container_guid: Guid of container
    :type container_guid: str
    :param lfn: Local FileName
    :type lfn: str
    :return: guid
    :rtype: json
    """
    site = sites_.first(se=current_app.config['DEFAULT_SE'])
    if ':' in container_guid:
        # Drop an optional "scope:" prefix
        container_guid = container_guid.split(':')[-1]
    container = conts_.first(guid=container_guid)
    if container.status != 'open':
        raise WebpandaError('Unable to upload: Container is not open')
    cc = container.files
    # Find an existing File record in the container with this lfn
    ff = None
    for c in cc:
        f = c.file
        if f.lfn == lfn:
            ff = f
    if not ff:
        # Not known yet: create the catalog entry and register it as input
        ff = File()
        ff.scope = getScope(g.user.username)
        ff.lfn = lfn
        ff.guid = getGUID(ff.scope, ff.lfn)
        ff.status = 'defined'
        files_.save(ff)
        # Register file in container
        fc.reg_file_in_cont(ff, container, 'input')
    path = os.path.join(site.datadir, getScope(g.user.username),
                        container.guid)
    replfn = '/' + os.path.join(getScope(g.user.username), container.guid,
                                ff.lfn)
    destination = os.path.join(path, ff.lfn)
    # An existing replica on the default SE determines the outcome
    for r in ff.replicas:
        if r.se == site.se:
            destination = site.datadir + r.lfn
            file_dir = '/'.join(destination.split('/')[:-1])
            if r.status == 'ready':
                if os.path.isfile(destination):
                    # Check fsize, md5 or adler
                    raise WebpandaError('Replica exists')
                else:
                    # Marked ready but the physical file is gone
                    r.status = 'broken'
                    replicas_.save(r)
                    raise WebpandaError('Broken replica')
            elif r.status == 'defined':
                # Reserved replica: write the request payload into place
                try:
                    os.makedirs(file_dir)
                except (Exception):
                    pass
                f = open(destination, 'wb')
                f.write(request.data)
                f.close()
                # Update file info
                setFileMeta(ff.id, destination)
                r.status = 'ready'
                replicas_.save(r)
                return {'guid': ff.guid}
            else:
                raise WebpandaError('Replica status: %s' % r.status)
    # No replica on the default SE yet: write the file and create one
    replica = Replica()
    if os.path.isfile(destination):
        raise WebpandaError('Unable to upload: File exists')
    try:
        os.makedirs(path)
    except (Exception):
        _logger.debug('Path exists: %s' % path)
    f = open(destination, 'wb')
    f.write(request.data)
    f.close()
    # Update file info
    setFileMeta(ff.id, destination)
    # Create/change replica
    replica.se = site.se
    replica.status = 'ready'
    replica.lfn = replfn
    replica.token = ''
    replica.original = ff
    replicas_.save(replica)
    return {'guid': ff.guid}