Example #1
def conts_list():
    """
    Get the list of containers (AJAX endpoint)
    :return: JSON response with container data
    """
    user = g.user

    hours_limit = session.get('hours_limit', current_app.config['HOURS_LIMIT'])
    display_limit = session.get('display_limit', current_app.config['DISPLAY_LIMIT'])
    scope = getScope(user.username)
    # show user containers
    conts = conts_.find(status='user').order_by(Container.id.desc()).limit(30)

    # prepare json
    conts_o = []
    for cont in conts:
        cont_o = {}
        cont_o['id'] = cont.id
        cont_o['guid'] = cont.guid
        cont_o['status'] = cont.status
        cont_o['n'] = len(cont.files)
        conts_o.append(cont_o)
    data = {}
    data['data'] = conts_o

    return make_response(jsonify(data), 200)
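
The view wraps the serialized containers in a {'data': [...]} envelope, which is what dashboard table widgets typically expect. Below is a minimal client-side sketch of consuming the endpoint; the host and the '/conts/list' route are assumptions for illustration, since the route decorator is not shown in the example.

# Minimal sketch of a client for the containers list endpoint.
# BASE_URL and the '/conts/list' route are assumptions; the real route is
# defined by the blueprint, which is not shown above.
import requests

BASE_URL = 'https://webpanda.example.org'  # hypothetical host

def fetch_containers(session_cookies):
    # The view relies on the logged-in user (g.user), so the browser session
    # cookies have to be forwarded with the request.
    r = requests.get(BASE_URL + '/conts/list', cookies=session_cookies)
    r.raise_for_status()
    for cont in r.json()['data']:
        print('%s %s %s %s' % (cont['id'], cont['guid'],
                               cont['status'], cont['n']))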
Example #2
def container():
    """
    New container form view
    :return: Response obj
    """
    form = NewContainerForm()
    if request.method == 'POST':
        user = g.user
        scope = getScope(user.username)

        ftpdir = form.ftpdir.data

        # Create a unique container guid for this particular batch of uploads.
        cguid = 'job.' + commands.getoutput('uuidgen')

        # Create new container
        container = Container()
        container.guid = cguid
        container.status = 'open'
        conts_.save(container)

        # Check if ftpdir empty
        if ftpdir and len(ftpdir) > 0:
            async_uploadContainer.delay(ftpdir, scope, container.guid)

        return redirect(url_for('conts.cont_info', guid=container.guid))

    return render_template("dashboard/cont/cont_new.html", form=form)
Example #3
def jobLog(id):
    """
    Returns job stdout & stderr
    :param id: Job id
    :return: json data
    """
    job = jobs_.get(id=id)
    extractLog(id)
    locdir = '/%s/.sys/%s' % (getScope(job.owner.username), job.container.guid)
    absdir = ddm_getlocalabspath(locdir)
    fout = find('payload_stdout.txt', absdir)
    ferr = find('payload_stderr.txt', absdir)
    out = ''
    err = ''
    if len(fout) > 0:
        with open(fout[0]) as f:
            out = f.read()
    if len(ferr) > 0:
        with open(ferr[0]) as f:
            err = f.read()
    data = {}
    data['id'] = id
    data['out'] = out
    data['err'] = err
    return make_response(jsonify({'data': data}), 200)
Example #4
def files_list():
    """
    Get the list of files (AJAX endpoint)
    :return: JSON response with file data
    """
    user = g.user

    hours_limit = session.get('hours_limit', current_app.config['HOURS_LIMIT'])
    display_limit = session.get('display_limit',
                                current_app.config['DISPLAY_LIMIT'])
    scope = getScope(user.username)
    # show the user's files
    files = files_.find(scope=scope).order_by(File.id).limit(display_limit)

    # prepare json
    files_o = []
    for file in files:
        file_o = {}
        file_o['id'] = file.id
        file_o['scope'] = file.scope
        file_o['guid'] = file.guid
        file_o['type'] = file.type
        file_o['lfn'] = file.lfn
        file_o['status'] = file.status
        files_o.append(file_o)
    data = {}
    data['data'] = files_o

    return make_response(jsonify(data), 200)
Example #5
def file_download(guid):
    """
    Get file as stream
    :param guid: guid of file
    :return: Response obj
    """
    try:
        file = files_.first(guid=guid)
    except Exception as e:
        _logger.error(str(e))
        return make_response(jsonify({'error': 'File not found'}), 404)
    if file is None:
        return make_response(jsonify({'error': 'File not found'}), 404)
    if file.scope != getScope(g.user.username):
        return make_response(jsonify({'error': 'File is not in your scope'}),
                             403)

    replicas = file.replicas
    for replica in replicas:
        if replica.se == current_app.config[
                'DEFAULT_SE'] and replica.status == 'ready':
            fullpath = current_app.config['DATA_PATH'] + replica.lfn
            with open(fullpath, 'rb') as f:
                rr = Response(f.read(),
                              status=200,
                              content_type='application/octet-stream')
            rr.headers[
                'Content-Disposition'] = 'inline; filename="%s"' % file.lfn
            rr.headers['Content-MD5'] = file.md5sum
            file.downloaded += 1
            files_.save(file)
            return rr
    #TODO: add support message
    return make_response(jsonify({'error': 'No ready replica'}), 404)
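
Because file_download() sets a Content-MD5 header next to the payload, a client can verify the transfer end to end. A hedged sketch follows; the '/file/<guid>/download' path is an assumption (the route decorator is not shown), and it assumes file.md5sum holds a hex digest as produced by a typical md5sum() helper.

# Sketch of downloading a file and checking it against the Content-MD5
# header set by file_download(). The route is an assumption for
# illustration only.
import hashlib
import requests

def download_and_verify(base_url, guid, dest, cookies=None):
    r = requests.get('%s/file/%s/download' % (base_url, guid),
                     cookies=cookies, stream=True)
    r.raise_for_status()
    md5 = hashlib.md5()
    with open(dest, 'wb') as out:
        for chunk in r.iter_content(chunk_size=65536):
            out.write(chunk)
            md5.update(chunk)
    expected = r.headers.get('Content-MD5')  # assumed hex digest, per the view
    if expected and md5.hexdigest() != expected:
        raise ValueError('MD5 mismatch for %s' % guid)
    return dest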
Example #6
def reg_file_in_cont_byname(user, lfn, c, t):
    """
    Registers file in catalog by filename

    :param user: File owner
    :param lfn: Local FileName
    :param c: Container to register in
    :param t: Type of file (input, output, log)
    :type user: User
    :type lfn: str
    :type c: Container
    :type t: str
    :return: True on success
    """
    # Validate arguments before anything is written to the catalog
    if not isinstance(lfn, (str, unicode)):
        raise Exception("Illegal lfn class: not str")
    if len(lfn) == 0:
        raise Exception("Illegal lfn length: zero")
    if not isinstance(c, Container):
        raise Exception("Illegal container class: not Container")
    if not isinstance(t, str):
        raise Exception("Illegal type class: not str")
    if t not in ['input', 'output', 'log', 'intermediate']:
        raise Exception("Illegal type value: " + t)

    # Prepare File obj
    f = File()
    f.scope = getScope(user.username)
    f.attemptn = 0
    f.guid = getGUID(f.scope, None)
    f.lfn = lfn
    f.status = "defined"
    f.transfertask = None
    # f.fsize =
    # md5sum =
    # checksum =
    f.modification_time = datetime.utcnow()
    f.downloaded = 0

    # Save to fc
    files_.save(f)

    catalog_item = Catalog()
    catalog_item.file = f
    catalog_item.cont = c
    catalog_item.type = t
    # TODO: Add registration time

    catalog_.save(catalog_item)
    return True
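
A short usage sketch; current_user and open_cont are placeholders for a User and an open Container obtained elsewhere in the application.

# Hypothetical usage of reg_file_in_cont_byname(); current_user and
# open_cont stand in for objects fetched elsewhere.
if reg_file_in_cont_byname(current_user, 'results.txt', open_cont, 'output'):
    print('results.txt registered in container %s' % open_cont.guid)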
Example #7
def new_cont():
    """
    POST: /pilot/container

    Saves new container

    :return: ftp/guid
    :rtype: json
    """
    cont = Container()
    guid = 'job.' + commands.getoutput('uuidgen')

    cont.guid = guid
    cont.status = 'open'
    conts_.save(cont)

    url = '%s/%s' % (current_app.config['FTP'], guid)
    os.makedirs(
        os.path.join(current_app.config['UPLOAD_FOLDER'],
                     getScope(g.user.username), cont.guid))
    return {'ftp': url, 'guid': cont.guid}
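
The endpoint returns the FTP upload location together with the new container guid, so a pilot-side client can create a container and then push its payload there. A sketch under two assumptions: the '/pilot/container' path from the docstring is reachable on the API host, and authentication is passed as an OAuth bearer token.

# Sketch of a pilot-side client creating a container. The bearer-token
# header and the JSON wrapping of the returned dict are assumptions.
import requests

def create_container(base_url, token):
    r = requests.post(base_url + '/pilot/container',
                      headers={'Authorization': 'Bearer ' + token})
    r.raise_for_status()
    info = r.json()  # {'ftp': <upload url>, 'guid': <container guid>}
    return info['guid'], info['ftp']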
Example #8
def cont_close(guid):
    """
    POST: /pilot/container/<guid>/close

    Changes container status to 'close'

    :param guid: Container guid
    :type guid: str
    """
    cont = conts_.first(guid=guid)
    if cont is None:
        raise WebpandaError("Container not found")

    path = os.path.join(current_app.config['UPLOAD_FOLDER'],
                        getScope(g.user.username), cont.guid)
    os.path.walk(path, registerLocalFile, cont.guid)

    cont.status = 'close'
    conts_.save(cont)
    return {'response': 'Container status: close'}
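
os.path.walk (Python 2) invokes its visitor as visitor(arg, dirname, names) for every directory below path, so registerLocalFile receives the container guid plus each directory and its entries. The sketch below only illustrates that calling convention; it is not the project's actual registerLocalFile.

# Illustration of the os.path.walk visitor signature used above. This is
# not the real registerLocalFile, only a sketch of the convention.
import os

def register_local_file_sketch(cont_guid, dirname, names):
    for name in names:
        fullpath = os.path.join(dirname, name)
        if os.path.isfile(fullpath):
            # The real visitor would create File/Replica records here and
            # register them in the container identified by cont_guid.
            print('would register %s in container %s' % (fullpath, cont_guid))

# os.path.walk('/path/to/upload/dir', register_local_file_sketch, cont.guid)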
Example #9
def jobLogAPI(id):
    """Returns job stdout & stderr"""
    g.user = request.oauth.user

    job = jobs_.get(id)
    extractLog(id)
    locdir = '/%s/.sys/%s' % (getScope(job.owner.username), job.container.guid)
    absdir = ddm_getlocalabspath(locdir)
    fout = find('payload.stdout', absdir)
    ferr = find('payload.stderr', absdir)
    out = ''
    err = ''
    if len(fout) > 0:
        with open(fout[0]) as f:
            out = f.read()
    if len(ferr) > 0:
        with open(ferr[0]) as f:
            err = f.read()
    data = dict()
    data['id'] = id
    data['out'] = out
    data['err'] = err
    return data
Example #10
def new_file(user, lfn):
    """
    Creates new file object

    :param user: File owner
    :param lfn: Local FileName
    :type user: User
    :type lfn: str
    :return: Created file
    :rtype: File
    """
    if not isinstance(user, User):
        raise Exception("Illegal user class: not User")
    if not isinstance(lfn, str):
        raise Exception("Illegal lfn class: not str")
    if len(lfn) == 0:
        raise Exception("Illegal lfn length: zero")

    # Prepare File obj
    f = File()
    f.scope = getScope(user.username)
    f.attemptn = 0
    f.guid = getGUID(f.scope, None)
    f.lfn = lfn
    f.status = "defined"
    f.transfertask = None
    # f.fsize =
    # md5sum =
    # checksum =
    f.modification_time = datetime.utcnow()
    f.downloaded = 0

    # Save to fc
    files_.save(f)

    return f
Example #11
def send_job(jobid, siteid):
    _logger.debug('Jobid: ' + str(jobid))

    site = sites_.get(siteid)

    job = jobs_.get(int(jobid))
    cont = job.container
    files_catalog = cont.files

    fscope = getScope(job.owner.username)
    datasetName = '{}:{}'.format(fscope, cont.guid)

    distributive = job.distr.name
    release = job.distr.release

    # Prepare runScript
    parameters = job.distr.command
    parameters = parameters.replace("$COMMAND$", job.params)
    parameters = parameters.replace("$USERNAME$", job.owner.username)
    parameters = parameters.replace("$WORKINGGROUP$", job.owner.working_group)

    # Prepare metadata
    metadata = dict(user=job.owner.username)

    # Prepare PanDA Object
    pandajob = JobSpec()
    pandajob.jobDefinitionID = int(time.time()) % 10000
    pandajob.jobName = cont.guid
    pandajob.transformation = client_config.DEFAULT_TRF
    pandajob.destinationDBlock = datasetName
    pandajob.destinationSE = site.se
    pandajob.currentPriority = 1000
    pandajob.prodSourceLabel = 'user'
    pandajob.computingSite = site.ce
    pandajob.cloud = 'RU'
    pandajob.VO = 'atlas'
    pandajob.prodDBlock = "%s:%s" % (fscope, pandajob.jobName)
    pandajob.coreCount = job.corecount
    pandajob.metadata = json.dumps(metadata)
    #pandajob.workingGroup = job.owner.working_group

    if site.encode_commands:
        # It requires script wrapper on cluster side
        pandajob.jobParameters = '%s %s %s "%s"' % (cont.guid, release,
                                                    distributive, parameters)
    else:
        pandajob.jobParameters = parameters

    has_input = False
    for fcc in files_catalog:
        if fcc.type == 'input':
            f = fcc.file
            guid = f.guid
            fileIT = FileSpec()
            fileIT.lfn = f.lfn
            fileIT.dataset = pandajob.prodDBlock
            fileIT.prodDBlock = pandajob.prodDBlock
            fileIT.type = 'input'
            fileIT.scope = fscope
            fileIT.status = 'ready'
            fileIT.GUID = guid
            pandajob.addFile(fileIT)

            has_input = True
        if fcc.type == 'output':
            f = fcc.file
            fileOT = FileSpec()
            fileOT.lfn = f.lfn
            fileOT.destinationDBlock = pandajob.prodDBlock
            fileOT.destinationSE = pandajob.destinationSE
            fileOT.dataset = pandajob.prodDBlock
            fileOT.type = 'output'
            fileOT.scope = fscope
            fileOT.GUID = f.guid
            pandajob.addFile(fileOT)

            # Save replica meta
            fc.new_replica(f, site)

    if not has_input:
        # Add fake input
        fileIT = FileSpec()
        fileIT.lfn = "fake.input"
        fileIT.dataset = pandajob.prodDBlock
        fileIT.prodDBlock = pandajob.prodDBlock
        fileIT.type = 'input'
        fileIT.scope = fscope
        fileIT.status = 'ready'
        fileIT.GUID = "fake.guid"
        pandajob.addFile(fileIT)

    # Prepare log file
    fileOL = FileSpec()
    fileOL.lfn = "%s.log.tgz" % pandajob.jobName
    fileOL.destinationDBlock = pandajob.destinationDBlock
    fileOL.destinationSE = pandajob.destinationSE
    fileOL.dataset = '{}:logs'.format(fscope)
    fileOL.type = 'log'
    fileOL.scope = 'panda'
    pandajob.addFile(fileOL)

    # Save log meta
    log = File()
    log.scope = fscope
    log.lfn = fileOL.lfn
    log.guid = getGUID(log.scope, log.lfn)
    log.type = 'log'
    log.status = 'defined'
    files_.save(log)

    # Save replica meta
    fc.new_replica(log, site)

    # Register file in container
    fc.reg_file_in_cont(log, cont, 'log')

    # Submit job
    o = submitJobs([pandajob])
    x = o[0]

    try:
        # Update PandaID
        PandaID = int(x[0])
        job.pandaid = PandaID
        job.ce = site.ce
    except Exception:
        job.status = 'submit_error'
    jobs_.save(job)

    return 0
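
The runScript preparation is plain placeholder substitution on the distributive's command template. The helper below is an equivalent sketch of that step, shown only to make the convention explicit; send_job() itself uses the three replace() calls above.

# Equivalent sketch of the placeholder substitution used when building the
# job parameters from the distributive command template.
def render_parameters(template, job):
    substitutions = {
        '$COMMAND$': job.params,
        '$USERNAME$': job.owner.username,
        '$WORKINGGROUP$': job.owner.working_group,
    }
    for placeholder, value in substitutions.items():
        template = template.replace(placeholder, value)
    return template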
Example #12
def new_job():
    """Creates new job
    """
    g.user = request.oauth.user
    scope = getScope(request.oauth.user.username)

    js = request.json
    data = js['data']

    distr_id = data['sw_id']
    params = data['script']
    corecount = data['cores']

    site = sites_.first(ce=current_app.config['DEFAULT_CE'])
    distr = distrs_.get(distr_id)

    container = Container()
    guid = 'job.' + commands.getoutput('uuidgen')
    container.guid = guid
    container.status = 'open'
    conts_.save(container)

    # Process ftp files
    if 'ftp_dir' in data.keys():
        ftp_dir = data['ftp_dir']
        register_ftp_files(ftp_dir, scope, container.guid)

    # Process guid list
    if 'guids' in data.keys():
        guids = data['guids']
        for f in guids:
            if f != '':
                file_ = files_.first(guid=f)
                if file_ is not None:
                    # Register file in catalog
                    fc.reg_file_in_cont(file_, container, 'input')
                else:
                    raise WebpandaError('File with guid %s not found' % f)

    ofiles = ['results.tgz']

    # Starts cloneReplica tasks
    ftasks = prepareInputFiles(container.id, site.se)

    # Saves output files meta
    for lfn in ofiles:
        file = File()
        file.scope = scope
        file.guid = getGUID(scope, lfn)
        file.lfn = lfn
        file.status = 'defined'
        files_.save(file)

        # Register file in catalog
        fc.reg_file_in_cont(file, container, 'output')

    # Counts files
    allfiles = container.files
    nifiles = 0
    nofiles = 0
    for f in allfiles:
        if f.type == 'input':
            nifiles += 1
        if f.type == 'output':
            nofiles += 1

    # Defines job meta
    job = Job()
    job.pandaid = None
    job.status = 'pending'
    job.owner = request.oauth.user
    job.params = params
    job.distr = distr
    job.container = container
    job.creation_time = datetime.utcnow()
    job.modification_time = datetime.utcnow()
    job.ninputfiles = nifiles
    job.noutputfiles = nofiles
    job.corecount = corecount
    job.tags = data['tags'] if 'tags' in data.keys() else ""
    jobs_.save(job)

    # Async sendjob
    res = chord(ftasks)(async_send_job.s(jobid=job.id, siteid=site.id))
    return {'id': job.id, 'container_id': guid}
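
From the client side, the view expects a JSON body whose top-level 'data' object carries sw_id, script and cores, plus optional ftp_dir, guids and tags. A hedged request sketch: the '/api/job' path, the bearer-token header and the JSON wrapping of the returned dict are assumptions; the payload keys mirror what new_job() reads.

# Sketch of a client call to the job-creation API.
import requests

def submit_job(base_url, token, sw_id, script, cores=1,
               ftp_dir=None, guids=None, tags=''):
    payload = {'data': {'sw_id': sw_id, 'script': script,
                        'cores': cores, 'tags': tags}}
    if ftp_dir:
        payload['data']['ftp_dir'] = ftp_dir
    if guids:
        payload['data']['guids'] = guids
    r = requests.post(base_url + '/api/job', json=payload,  # route assumed
                      headers={'Authorization': 'Bearer ' + token})
    r.raise_for_status()
    return r.json()  # {'id': <job id>, 'container_id': <container guid>}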
Example #13
def upload():
    form = request.form

    # Create a unique container guid for this particular batch of uploads.
    cguid = 'job.' + commands.getoutput('uuidgen')

    # Is the upload using Ajax, or a direct POST by the form?
    is_ajax = False
    if form.get("__ajax", None) == "true":
        is_ajax = True

    # Create new container
    container = Container()
    container.guid = cguid
    container.status = 'open'
    conts_.save(container)

    # Process files in request
    for upload in request.files.getlist("file"):
        # Define file params
        # Strip any client-side path from the uploaded filename
        lfn = upload.filename.rsplit("/", 1)[-1]
        scope = getScope(g.user.username)
        guid = getGUID(scope, lfn)
        site = sites_.first(se=current_app.config['DEFAULT_SE'])

        # Target folder for these uploads.
        dir = '/' + os.path.join('system', scope, guid)
        target = site.datadir + dir
        try:
            os.makedirs(target)
        except OSError:
            if is_ajax:
                return ajax_response(False, "Couldn't create upload directory: %s" % target)
            else:
                return "Couldn't create upload directory: %s" % target

        replfn = os.path.join(dir, lfn)
        destination = os.path.join(target, lfn)
        upload.save(destination)

        if os.path.isfile(destination):
            # Check file existence in catalog
            adler = adler32(destination)
            md5 = md5sum(destination)
            size = fsize(destination)
            file_id = ddm_checkifexists(lfn, size, adler, md5)

            if file_id:
                # If file exists
                file = files_.get(file_id)
            else:
                # Otherwise create new
                file = File()
                file.scope = scope
                file.guid = guid
                file.type = 'input'
                file.lfn = lfn
                file.token = ''
                file.status = 'defined'
                files_.save(file)
                setFileMeta(file.id, destination)

                replica = Replica()
                replica.se = site.se
                replica.status = 'ready'
                replica.lfn = replfn
                replica.original = file
                replicas_.save(replica)

            # Register file in container
            fc.reg_file_in_cont(file, container, 'input')

        else:
            return ajax_response(False, "Couldn't save file: %s" % target)

    if is_ajax:
        return ajax_response(True, cguid)
    else:
        return redirect(url_for("jobs.jobs"))
Example #14
def job():
    """
    New job form view
    :return: Response obj
    """
    form = NewJobForm(request.form)
    if request.method == 'POST':
        site = sites_.get(int(form.site.data))
        distr_name, distr_release = form.distr.data.split(':')
        distr = distrs_.first(name=distr_name, release=int(distr_release))

        container_guid = form.container.data
        try:
            container = conts_.first(guid=container_guid)
        except Exception as e:
            _logger.error(str(e))
            return make_response(jsonify({'error': 'Container not found'}), 404)
        if container is None:
            return make_response(jsonify({'error': 'Container not found'}), 404)

        if site.encode_commands:
            # By default the frontend base64-encodes each job script part and
            # joins the parts with ";". Keeping them encoded requires a script
            # wrapper on the cluster side.
            jparams = form.params.data
        else:
            # Set site.encode_commands to False to send the command string
            # without base64 encoding.
            jparams = ';'.join([b64decode(command)
                                for command in form.params.data.split(';')])

        ifiles = request.form.getlist('ifiles[]')
        iguids = request.form.getlist('iguids[]')
        iconts = request.form.getlist('iconts[]')
        ofiles = ['{guid}.out.tgz'.format(guid=container.guid)]

        scope = getScope(g.user.username)

        # Process ftp files
        ftp_dir = form.ftpdir.data
        register_ftp_files(ftp_dir, scope, container.guid)

        # Process guid list
        for f in iguids:
            if f != '':
                file = files_.first(guid=f)
                if file is not None:
                    # Register files in container
                    fc.reg_file_in_cont(file, container, 'input')
                else:
                    return make_response(
                        jsonify({'error': "GUID {} not found".format(f)}),
                        404)

        # Process containers
        for c in iconts:
            if c != '':
                try:
                    form_cont = conts_.first(guid=c)
                except Exception as e:
                    _logger.error(str(e))
                    return make_response(
                        jsonify({'error': 'Container in form not found'}),
                        404)
                for f in form_cont.files:
                    # Register file in catalog
                    fc.reg_file_in_cont(f.file, container, 'input')

        # Processes urls
        for f in ifiles:
            if f != '':
                from_se, path, token = getUrlInfo(f)
                replfn = ':/'.join([from_se, path])

                # Check if used before
                file_id = ddm_checkexternalifexists('', replfn)
                if file_id:
                    file = files_.get(file_id)
                else:
                    lfn = path.split('/')[-1]
                    guid = getGUID(scope, lfn)

                    file = File()
                    file.scope = scope
                    file.guid = guid
                    file.type = 'input'
                    file.lfn = lfn
                    file.status = 'defined'
                    files_.save(file)

                    replica = Replica()
                    replica.se = from_se
                    replica.status = 'link'
                    # Separate url & token
                    replica.lfn = replfn
                    replica.token = token
                    replica.original = file
                    replicas_.save(replica)

                # Register file in container
                fc.reg_file_in_cont(file, container, 'input')

        # Starts cloneReplica tasks
        ftasks = prepareInputFiles(container.id, site.se)

        # Saves output files meta
        for lfn in ofiles:
            file = File()
            file.scope = scope
            file.guid = getGUID(scope, lfn)
            file.type = 'output'
            file.lfn = lfn
            file.status = 'defined'
            files_.save(file)

            # Register file in container
            fc.reg_file_in_cont(file, container, 'output')

        # Counts files
        allfiles = container.files
        nifiles = 0
        nofiles = 0
        for f in allfiles:
            if f.type == 'input':
                nifiles += 1
            if f.type == 'output':
                nofiles += 1

        # Defines job meta
        job = Job()
        job.pandaid = None
        job.status = 'pending'
        job.owner = g.user
        job.params = jparams
        job.distr = distr
        job.container = container
        job.creation_time = datetime.utcnow()
        job.modification_time = datetime.utcnow()
        job.ninputfiles = nifiles
        job.noutputfiles = nofiles
        job.corecount = form.corecount.data
        job.tags = form.tags.data if form.tags.data != "" else None
        jobs_.save(job)

        # Async sendjob
        res = chord(ftasks)(async_send_job.s(jobid=job.id, siteid=site.id))

        return redirect(url_for('jobs.jobs'))

    form.distr.choices = [("%s:%s" % (distr.name, distr.release),
                           "%s: %s" % (distr.name, distr.version))
                          for distr in distrs_.find().order_by('name').order_by('version')]
    form.site.choices = [(site.id, "{ce}".format(ce=site.ce))
                         for site in sites_.find(active=1).order_by('ce')]
    return render_template("dashboard/jobs/new.html", form=form)
Example #15
def file_save(container_guid, lfn):
    """
    POST: /pilot/file/<container_guid>/<lfn>/save

    Saves file from request, returns file guid

    :param container_guid: Guid of container
    :type container_guid: str
    :param lfn: Local FileName
    :type lfn: str
    :return: guid
    :rtype: json
    """
    site = sites_.first(se=current_app.config['DEFAULT_SE'])

    if ':' in container_guid:
        container_guid = container_guid.split(':')[-1]
    container = conts_.first(guid=container_guid)
    if container.status != 'open':
        raise WebpandaError('Unable to upload: Container is not open')
    cc = container.files

    ff = None
    for c in cc:
        f = c.file
        if f.lfn == lfn:
            ff = f
    if not ff:
        ff = File()
        ff.scope = getScope(g.user.username)
        ff.lfn = lfn
        ff.guid = getGUID(ff.scope, ff.lfn)
        ff.status = 'defined'
        files_.save(ff)

        # Register file in container
        fc.reg_file_in_cont(ff, container, 'input')

    path = os.path.join(site.datadir, getScope(g.user.username), container.guid)
    replfn = '/' + os.path.join(getScope(g.user.username), container.guid, ff.lfn)
    destination = os.path.join(path, ff.lfn)

    for r in ff.replicas:
        if r.se == site.se:
            destination = site.datadir + r.lfn
            file_dir = '/'.join(destination.split('/')[:-1])
            if r.status == 'ready':
                if os.path.isfile(destination):  # Check fsize, md5 or adler
                    raise WebpandaError('Replica exists')
                else:
                    r.status = 'broken'
                    replicas_.save(r)
                    raise WebpandaError('Broken replica')
            elif r.status == 'defined':
                try:
                    os.makedirs(file_dir)
                except OSError:
                    pass
                with open(destination, 'wb') as f:
                    f.write(request.data)

                # Update file info
                setFileMeta(ff.id, destination)

                r.status = 'ready'
                replicas_.save(r)
                return {'guid': ff.guid}
            else:
                raise WebpandaError('Replica status: %s' % r.status)

    replica = Replica()
    if os.path.isfile(destination):
        raise WebpandaError('Unable to upload: File exists')
    try:
        os.makedirs(path)
    except OSError:
        _logger.debug('Path exists: %s' % path)
    with open(destination, 'wb') as f:
        f.write(request.data)

    # Update file info
    setFileMeta(ff.id, destination)

    # Create/change replica
    replica.se = site.se
    replica.status = 'ready'
    replica.lfn = replfn
    replica.token = ''
    replica.original = ff
    replicas_.save(replica)
    return {'guid': ff.guid}
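
A pilot uploads a file by sending the raw bytes in the request body to the save endpoint of an open container. A hedged sketch of that call: the URL path follows the docstring above, while the bearer-token header and the JSON wrapping of the returned dict are assumptions.

# Sketch of a pilot-side upload to file_save().
import requests

def pilot_upload(base_url, token, container_guid, lfn, local_path):
    url = '%s/pilot/file/%s/%s/save' % (base_url, container_guid, lfn)
    with open(local_path, 'rb') as f:
        r = requests.post(url, data=f.read(),
                          headers={'Authorization': 'Bearer ' + token})
    r.raise_for_status()
    return r.json()['guid']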