예제 #1
0
def handle_proxy(proxy_string, client_id, config):
    """If ARC-enabled server: store an uploaded proxy certificate.

    Arguments:
        proxy_string -- certificate text extracted from the given upload
        client_id    -- DN of the user just being created
        config       -- global configuration object

    Returns a list of output objects; on failure it holds a single
    'error_text' entry describing the problem.
    """

    output = []
    client_dir = client_id_dir(client_id)
    proxy_dir = os.path.join(config.user_home, client_dir)
    proxy_path = os.path.join(config.user_home, client_dir, arc.Ui.proxy_name)

    # A proxy certificate is only useful when ARC clusters are configured
    if not config.arc_clusters:
        output.append({'object_type': 'error_text', 'text':
                       'No ARC support!'})
        return output

    # store the file; chmod to owner-only since it contains credentials
    try:
        write_file(proxy_string, proxy_path, config.logger)
        os.chmod(proxy_path, 0600)
    except Exception, exc:
        # Strip the private home directory prefix from the exception text
        # so the server-side path is not leaked to the client
        output.append({'object_type': 'error_text', 'text'
                              : 'Proxy file could not be written (%s)!'
                               % str(exc).replace(proxy_dir, '')})
        return output
예제 #2
0
def handle_proxy(proxy_string, client_id, config):
    """If ARC-enabled server: store an uploaded proxy certificate.

    Arguments:
        proxy_string -- certificate text extracted from the given upload
        client_id    -- DN of the user just being created
        config       -- global configuration object

    Returns a list of output objects; on failure it holds a single
    'error_text' entry describing the problem.
    """

    output = []
    client_dir = client_id_dir(client_id)
    proxy_dir = os.path.join(config.user_home, client_dir)
    proxy_path = os.path.join(config.user_home, client_dir, arc.Ui.proxy_name)

    # Proxy upload only makes sense when ARC clusters are configured
    if not config.arc_clusters:
        output.append({'object_type': 'error_text', 'text': 'No ARC support!'})
        return output

    # store the file

    try:
        write_file(proxy_string, proxy_path, config.logger)
        # owner-only permissions: the proxy file contains credentials
        os.chmod(proxy_path, 0600)
    except Exception, exc:
        # Strip the private home dir prefix so the server-side path is
        # not leaked to the client in the error message
        output.append({
            'object_type':
            'error_text',
            'text':
            'Proxy file could not be written (%s)!' %
            str(exc).replace(proxy_dir, '')
        })
        return output
예제 #3
0
def main(client_id, user_arguments_dict):
    """Main function used by front end.

    Validates the request, checks that the caller owns the resource and,
    if so, writes the uploaded resource configuration to a pending
    'config.tmp' file for later processing.

    Returns an (output_objects, returnvalue) tuple in all cases.
    """

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    # Config updates are state-changing, so GET requests are rejected
    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text'
             : 'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    # validated fields arrive as lists; last entry wins
    unique_resource_name = accepted['unique_resource_name'][-1]
    resconfig = accepted['resconfig'][-1]

    output_objects.append({'object_type': 'header', 'text'
                          : 'Trying to Update resource configuration'})

    # Only resource owners may replace the configuration
    if not is_owner(client_id, unique_resource_name,
                    configuration.resource_home, logger):
        logger.error(client_id + ' is not an owner of '
                      + unique_resource_name + ': update rejected!')
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'You must be an owner of '
                               + unique_resource_name
                               + ' to update the configuration!'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    # TODO: race if two confs are uploaded concurrently!

    # resource names look like '<host_url>.<identifier>'; split off the
    # trailing identifier only
    host_url, host_identifier = unique_resource_name.rsplit('.', 1)
    pending_file = os.path.join(configuration.resource_home,
                            unique_resource_name, 'config.tmp')

    # write new proposed config file to disk
    try:
        logger.info('write to file: %s' % pending_file)
        # write_file returns a falsy status on failure without raising,
        # so both the status and any exception must be handled
        if not write_file(resconfig, pending_file, logger):
                output_objects.append({'object_type': 'error_text',
                        'text': 'Could not write: %s' % pending_file})
                return (output_objects, returnvalues.SYSTEM_ERROR)
    except Exception, err:
        logger.error('Resource conf %s could not be written: %s' % \
                     (pending_file, err))
        output_objects.append({'object_type': 'error_text', 'text':
                               'Could not write configuration!'})
        return (output_objects, returnvalues.SYSTEM_ERROR)
예제 #4
0
    def write_xml(self, filename):
        """ Writes the Usage Record to a file as XML """

        try:
            xml = self.generate_tree()
            result = write_file(xml, filename, self.__logger)
        except Exception, err:
            self.__logger.error('Unable to write XML file: %s' % err)
예제 #5
0
    def write_xml(self, filename):
        """Write the Usage Record to *filename* as XML.

        Failures are logged and swallowed (best effort); nothing is
        returned to the caller.
        """

        try:
            xml = self.generate_tree()
            # NOTE(review): the status returned by write_file is assigned
            # but never checked, so a failed write that does not raise
            # goes unnoticed here — confirm whether that is intended
            result = write_file(xml, filename, self.__logger)
        except Exception, err:
            self.__logger.error('Unable to write XML file: %s' % err)
예제 #6
0
def create_verify_files(types, re_name, re_dict, base_dir, logger):
    """Create runtime env test files.

    For every verification type in *types* that has a non-empty
    'VERIFY<TYPE>' entry in *re_dict*, write the configured lines to
    '<base_dir>verify_runtime_env_<re_name>.<type>'.

    Raises Exception if any file cannot be written.
    """
    for ver_type in types:
        # Hoist the key: the original rebuilt 'VERIFY%s' % ... four times
        # per iteration
        verify_key = 'VERIFY%s' % ver_type.upper()
        verify_lines = re_dict.get(verify_key, [])
        if not verify_lines:
            continue
        # join is linear; the previous per-line '+=' concatenation was
        # quadratic in the number of lines
        file_content = '\n'.join(verify_lines)
        verify_path = '%sverify_runtime_env_%s.%s' % (base_dir, re_name,
                                                      ver_type.lower())
        if not write_file(file_content.strip(), verify_path, logger):
            raise Exception('could not write test job %s' % ver_type.upper())
예제 #7
0
def create_verify_files(types, re_name, re_dict, base_dir, logger):
    """Create runtime env test files"""
    for ver_type in types:
        # Guard clauses replace the nested ifs: skip types with no
        # (or an empty) VERIFY entry
        verify_key = 'VERIFY%s' % ver_type.upper()
        if verify_key not in re_dict:
            continue
        verify_lines = re_dict[verify_key]
        if verify_lines == []:
            continue
        file_content = ''
        for verify_line in verify_lines:
            file_content += verify_line + '\n'
        verify_path = '%sverify_runtime_env_%s.%s' % (base_dir, re_name,
                                                      ver_type.lower())
        if not write_file(file_content.strip(), verify_path, logger):
            raise Exception('could not write test job %s'
                            % ver_type.upper())
예제 #8
0
    except Exception, err:
        # error during translation, pass a message
        logger.error("Error during xRSL translation: %s" % err.__str__())
        return (None, err.__str__())

        # we submit directly from here (the other version above does
        # copyFileToResource and gen_job_script generates all files)

    # we have to put the generated script somewhere..., and submit from there.
    # inputfiles are given by the user as relative paths from his home,
    # so we should use that location (and clean up afterwards).

    # write script (to user home)
    user_home = os.path.join(configuration.user_home, client_dir)
    script_path = os.path.abspath(os.path.join(user_home, script_name))
    write_file(script, script_path, logger)

    os.chdir(user_home)

    try:
        logger.debug("submitting job to ARC")
        session = arc.Ui(user_home)
        arc_job_ids = session.submit(xrsl)

        # if no exception occurred, we are done:

        job_dict["ARCID"] = arc_job_ids[0]
        job_dict["SESSIONID"] = sessionid

        msg = "OK"
        result = job_dict
예제 #9
0
파일: archives.py 프로젝트: heromod/migrid
def handle_package_upload(
    real_src,
    relative_src,
    client_id,
    configuration,
    submit_mrslfiles,
    dst,
    ):
    """A file package was uploaded (eg. .zip file). Extract the content and
    submit mrsl files if submit_mrsl_files is True.
    """
    logger = configuration.logger
    msg = ''
    status = True

    logger.info("handle_package_upload %s %s %s" % \
                (real_src, relative_src, dst))

    client_dir = client_id_dir(client_id)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep

    # Unpack in same directory unless real_dst is given

    if not dst:
        real_dst = os.path.abspath(os.path.dirname(real_src))
    elif os.path.isabs(dst):
        real_dst = os.path.abspath(dst)
    else:
        real_dst = os.path.join(base_dir, dst)
    real_dst += os.sep
    mrslfiles_to_parse = []

    real_src_lower = real_src.lower()
    if real_src_lower.endswith('.zip'):

        # Handle .zip file

        msg += "Received '%s' for unpacking. " % relative_src
        try:
            zip_object = zipfile.ZipFile(real_src, 'r')
        except Exception, exc:
            logger.error("open zip failed: %s" % exc)
            msg += 'Could not open zipfile: %s! ' % exc
            return (False, msg)

        logger.info("unpack entries of %s to %s" % \
                                  (real_src, real_dst))
        for zip_entry in zip_object.infolist():
            msg += 'Extracting: %s . ' % zip_entry.filename

            # write zip_entry to disk

            local_zip_entry_name = os.path.join(real_dst, zip_entry.filename)
            valid_status, valid_err = valid_user_path_name(
                zip_entry.filename, local_zip_entry_name, base_dir)
            if not valid_status:
                status = False
                msg += "Filename validation error: %s! " % valid_err
                continue

            # create sub dir(s) if missing

            zip_entry_dir = os.path.dirname(local_zip_entry_name)

            if not os.path.isdir(zip_entry_dir):
                msg += 'Creating dir %s . ' % zip_entry.filename
                try:
                    os.makedirs(zip_entry_dir, 0775)
                except Exception, exc:
                    logger.error("create directory failed: %s" % exc)
                    msg += 'Error creating directory: %s! ' % exc
                    status = False
                    continue

            if os.path.isdir(local_zip_entry_name):
                logger.info("nothing more to do for dir entry: %s" % \
                            local_zip_entry_name)
                continue

            # TODO: can we detect and ignore symlinks?
            # Zip format is horribly designed/documented:
            # http://www.pkware.com/documents/casestudies/APPNOTE.TXT
            # I haven't managed to find a way to detect symlinks. Thus
            # they are simply created as files containing the name they
            # were supposed to link to: This is inconsistent but safe :-S

            # write file - symbolic links are written as files! (good for
            # security)

            if not write_file(zip_object.read(zip_entry.filename),
                              local_zip_entry_name,
                              logger) and \
                              not os.path.exists(local_zip_entry_name):
                msg += 'Error unpacking %s to disk! ' % zip_entry.filename
                status = False
                continue

            # get the size as the OS sees it

            try:
                __ = os.path.getsize(local_zip_entry_name)
            except Exception, exc:
                logger.warning("unpack may have failed: %s" % exc)
                msg += \
                    'File %s unpacked, but could not get file size %s! '\
                     % (zip_entry.filename, exc)
                status = False
                continue
예제 #10
0
파일: archives.py 프로젝트: heromod/migrid
                continue

            elif not tar_entry.isfile():

                # not a regular file - symlinks are ignored to avoid illegal
                # access

                msg += 'Skipping %s: not a regular file or directory! ' % \
                       tar_entry.name
                status = False
                continue

            # write file!

            if not write_file(tar_file_content.extractfile(tar_entry).read(),
                              local_tar_entry_name,
                              logger):
                msg += 'Error unpacking file %s to disk! ' % tar_entry.name
                status = False
                continue

            # get the size as the OS sees it

            try:
                __ = os.path.getsize(local_tar_entry_name)
            except Exception, exc:
                logger.warning("file save may have failed: %s" % exc)
                msg += \
                    'File %s unpacked, but could not get file size %s! ' % \
                    (tar_entry.name, exc)
                status = False
예제 #11
0
파일: textarea.py 프로젝트: heromod/migrid
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_title=True, op_header=False)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    # TODO: all non-file fields should be validated!!
    # Input fields are mostly file stuff so do not validate it
    validate_args = dict([(key, user_arguments_dict.get(key, val)) for \
                         (key, val) in defaults.items()])
    (validate_status, accepted) = validate_input_and_cert(
        validate_args,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text'
             : 'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    output_objects.append({'object_type': 'header', 'text'
                          : '%s submit job/file' % configuration.short_title})
    submitstatuslist = []
    fileuploadobjs = []
    filenumber = 0
    file_fields = int(accepted.get('file_fields', -1)[-1])
    save_as_default = (accepted['save_as_default'][-1] != 'False')

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep

    mrsl = ''
    while True:
        (content, file_type) = handle_form_input(filenumber,
                user_arguments_dict, configuration)

        if not content:
            if filenumber < file_fields:

                # blank field but file_fields indicates more fields
                    
                filenumber += 1
                continue

            # no field count and no data for filenumber found

            break

        # always append mrsltextarea if available!

        try:
            mrsl = user_arguments_dict['mrsltextarea_%s' % filenumber][0]
            content += mrsl
        except:
            pass
        content += '\n'

        mrslfiles_to_parse = []
        submit_mrslfiles = False
        submitmrsl_key = 'submitmrsl_%s' % filenumber
        if user_arguments_dict.has_key(submitmrsl_key):
            val = str(user_arguments_dict[submitmrsl_key][0]).upper()
            if val == 'ON' or val == 'TRUE':
                submit_mrslfiles = True
        fileuploadobj = {'object_type': 'fileuploadobj',
                         'submitmrsl': submit_mrslfiles}

        if file_type == 'plain':

            # get filename

            filename_key = 'FILENAME_%s' % filenumber
            if not user_arguments_dict.has_key(filename_key):
                output_objects.append(
                    {'object_type': 'error_text','text'
                     : ("The specified file_type is 'plain', but a filename" \
                        "value was not found. The missing control should be " \
                        "named %s") % filename_key})
                return (output_objects, returnvalues.CLIENT_ERROR)

            filename_val = convert_control_value_to_line(filename_key,
                                                         user_arguments_dict)
            if not filename_val:
                if filenumber < file_fields:

                    # blank field but file_fields indicates more fields

                    filenumber += 1
                    continue

                output_objects.append(
                    {'object_type': 'error_text', 'text'
                     : 'No filename found - please make sure you provide a " \
                     "file to upload'})
                return (output_objects, returnvalues.CLIENT_ERROR)

            local_filename = base_dir + filename_val
            valid_status, valid_err = valid_user_path_name(filename_val,
                                                           local_filename,
                                                           base_dir)
            if not valid_status:
                output_objects.append(
                    {'object_type': 'error_text', 'text': valid_err})
                return (output_objects, returnvalues.CLIENT_ERROR)

            # A new filename was created, write content to file

            if not write_file(content, local_filename, logger):
                output_objects.append({'object_type': 'error_text',
                        'text': 'Could not write: %s' % local_filename})
                return (output_objects, returnvalues.SYSTEM_ERROR)
            fileuploadobj['saved'] = True

            # msg += "%s created!" % local_filename

            fileuploadobj['name'] = os.sep\
                 + convert_control_value_to_line(filename_key,
                                                 user_arguments_dict)

            if local_filename.upper().endswith('.MRSL')\
                 and submit_mrslfiles:
                mrslfiles_to_parse.append[local_filename]
        elif file_type == 'fileupload':

            # An input type=file was found

            fileupload_key = 'fileupload_%s_0_0' % filenumber

            # if not fileitem.filename:

            if not user_arguments_dict.has_key(fileupload_key
                     + 'filename'):
                output_objects.append({'object_type': 'error_text',
                        'text': 'NO FILENAME error'})
                return (output_objects, returnvalues.CLIENT_ERROR)

            base_name = strip_dir(user_arguments_dict[fileupload_key
                                   + 'filename'])
            if not base_name:
                if filenumber < file_fields:

                    # blank field but file_fields indicates more fields

                    # output_objects.append({'object_type': 'text', 'text':
                    #                        'skip item %d' % filenumber})
                    
                    filenumber += 1
                    continue

                output_objects.append(
                    {'object_type': 'error_text', 'text'
                     : 'No filename found - please make sure you provide a " \
                     "file to upload'})
                return (output_objects, returnvalues.CLIENT_ERROR)

            extract_packages = False
            extract_key = 'extract_%s' % filenumber
            if user_arguments_dict.has_key(extract_key):
                val = str(user_arguments_dict[extract_key][0]).upper()
                if val == 'ON' or val == 'TRUE':
                    extract_packages = True

            remote_filename = ''
            default_remotefilename_key = 'default_remotefilename_%s'\
                 % filenumber
            if user_arguments_dict.has_key(default_remotefilename_key):
                remote_filename = \
                    user_arguments_dict[default_remotefilename_key][0]

            # remotefilename overwrites default_remotefilename if it exists

            remotefilename_key = 'remotefilename_%s' % filenumber
            if user_arguments_dict.has_key(remotefilename_key):
                remote_filename = \
                    user_arguments_dict[remotefilename_key][0]

            if not remote_filename:
                remote_filename = base_name

            # if remote_filename is a directory, use client's local filename
            # for the last part of the filename

            if remote_filename.strip().endswith(os.sep):
                remote_filename += base_name

            if not user_arguments_dict.has_key(fileupload_key):
                output_objects.append({'object_type': 'error_text',
                        'text': 'File content not found!'})
                return (output_objects, returnvalues.CLIENT_ERROR)

            local_filename = os.path.abspath(base_dir + remote_filename)
            valid_status, valid_err = valid_user_path_name(remote_filename,
                                                           local_filename,
                                                           base_dir)
            if not valid_status:
                output_objects.append(
                    {'object_type': 'error_text', 'text': valid_err})
                return (output_objects, returnvalues.CLIENT_ERROR)

            if not os.path.isdir(os.path.dirname(local_filename)):
                try:
                    os.makedirs(os.path.dirname(local_filename), 0777)
                except Exception:
                    fileuploadobj['message'] = \
                        {'object_type': 'error_text',
                         'text': 'Exception creating dirs %s'\
                         % os.path.dirname(local_filename)}
            fileuploadobj['name'] = remote_filename

            # reads uploaded file into memory

            binary = user_arguments_dict.has_key('%s_is_encoded'
                     % fileupload_key)
            if binary:
                data = user_arguments_dict[fileupload_key][-1]
                data = str(base64.decodestring(data))
            else:
                data = user_arguments_dict[fileupload_key][-1]

            # write file in memory to disk

            if not write_file(data, local_filename,
                              configuration.logger):
                output_objects.append(
                    {'object_type': 'error_text',
                     'text': 'Error writing file in memory to disk'})
                return (output_objects, returnvalues.SYSTEM_ERROR)
            fileuploadobj['saved'] = True

            # Tell the client about the current settings (extract and submit)
            # extract_str = "Extract files from packages (.zip, .tar.gz, .tgz, .tar.bz2): "
            # if extract_packages:
            #    extract_str += "ON"
            # else:
            #    extract_str += "OFF"
            # output_objects.append({"object_type":"text", "text":extract_str})

            fileuploadobj['extract_packages'] = extract_packages

            # submit_str = "Submit mRSL files to parser (including .mRSL files in packages!): "
            # if submit_mrslfiles:
            #    submit_str += "ON"
            # else:
            #    submit_str += "OFF"
            # output_objects.append({"object_type":"text", "text":submit_str})

            # handle file package

            if extract_packages\
                 and (local_filename.upper().endswith('.ZIP')
                       or local_filename.upper().endswith('.TAR.GZ')
                       or local_filename.upper().endswith('.TGZ')
                       or local_filename.upper().endswith('.TAR.BZ2')):
                (status, msg) = handle_package_upload(local_filename,
                        remote_filename, client_id, configuration,
                        submit_mrslfiles, os.path.dirname(local_filename))
                if status:
                    if submit_mrslfiles:
                        if isinstance(msg, basestring):
                            output_objects.append(
                                {'object_type': 'error_text',
                                 'text': 'Error in submit: %s' % msg})
                        else:
                            submitstatuslist = msg
                    else:
                        output_objects.append({'object_type': 'text',
                                               'text': msg})
                else:
                    if submit_mrslfiles:
                        if isinstance(msg, basestring):
                            output_objects.append(
                                {'object_type': 'error_text',
                                 'text': 'Error in unpack: %s' % msg})
                        else:
                            submitstatuslist = msg
                    else:
                        output_objects.append({'object_type': 'error_text',
                                               'text': 'Problems unpacking: %s' % msg})
            else:

                # output_objects.append({"object_type":"text", "text":msg})
                # a "normal" (non-package) file was uploaded

                try:
                    output_objects.append({'object_type': 'text', 'text'
                            : 'File saved: %s' % remote_filename})
                except Exception, err:
                    output_objects.append({'object_type': 'error_text',
                            'text'
                            : 'File seems to be saved, but could not get file size %s'
                             % err})
                    return (output_objects, returnvalues.SYSTEM_ERROR)
            fileuploadobj['size'] = os.path.getsize(local_filename)
            fileuploadobj['name'] = remote_filename

            # Check if the extension is .mRSL

            if local_filename.upper().endswith('.MRSL')\
                 and submit_mrslfiles:

                # A .mrsl file was uploaded!
                # output_objects.append({"object_type":"text", "text":
                #                        "File name on MiG server: %s"
                #                        % (remote_filename)})

                mrslfiles_to_parse.append(local_filename)
        else:

            # mrsl file created by html controls. create filename. Loop until
            # a filename that do not exits is created

            html_generated_mrsl_dir = base_dir + 'html_generated_mrsl'
            if os.path.exists(html_generated_mrsl_dir)\
                 and not os.path.isdir(html_generated_mrsl_dir):

                # oops, user might have created a file with the same name

                output_objects.append(
                    {'object_type': 'error_text', 'text'
                     : 'Please make sure %s does not exist or is a directory!'
                     % 'html_generated_mrsl/'})
                return (output_objects, returnvalues.CLIENT_ERROR)
            if not os.path.isdir(html_generated_mrsl_dir):
                os.mkdir(html_generated_mrsl_dir)
            while True:
                time_c = time.gmtime()
                timestamp = '%s_%s_%s__%s_%s_%s' % (
                    time_c[1],
                    time_c[2],
                    time_c[0],
                    time_c[3],
                    time_c[4],
                    time_c[5],
                    )
                local_filename = html_generated_mrsl_dir\
                     + '/TextAreaAt_' + timestamp + '.mRSL'
                if not os.path.isfile(local_filename):
                    break

            # A new filename was created, write content to file

            if not write_file(content, local_filename, logger):
                output_objects.append(
                    {'object_type': 'error_text',
                     'text': 'Could not write: %s' % local_filename})
                return (output_objects, returnvalues.SYSTEM_ERROR)
            fileuploadobj['name'] = os.sep\
                 + 'html_generated_mrsl/TextAreaAt_' + timestamp\
                 + '.mRSL'
            fileuploadobj['size'] = os.path.getsize(local_filename)
            mrslfiles_to_parse.append(local_filename)
        fileuploadobjs.append(fileuploadobj)

        # Submit selected file(s)

        for mrslfile in mrslfiles_to_parse:

            # do not reveal full path of mrsl file to client

            relative_filename = os.sep + mrslfile.replace(base_dir, '')
            submitstatus = {'object_type': 'submitstatus',
                            'name': relative_filename}

            (status, newmsg, job_id) = new_job(mrslfile, client_id,
                    configuration, False, True)
            if not status:

                # output_objects.append({"object_type":"error_text", "text":"%s"
                #                        % newmsg})

                submitstatus['status'] = False
                submitstatus['message'] = newmsg
            else:

                # return (output_objects, returnvalues.CLIENT_ERROR)

                submitstatus['status'] = True
                submitstatus['job_id'] = job_id

                # output_objects.append({"object_type":"text", "text":"%s"
                #                       % newmsg})

            submitstatuslist.append(submitstatus)

        # prepare next loop

        filenumber += 1
예제 #12
0
if __name__ == '__main__':
    print 'starting translation test. Args: ' , len(sys.argv)
    logger.debug('translation for file ' + sys.argv[1] + ' starts')
    if len(sys.argv) > 1:
        fname = sys.argv[1]
        parsed = '.'.join([fname,'parsed'])
        translated = '.'.join([parsed,'xrsl'])

        try:
            import shared.mrslparser as p
            import shared.fileio as fileio

            (presult,errors) = p.parse(fname, 'test-id',
                                       '+No+Client+Id',None,parsed)
            if not presult:
                print 'Errors:\n%s' % errors
            else:
                print 'Parsing OK, now translating'
                mrsl_dict = fileio.unpickle(parsed,logger)
                (xrsl,script,name) = translate(mrsl_dict,'test-name')
                print '\n'.join(['Job name',name,'script',script,'XRSL'])
                fileio.write_file(script, "test-id.sh", logger)
                print (format_xrsl(xrsl))
                fileio.write_file("%s" % xrsl, translated, logger)
                print 'done'
        except Exception, err:
            print 'Error.'
            print err.__str__()

예제 #13
0
파일: chksum.py 프로젝트: ucphhpc/migrid
                    logger.info("%s %s of %s: %s" %
                                (op_name, hash_algo, abs_path, checksum))
                    output_lines.append(line)
                except Exception, exc:
                    output_objects.append({
                        'object_type':
                        'error_text',
                        'text':
                        "%s: '%s': %s" % (op_name, relative_path, exc)
                    })
                    logger.error("%s: failed on '%s': %s" %
                                 (op_name, relative_path, exc))
                    status = returnvalues.SYSTEM_ERROR
                    continue
            entry = {'object_type': 'file_output', 'lines': output_lines}
            output_objects.append(entry)
            all_lines += output_lines

    if dst and not write_file(''.join(all_lines), abs_dest, logger):
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            "failed to write checksums to %s" % relative_dest
        })
        logger.error("writing checksums to %s for %s failed" %
                     (abs_dest, client_id))
        status = returnvalues.SYSTEM_ERROR

    return (output_objects, status)
예제 #14
0
def main(client_id, user_arguments_dict):
    """Main function used by front end.

    Processes numbered upload form fields (FILENAME_N, fileupload_N_0_0,
    mrsltextarea_N) from user_arguments_dict, writes the uploaded or
    HTML-generated content into the user's home directory and optionally
    submits any resulting .mRSL files as jobs via new_job.
    """

    # NOTE(review): this sample appears truncated - submitstatuslist and
    # fileuploadobjs are accumulated but never returned or appended to
    # output_objects in the visible code; confirm against upstream.

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_title=True, op_header=False)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    # TODO: do we need to cover more non-file fields?
    # All non-file fields must be validated
    validate_args = dict([(key, user_arguments_dict.get(key, val)) for \
                         (key, val) in defaults.items()])
    # IMPORTANT: we must explicitly include CSRF token
    validate_args[csrf_field] = user_arguments_dict.get(
        csrf_field, ['AllowMe'])

    (validate_status, accepted) = validate_input_and_cert(
        validate_args,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    output_objects.append({
        'object_type':
        'header',
        'text':
        '%s file handling' % configuration.short_title
    })
    submitstatuslist = []
    fileuploadobjs = []
    filenumber = 0
    # number of form fields to expect; -1 means unknown (loop until blank)
    file_fields = int(accepted.get('file_fields', -1)[-1])
    # NOTE(review): save_as_default is unused in the visible code - presumably
    # consumed in the truncated tail; confirm against upstream.
    save_as_default = (accepted['save_as_default'][-1] != 'False')

    if not safe_handler(configuration, 'post', op_name, client_id,
                        get_csrf_limit(configuration), accepted):
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            '''Only accepting
CSRF-filtered POST requests to prevent unintended updates'''
        })
        return (output_objects, returnvalues.CLIENT_ERROR)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(
        os.path.join(configuration.user_home, client_dir)) + os.sep

    mrsl = ''
    # Process numbered form fields until no more content is found
    while True:
        (content, file_type) = handle_form_input(filenumber,
                                                 user_arguments_dict,
                                                 configuration)

        if not content:
            if filenumber < file_fields:

                # blank field but file_fields indicates more fields

                filenumber += 1
                continue

            # no field count and no data for filenumber found

            break

        # always append mrsltextarea if available!

        try:
            mrsl = user_arguments_dict['mrsltextarea_%s' % filenumber][0]
            content += mrsl
        except:
            pass
        content += '\n'

        mrslfiles_to_parse = []
        submit_mrslfiles = False
        submitmrsl_key = 'submitmrsl_%s' % filenumber
        if configuration.site_enable_jobs and \
               user_arguments_dict.has_key(submitmrsl_key):
            val = str(user_arguments_dict[submitmrsl_key][0]).upper()
            if val == 'ON' or val == 'TRUE':
                submit_mrslfiles = True
        fileuploadobj = {
            'object_type': 'fileuploadobj',
            'submitmrsl': submit_mrslfiles
        }

        if file_type == 'plain':

            # get filename

            filename_key = 'FILENAME_%s' % filenumber
            if not user_arguments_dict.has_key(filename_key):
                output_objects.append(
                    {'object_type': 'error_text','text'
                     : ("The specified file_type is 'plain', but a filename" \
                        "value was not found. The missing control should be " \
                        "named %s") % filename_key})
                return (output_objects, returnvalues.CLIENT_ERROR)

            filename_val = convert_control_value_to_line(
                filename_key, user_arguments_dict)
            if not filename_val:
                if filenumber < file_fields:

                    # blank field but file_fields indicates more fields

                    filenumber += 1
                    continue

                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    'No filename found - please make sure you provide a " \
                     "file to upload'
                })
                return (output_objects, returnvalues.CLIENT_ERROR)

            local_filename = base_dir + filename_val
            # reject unsafe paths escaping the user home directory
            valid_status, valid_err = valid_user_path_name(
                filename_val, local_filename, base_dir)
            if not valid_status:
                output_objects.append({
                    'object_type': 'error_text',
                    'text': valid_err
                })
                return (output_objects, returnvalues.CLIENT_ERROR)

            # A new filename was created, write content to file

            if not write_file(content, local_filename, logger):
                logger.error("%s failed to write plain file %s" % \
                             (op_name, local_filename))
                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    'Could not write: %s' % local_filename
                })
                return (output_objects, returnvalues.SYSTEM_ERROR)
            logger.info("%s wrote plain file %s" % (op_name, local_filename))
            fileuploadobj['saved'] = True

            # msg += "%s created!" % local_filename

            fileuploadobj['name'] = os.sep\
                 + convert_control_value_to_line(filename_key,
                                                 user_arguments_dict)

            if local_filename.upper().endswith('.MRSL')\
                 and submit_mrslfiles:
                mrslfiles_to_parse.append(local_filename)
        elif file_type == 'fileupload':

            # An input type=file was found

            fileupload_key = 'fileupload_%s_0_0' % filenumber

            # if not fileitem.filename:

            if not user_arguments_dict.has_key(fileupload_key + 'filename'):
                output_objects.append({
                    'object_type': 'error_text',
                    'text': 'NO FILENAME error'
                })
                return (output_objects, returnvalues.CLIENT_ERROR)

            base_name = strip_dir(user_arguments_dict[fileupload_key +
                                                      'filename'])
            if not base_name:
                if filenumber < file_fields:

                    # blank field but file_fields indicates more fields

                    # output_objects.append({'object_type': 'text', 'text':
                    #                        'skip item %d' % filenumber})

                    filenumber += 1
                    continue

                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    'No filename found - please make sure you provide a " \
                     "file to upload'
                })
                return (output_objects, returnvalues.CLIENT_ERROR)

            extract_packages = False
            extract_key = 'extract_%s' % filenumber
            if user_arguments_dict.has_key(extract_key):
                val = str(user_arguments_dict[extract_key][0]).upper()
                if val == 'ON' or val == 'TRUE':
                    extract_packages = True

            remote_filename = ''
            default_remotefilename_key = 'default_remotefilename_%s'\
                 % filenumber
            if user_arguments_dict.has_key(default_remotefilename_key):
                remote_filename = \
                    user_arguments_dict[default_remotefilename_key][0]

            # remotefilename overwrites default_remotefilename if it exists

            remotefilename_key = 'remotefilename_%s' % filenumber
            if user_arguments_dict.has_key(remotefilename_key):
                remote_filename = \
                    user_arguments_dict[remotefilename_key][0]

            if not remote_filename:
                remote_filename = base_name

            # if remote_filename is a directory, use client's local filename
            # for the last part of the filename

            if remote_filename.strip().endswith(os.sep):
                remote_filename += base_name

            if not user_arguments_dict.has_key(fileupload_key):
                output_objects.append({
                    'object_type': 'error_text',
                    'text': 'File content not found!'
                })
                return (output_objects, returnvalues.CLIENT_ERROR)

            local_filename = os.path.abspath(base_dir + remote_filename)
            valid_status, valid_err = valid_user_path_name(
                remote_filename, local_filename, base_dir)
            if not valid_status:
                output_objects.append({
                    'object_type': 'error_text',
                    'text': valid_err
                })
                return (output_objects, returnvalues.CLIENT_ERROR)

            # create any missing parent dirs; failure is recorded but not fatal
            if not os.path.isdir(os.path.dirname(local_filename)):
                try:
                    os.makedirs(os.path.dirname(local_filename), 0775)
                except Exception:
                    fileuploadobj['message'] = \
                        {'object_type': 'error_text',
                         'text': 'Exception creating dirs %s'\
                         % os.path.dirname(local_filename)}
            fileuploadobj['name'] = remote_filename

            # reads uploaded file into memory

            binary = user_arguments_dict.has_key('%s_is_encoded' %
                                                 fileupload_key)
            if binary:
                data = user_arguments_dict[fileupload_key][-1]
                data = str(base64.decodestring(data))
            else:
                data = user_arguments_dict[fileupload_key][-1]

            # write file in memory to disk

            if not write_file(data, local_filename, configuration.logger):
                logger.error("%s failed to write upload file %s" % \
                             (op_name, local_filename))
                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    'Error writing file in memory to disk'
                })
                return (output_objects, returnvalues.SYSTEM_ERROR)
            logger.info("%s wrote upload file %s" % (op_name, local_filename))
            fileuploadobj['saved'] = True

            # Tell the client about the current settings (extract and submit)
            # extract_str = "Extract files from packages (.zip, .tar.gz, .tgz, .tar.bz2): "
            # if extract_packages:
            #    extract_str += "ON"
            # else:
            #    extract_str += "OFF"
            # output_objects.append({"object_type":"text", "text":extract_str})

            fileuploadobj['extract_packages'] = extract_packages

            # submit_str = "Submit mRSL files to parser (including .mRSL files in packages!): "
            # if submit_mrslfiles:
            #    submit_str += "ON"
            # else:
            #    submit_str += "OFF"
            # output_objects.append({"object_type":"text", "text":submit_str})

            # handle file package

            if extract_packages\
                 and (local_filename.upper().endswith('.ZIP')
                       or local_filename.upper().endswith('.TAR.GZ')
                       or local_filename.upper().endswith('.TGZ')
                       or local_filename.upper().endswith('.TAR.BZ2')):
                (upload_status,
                 msg) = handle_package_upload(local_filename, remote_filename,
                                              client_id, configuration,
                                              submit_mrslfiles,
                                              os.path.dirname(local_filename))
                if upload_status:
                    if submit_mrslfiles:
                        # msg is either an error string or a submitstatus list
                        if isinstance(msg, basestring):
                            output_objects.append({
                                'object_type':
                                'error_text',
                                'text':
                                'Error in submit: %s' % msg
                            })
                        else:
                            submitstatuslist = msg
                    else:
                        output_objects.append({
                            'object_type': 'text',
                            'text': msg
                        })
                else:
                    if submit_mrslfiles:
                        if isinstance(msg, basestring):
                            output_objects.append({
                                'object_type':
                                'error_text',
                                'text':
                                'Error in unpack: %s' % msg
                            })
                        else:
                            submitstatuslist = msg
                    else:
                        output_objects.append({
                            'object_type':
                            'error_text',
                            'text':
                            'Problems unpacking: %s' % msg
                        })
            else:

                # output_objects.append({"object_type":"text", "text":msg})
                # a "normal" (non-package) file was uploaded

                try:
                    output_objects.append({
                        'object_type':
                        'text',
                        'text':
                        'File saved: %s' % remote_filename
                    })
                except Exception, err:
                    output_objects.append({
                        'object_type':
                        'error_text',
                        'text':
                        'File seems to be saved, but could not get file size %s'
                        % err
                    })
                    return (output_objects, returnvalues.SYSTEM_ERROR)
            fileuploadobj['size'] = os.path.getsize(local_filename)
            fileuploadobj['name'] = remote_filename

            # Check if the extension is .mRSL

            if local_filename.upper().endswith('.MRSL')\
                 and submit_mrslfiles:

                # A .mrsl file was uploaded!
                # output_objects.append({"object_type":"text", "text":
                #                        "File name on MiG server: %s"
                #                        % (remote_filename)})

                mrslfiles_to_parse.append(local_filename)
        else:

            # mrsl file created by html controls. create filename. Loop until
            # a filename that do not exits is created

            html_generated_mrsl_dir = base_dir + 'html_generated_mrsl'
            if os.path.exists(html_generated_mrsl_dir)\
                 and not os.path.isdir(html_generated_mrsl_dir):

                # oops, user might have created a file with the same name

                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    'Please make sure %s does not exist or is a directory!' %
                    'html_generated_mrsl/'
                })
                return (output_objects, returnvalues.CLIENT_ERROR)
            if not os.path.isdir(html_generated_mrsl_dir):
                os.mkdir(html_generated_mrsl_dir)
            # retry with a fresh timestamp until an unused filename is found
            while True:
                time_c = time.gmtime()
                timestamp = '%s_%s_%s__%s_%s_%s' % (
                    time_c[1],
                    time_c[2],
                    time_c[0],
                    time_c[3],
                    time_c[4],
                    time_c[5],
                )
                local_filename = html_generated_mrsl_dir\
                     + '/TextAreaAt_' + timestamp + '.mRSL'
                if not os.path.isfile(local_filename):
                    break

            # A new filename was created, write content to file

            if not write_file(content, local_filename, logger):
                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    'Could not write: %s' % local_filename
                })
                return (output_objects, returnvalues.SYSTEM_ERROR)
            fileuploadobj['name'] = os.sep\
                 + 'html_generated_mrsl/TextAreaAt_' + timestamp\
                 + '.mRSL'
            fileuploadobj['size'] = os.path.getsize(local_filename)
            mrslfiles_to_parse.append(local_filename)
        fileuploadobjs.append(fileuploadobj)

        # Submit selected file(s)

        for mrslfile in mrslfiles_to_parse:

            # do not reveal full path of mrsl file to client

            relative_filename = os.sep + mrslfile.replace(base_dir, '')
            submitstatus = {
                'object_type': 'submitstatus',
                'name': relative_filename
            }

            (new_status, newmsg, job_id) = new_job(mrslfile, client_id,
                                                   configuration, False, True)
            if not new_status:

                # output_objects.append({"object_type":"error_text", "text":"%s"
                #                        % newmsg})

                submitstatus['status'] = False
                submitstatus['message'] = newmsg
            else:

                # return (output_objects, returnvalues.CLIENT_ERROR)

                submitstatus['status'] = True
                submitstatus['job_id'] = job_id

                # output_objects.append({"object_type":"text", "text":"%s"
                #                       % newmsg})

            submitstatuslist.append(submitstatus)

        # prepare next loop

        filenumber += 1
Example #15
0
def gen_job_script(
    job_dictionary, resource_config, configuration, localjobname, path_without_extension, client_dir, exe, logger
):
    """Generate job script from job_dictionary before handout to resource"""

    script_language = resource_config["SCRIPTLANGUAGE"]
    if not script_language in configuration.scriptlanguages:
        print "Unknown script language! (conflict with scriptlanguages in " + "configuration?) %s not in %s" % (
            script_language,
            configuration.scriptlanguages,
        )
        return False

    if script_language == "python":
        generator = genjobscriptpython.GenJobScriptPython(
            job_dictionary,
            resource_config,
            exe,
            configuration.migserver_https_sid_url,
            localjobname,
            path_without_extension,
        )
    elif script_language == "sh":
        generator = genjobscriptsh.GenJobScriptSh(
            job_dictionary,
            resource_config,
            exe,
            configuration.migserver_https_sid_url,
            localjobname,
            path_without_extension,
        )
    elif script_language == "java":
        generator = genjobscriptjava.GenJobScriptJava(
            job_dictionary, resource_config, configuration.migserver_https_sid_url, localjobname, path_without_extension
        )
    else:
        print "Unknown script language! (is in configuration but not in " + "jobscriptgenerator) %s " % script_language
        return False

    # String concatenation in python: [X].join is much faster
    # than repeated use of s += strings

    getinputfiles_array = []
    getinputfiles_array.append(generator.script_init())
    getinputfiles_array.append(generator.comment("print start"))
    getinputfiles_array.append(generator.print_start("get input files"))
    getinputfiles_array.append(generator.comment("init log"))
    getinputfiles_array.append(generator.init_io_log())
    getinputfiles_array.append(generator.comment("get special inputfiles"))
    getinputfiles_array.append(generator.get_special_input_files("get_special_status"))
    getinputfiles_array.append(generator.log_io_status("get_special_input_files", "get_special_status"))
    getinputfiles_array.append(
        generator.print_on_error("get_special_status", "0", "failed to fetch special input files!")
    )
    getinputfiles_array.append(generator.comment("get input files"))
    getinputfiles_array.append(generator.get_input_files("get_input_status"))
    getinputfiles_array.append(generator.log_io_status("get_input_files", "get_input_status"))
    getinputfiles_array.append(generator.print_on_error("get_input_status", "0", "failed to fetch input files!"))
    getinputfiles_array.append(generator.comment("get executables"))
    getinputfiles_array.append(generator.get_executables("get_executables_status"))
    getinputfiles_array.append(generator.log_io_status("get_executables", "get_executables_status"))
    getinputfiles_array.append(
        generator.print_on_error("get_executables_status", "0", "failed to fetch executable files!")
    )

    # client_dir equals empty_job_name for sleep jobs

    getinputfiles_array.append(
        generator.generate_output_filelists(client_dir != configuration.empty_job_name, "generate_output_filelists")
    )
    getinputfiles_array.append(
        generator.print_on_error("generate_output_filelists", "0", "failed to generate output filelists!")
    )
    getinputfiles_array.append(generator.generate_input_filelist("generate_input_filelist"))
    getinputfiles_array.append(
        generator.print_on_error("generate_input_filelist", "0", "failed to generate input filelist!")
    )
    getinputfiles_array.append(generator.generate_iosessionid_file("generate_iosessionid_file"))
    getinputfiles_array.append(
        generator.print_on_error("generate_iosessionid_file", "0", "failed to generate iosessionid file!")
    )
    getinputfiles_array.append(generator.generate_mountsshprivatekey_file("generate_mountsshprivatekey_file"))
    getinputfiles_array.append(
        generator.print_on_error("generate_mountsshprivatekey_file", "0", "failed to generate mountsshprivatekey file!")
    )
    getinputfiles_array.append(generator.generate_mountsshknownhosts_file("generate_mountsshknownhosts_file"))
    getinputfiles_array.append(
        generator.print_on_error("generate_mountsshknownhosts_file", "0", "failed to generate mountsshknownhosts file!")
    )
    getinputfiles_array.append(
        generator.total_status(
            ["get_special_status", "get_input_status", "get_executables_status", "generate_output_filelists"],
            "total_status",
        )
    )
    getinputfiles_array.append(generator.exit_on_error("total_status", "0", "total_status"))
    getinputfiles_array.append(generator.comment("exit script"))
    getinputfiles_array.append(generator.exit_script("0", "get input files"))

    job_array = []
    job_array.append(generator.script_init())
    job_array.append(generator.set_core_environments())
    job_array.append(generator.print_start("job"))
    job_array.append(generator.comment("TODO: switch to job directory here"))
    job_array.append(generator.comment("make sure job status files exist"))
    job_array.append(
        generator.create_files(
            [
                job_dictionary["JOB_ID"] + ".stdout",
                job_dictionary["JOB_ID"] + ".stderr",
                job_dictionary["JOB_ID"] + ".status",
            ]
        )
    )
    job_array.append(generator.init_status())
    job_array.append(generator.comment("chmod +x"))
    job_array.append(generator.chmod_executables("chmod_status"))
    job_array.append(generator.print_on_error("chmod_status", "0", "failed to make one or more EXECUTABLES executable"))
    job_array.append(generator.log_on_error("chmod_status", "0", "system: chmod"))

    job_array.append(generator.comment("set environments"))
    job_array.append(generator.set_environments("env_status"))
    job_array.append(generator.print_on_error("env_status", "0", "failed to initialize one or more ENVIRONMENTs"))
    job_array.append(generator.log_on_error("env_status", "0", "system: set environments"))

    job_array.append(generator.comment("set runtimeenvironments"))
    job_array.append(generator.set_runtime_environments(resource_config["RUNTIMEENVIRONMENT"], "re_status"))
    job_array.append(generator.print_on_error("re_status", "0", "failed to initialize one or more RUNTIMEENVIRONMENTs"))
    job_array.append(generator.log_on_error("re_status", "0", "system: set RUNTIMEENVIRONMENTs"))

    job_array.append(generator.comment("enforce some basic job limits"))
    job_array.append(generator.set_limits())
    if job_dictionary.get("MOUNT", []) != []:
        job_array.append(generator.comment("Mount job home"))
        job_array.append(
            generator.mount(
                job_dictionary["SESSIONID"],
                configuration.user_sftp_show_address,
                configuration.user_sftp_show_port,
                "mount_status",
            )
        )
        job_array.append(generator.print_on_error("mount_status", "0", "failded to mount job home"))
        job_array.append(generator.log_on_error("mount_status", "0", "system: mount"))
    job_array.append(generator.comment("execute!"))
    job_array.append(generator.execute("EXECUTING: ", "--Exit code:"))
    if job_dictionary.get("MOUNT", []) != []:
        job_array.append(generator.comment("Unmount job home"))
        job_array.append(generator.umount("umount_status"))
        job_array.append(generator.print_on_error("umount_status", "0", "failded to umount job home"))
        job_array.append(generator.log_on_error("umount_status", "0", "system: umount"))
    job_array.append(generator.comment("exit script"))
    job_array.append(generator.exit_script("0", "job"))

    getupdatefiles_array = []

    # We need to make sure that curl failures lead to retry while
    # missing output (from say a failed job) is logged but
    # ignored in relation to getupdatefiles success.

    getupdatefiles_array.append(generator.print_start("get update files"))
    getupdatefiles_array.append(generator.init_io_log())

    getupdatefiles_array.append(generator.comment("get io files"))
    getupdatefiles_array.append(generator.get_io_files("get_io_status"))
    getupdatefiles_array.append(generator.log_io_status("get_io_files", "get_io_status"))
    getupdatefiles_array.append(generator.print_on_error("get_io_status", "0", "failed to get one or more IO files"))
    getupdatefiles_array.append(generator.exit_on_error("get_io_status", "0", "get_io_status"))

    getupdatefiles_array.append(generator.comment("exit script"))
    getupdatefiles_array.append(generator.exit_script("0", "get update files"))

    sendoutputfiles_array = []

    # We need to make sure that curl failures lead to retry while
    # missing output (from say a failed job) is logged but
    # ignored in relation to sendoutputfiles success.

    sendoutputfiles_array.append(generator.print_start("send output files"))
    sendoutputfiles_array.append(generator.init_io_log())
    sendoutputfiles_array.append(generator.comment("check output files"))

    sendoutputfiles_array.append(generator.output_files_missing("missing_counter"))
    sendoutputfiles_array.append(generator.log_io_status("output_files_missing", "missing_counter"))
    sendoutputfiles_array.append(generator.print_on_error("missing_counter", "0", "missing output files"))
    sendoutputfiles_array.append(generator.comment("send output files"))
    sendoutputfiles_array.append(generator.send_output_files("send_output_status"))
    sendoutputfiles_array.append(generator.log_io_status("send_output_files", "send_output_status"))
    sendoutputfiles_array.append(
        generator.print_on_error("send_output_status", "0", "failed to send one or more outputfiles")
    )
    sendoutputfiles_array.append(generator.exit_on_error("send_output_status", "0", "send_output_status"))

    sendoutputfiles_array.append(generator.comment("send io files"))
    sendoutputfiles_array.append(generator.send_io_files("send_io_status"))
    sendoutputfiles_array.append(generator.log_io_status("send_io_files", "send_io_status"))
    sendoutputfiles_array.append(generator.print_on_error("send_io_status", "0", "failed to send one or more IO files"))
    sendoutputfiles_array.append(generator.exit_on_error("send_io_status", "0", "send_io_status"))
    sendoutputfiles_array.append(generator.comment("send status files"))
    sendoutputfiles_array.append(
        generator.send_status_files([job_dictionary["JOB_ID"] + ".io-status"], "send_io_status_status")
    )
    sendoutputfiles_array.append(
        generator.print_on_error("send_io_status_status", "0", "failed to send io-status file")
    )
    sendoutputfiles_array.append(generator.exit_on_error("send_io_status_status", "0", "send_io_status_status"))

    # Please note that .status upload marks the end of the
    # session and thus it must be the last uploaded file.

    sendoutputfiles_array.append(
        generator.send_status_files([job_dictionary["JOB_ID"] + ".status"], "send_status_status")
    )
    sendoutputfiles_array.append(generator.print_on_error("send_status_status", "0", "failed to send status file"))
    sendoutputfiles_array.append(generator.exit_on_error("send_status_status", "0", "send_status_status"))

    # Note that ID.sendouputfiles is called from frontend_script
    # so exit on failure can be handled there.

    sendoutputfiles_array.append(generator.comment("exit script"))
    sendoutputfiles_array.append(generator.exit_script("0", "send output files"))

    sendupdatefiles_array = []

    # We need to make sure that curl failures lead to retry while
    # missing output (from say a failed job) is logged but
    # ignored in relation to sendupdatefiles success.

    sendupdatefiles_array.append(generator.print_start("send update files"))
    sendupdatefiles_array.append(generator.init_io_log())

    sendupdatefiles_array.append(generator.comment("send io files"))
    sendupdatefiles_array.append(generator.send_io_files("send_io_status"))
    sendupdatefiles_array.append(generator.log_io_status("send_io_files", "send_io_status"))
    sendupdatefiles_array.append(generator.print_on_error("send_io_status", "0", "failed to send one or more IO files"))
    sendupdatefiles_array.append(generator.exit_on_error("send_io_status", "0", "send_io_status"))

    sendupdatefiles_array.append(generator.comment("exit script"))
    sendupdatefiles_array.append(generator.exit_script("0", "send update files"))

    # clean up must be done with SSH (when the .status file
    # has been uploaded): Job script can't safely/reliably clean up
    # after itself because of possible user interference.

    if job_dictionary.has_key("JOBTYPE") and job_dictionary["JOBTYPE"].lower() == "interactive":

        # interactive jobs have a .job file just containing a curl
        # call to the MiG servers cgi-sid/requestinteractivejob
        # and the usual .job is instead called .interactivejob and
        # is SCP'ed and started by SSH in the requestinteractive.py
        # script

        logger.error("jobtype: interactive")
        interactivejobfile = (
            generator.script_init()
            + "\n"
            + generator.request_interactive()
            + "\n"
            + generator.exit_script("0", "interactive job")
        )

        # write the small file containing the requestinteractivejob.py
        # call as .job

        write_file(interactivejobfile, configuration.mig_system_files + job_dictionary["JOB_ID"] + ".job", logger)

        # write the usual .job file as .interactivejob

        write_file(
            "\n".join(job_array), configuration.mig_system_files + job_dictionary["JOB_ID"] + ".interactivejob", logger
        )
        print interactivejobfile
    else:

        # write files

        write_file("\n".join(job_array), configuration.mig_system_files + job_dictionary["JOB_ID"] + ".job", logger)

    write_file("\n".join(getinputfiles_array), path_without_extension + ".getinputfiles", logger)
    write_file(
        "\n".join(getupdatefiles_array),
        configuration.mig_system_files + job_dictionary["JOB_ID"] + ".getupdatefiles",
        logger,
    )
    write_file(
        "\n".join(sendoutputfiles_array),
        configuration.mig_system_files + job_dictionary["JOB_ID"] + ".sendoutputfiles",
        logger,
    )
    write_file(
        "\n".join(sendupdatefiles_array),
        configuration.mig_system_files + job_dictionary["JOB_ID"] + ".sendupdatefiles",
        logger,
    )

    return True
Example #16
0
def main(client_id, user_arguments_dict):
    """Main function used by front end"""
    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )

    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    logger.debug("User: %s executing %s" % (client_id, op_name))
    if not configuration.site_enable_jupyter:
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            'The Jupyter service is not enabled on the system'
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    if not configuration.site_enable_sftp_subsys and not \
            configuration.site_enable_sftp:
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            'The required sftp service is not enabled on the system'
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    if configuration.site_enable_sftp:
        sftp_port = configuration.user_sftp_port

    if configuration.site_enable_sftp_subsys:
        sftp_port = configuration.user_sftp_subsys_port

    requested_service = accepted['service'][-1]
    service = {
        k: v
        for options in configuration.jupyter_services
        for k, v in options.items()
        if options['service_name'] == requested_service
    }

    if not service:
        valid_services = [
            options['name'] for options in configuration.jupyter_services
        ]
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            '%s is not a valid jupyter service, '
            'allowed include %s' % (requested_service, valid_services)
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    valid_service = valid_jupyter_service(configuration, service)
    if not valid_service:
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            'The service %s appears to be misconfigured, '
            'please contact a system administrator about this issue' %
            requested_service
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    host = get_host_from_service(configuration, service)
    # Get an active jupyterhost
    if host is None:
        logger.error("No active jupyterhub host could be found")
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            'Failed to establish connection to the %s Jupyter service' %
            service['service_name']
        })
        output_objects.append({
            'object_type': 'link',
            'destination': 'jupyter.py',
            'text': 'Back to Jupyter services overview'
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    remote_user = unescape(os.environ.get('REMOTE_USER', '')).strip()
    if not remote_user:
        logger.error("Can't connect to jupyter with an empty REMOTE_USER "
                     "environment variable")
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            'Failed to establish connection to the Jupyter service'
        })
        return (output_objects, returnvalues.CLIENT_ERROR)
    # Ensure the remote_user dict can be http posted
    remote_user = str(remote_user)

    # TODO, activate admin info
    # remote_user = {'USER': username, 'IS_ADMIN': is_admin(client_id,
    #                                                      configuration,
    # logger)}

    # Regular sftp path
    mnt_path = os.path.join(configuration.jupyter_mount_files_dir, client_dir)
    # Subsys sftp path
    subsys_path = os.path.join(configuration.mig_system_files, 'jupyter_mount')
    # sftp session path
    link_home = configuration.sessid_to_jupyter_mount_link_home

    user_home_dir = os.path.join(configuration.user_home, client_dir)

    # Preparing prerequisites
    if not os.path.exists(mnt_path):
        os.makedirs(mnt_path)

    if not os.path.exists(link_home):
        os.makedirs(link_home)

    if configuration.site_enable_sftp_subsys:
        if not os.path.exists(subsys_path):
            os.makedirs(subsys_path)

    # Make sure ssh daemon does not complain
    tighten_key_perms(configuration, client_id)

    url_base = '/' + service['service_name']
    url_home = url_base + '/home'
    url_auth = host + url_base + '/hub/login'
    url_data = host + url_base + '/hub/user-data'

    # Does the client home dir contain an active mount key
    # If so just keep on using it.
    jupyter_mount_files = [
        os.path.join(mnt_path, jfile) for jfile in os.listdir(mnt_path)
        if jfile.endswith('.jupyter_mount')
    ]

    logger.info("User: %s mount files: %s" %
                (client_id, "\n".join(jupyter_mount_files)))
    logger.debug("Remote-User %s" % remote_user)
    active_mounts = []
    for jfile in jupyter_mount_files:
        jupyter_dict = unpickle(jfile, logger)
        if not jupyter_dict:
            # Remove failed unpickle
            logger.error("Failed to unpickle %s removing it" % jfile)
            remove_jupyter_mount(jfile, configuration)
        else:
            # Mount has been timed out
            if not is_active(jupyter_dict):
                remove_jupyter_mount(jfile, configuration)
            else:
                # Valid mount
                active_mounts.append({'path': jfile, 'state': jupyter_dict})

    logger.debug(
        "User: %s active keys: %s" %
        (client_id, "\n".join([mount['path'] for mount in active_mounts])))

    # If multiple are active, remove oldest
    active_mount, old_mounts = get_newest_mount(active_mounts)
    for mount in old_mounts:
        remove_jupyter_mount(mount['path'], configuration)

    # A valid active key is already present redirect straight to the jupyter
    # service, pass most recent mount information
    if active_mount is not None:
        mount_dict = mig_to_mount_adapt(active_mount['state'])
        user_dict = mig_to_user_adapt(active_mount['state'])
        logger.debug("Existing header values, Mount: %s User: %s" %
                     (mount_dict, user_dict))

        auth_header = {'Remote-User': remote_user}
        json_data = {'data': {'Mount': mount_dict, 'User': user_dict}}

        if configuration.site_enable_workflows:
            workflows_dict = mig_to_workflows_adapt(active_mount['state'])
            if not workflows_dict:
                # No cached workflows session could be found -> refresh with a
                # one
                workflow_session_id = get_workflow_session_id(
                    configuration, client_id)
                if not workflow_session_id:
                    workflow_session_id = create_workflow_session_id(
                        configuration, client_id)
                # TODO get this dynamically
                url = configuration.migserver_https_sid_url + \
                    '/cgi-sid/workflowsjsoninterface.py?output_format=json'
                workflows_dict = {
                    'WORKFLOWS_URL': url,
                    'WORKFLOWS_SESSION_ID': workflow_session_id
                }

            logger.debug("Existing header values, Workflows: %s" %
                         workflows_dict)
            json_data['workflows_data'] = {'Session': workflows_dict}

        with requests.session() as session:
            # Authenticate and submit data
            response = session.post(url_auth, headers=auth_header)
            if response.status_code == 200:
                response = session.post(url_data, json=json_data)
                if response.status_code != 200:
                    logger.error(
                        "Jupyter: User %s failed to submit data %s to %s" %
                        (client_id, json_data, url_data))
            else:
                logger.error(
                    "Jupyter: User %s failed to authenticate against %s" %
                    (client_id, url_auth))

        # Redirect client to jupyterhub
        return jupyter_host(configuration, output_objects, remote_user,
                            url_home)

    # Create a new keyset
    # Create login session id
    session_id = generate_random_ascii(2 * session_id_bytes,
                                       charset='0123456789abcdef')

    # Generate private/public keys
    (mount_private_key,
     mount_public_key) = generate_ssh_rsa_key_pair(encode_utf8=True)

    # Known hosts
    sftp_addresses = socket.gethostbyname_ex(
        configuration.user_sftp_show_address or socket.getfqdn())

    # Subsys sftp support
    if configuration.site_enable_sftp_subsys:
        # Restrict possible mount agent
        auth_content = []
        restrict_opts = 'no-agent-forwarding,no-port-forwarding,no-pty,'
        restrict_opts += 'no-user-rc,no-X11-forwarding'
        restrictions = '%s' % restrict_opts
        auth_content.append('%s %s\n' % (restrictions, mount_public_key))
        # Write auth file
        write_file('\n'.join(auth_content),
                   os.path.join(subsys_path, session_id + '.authorized_keys'),
                   logger,
                   umask=027)

    logger.debug("User: %s - Creating a new jupyter mount keyset - "
                 "private_key: %s public_key: %s " %
                 (client_id, mount_private_key, mount_public_key))

    jupyter_dict = {
        'MOUNT_HOST': configuration.short_title,
        'SESSIONID': session_id,
        'USER_CERT': client_id,
        # don't need fraction precision, also not all systems provide fraction
        # precision.
        'CREATED_TIMESTAMP': int(time.time()),
        'MOUNTSSHPRIVATEKEY': mount_private_key,
        'MOUNTSSHPUBLICKEY': mount_public_key,
        # Used by the jupyterhub to know which host to mount against
        'TARGET_MOUNT_ADDR': "@" + sftp_addresses[0] + ":",
        'PORT': sftp_port
    }
    client_email = extract_field(client_id, 'email')
    if client_email:
        jupyter_dict.update({'USER_EMAIL': client_email})

    if configuration.site_enable_workflows:
        workflow_session_id = get_workflow_session_id(configuration, client_id)
        if not workflow_session_id:
            workflow_session_id = create_workflow_session_id(
                configuration, client_id)
        # TODO get this dynamically
        url = configuration.migserver_https_sid_url + \
            '/cgi-sid/workflowsjsoninterface.py?output_format=json'
        jupyter_dict.update({
            'WORKFLOWS_URL': url,
            'WORKFLOWS_SESSION_ID': workflow_session_id
        })

    # Only post the required keys, adapt to API expectations
    mount_dict = mig_to_mount_adapt(jupyter_dict)
    user_dict = mig_to_user_adapt(jupyter_dict)
    workflows_dict = mig_to_workflows_adapt(jupyter_dict)
    logger.debug("User: %s Mount header: %s" % (client_id, mount_dict))
    logger.debug("User: %s User header: %s" % (client_id, user_dict))
    if workflows_dict:
        logger.debug("User: %s Workflows header: %s" %
                     (client_id, workflows_dict))

    # Auth and pass a new set of valid mount keys
    auth_header = {'Remote-User': remote_user}
    json_data = {'data': {'Mount': mount_dict, 'User': user_dict}}
    if workflows_dict:
        json_data['workflows_data'] = {'Session': workflows_dict}

    # First login
    with requests.session() as session:
        # Authenticate
        response = session.post(url_auth, headers=auth_header)
        if response.status_code == 200:
            response = session.post(url_data, json=json_data)
            if response.status_code != 200:
                logger.error(
                    "Jupyter: User %s failed to submit data %s to %s" %
                    (client_id, json_data, url_data))
        else:
            logger.error("Jupyter: User %s failed to authenticate against %s" %
                         (client_id, url_auth))

    # Update pickle with the new valid key
    jupyter_mount_state_path = os.path.join(mnt_path,
                                            session_id + '.jupyter_mount')

    pickle(jupyter_dict, jupyter_mount_state_path, logger)

    # Link jupyter pickle state file
    linkdest_new_jupyter_mount = os.path.join(mnt_path,
                                              session_id + '.jupyter_mount')

    linkloc_new_jupyter_mount = os.path.join(link_home,
                                             session_id + '.jupyter_mount')
    make_symlink(linkdest_new_jupyter_mount, linkloc_new_jupyter_mount, logger)

    # Link userhome
    linkloc_user_home = os.path.join(link_home, session_id)
    make_symlink(user_home_dir, linkloc_user_home, logger)

    return jupyter_host(configuration, output_objects, remote_user, url_home)
예제 #17
0
if __name__ == '__main__':
    print 'starting translation test. Args: ' , len(sys.argv)
    logger.debug('translation for file ' + sys.argv[1] + ' starts')
    if len(sys.argv) > 1:
        fname = sys.argv[1]
        parsed = '.'.join([fname,'parsed'])
        translated = '.'.join([parsed,'xrsl'])

        try:
            import shared.mrslparser as p
            import shared.fileio as fileio

            (presult,errors) = p.parse(fname, 'test-id',
                                       '+No+Client+Id',None,parsed)
            if not presult:
                print 'Errors:\n%s' % errors
            else:
                print 'Parsing OK, now translating'
                mrsl_dict = fileio.unpickle(parsed,logger)
                (xrsl,script,name) = translate(mrsl_dict,'test-name')
                print '\n'.join(['Job name',name,'script',script,'XRSL'])
                fileio.write_file(script, "test-id.sh", logger)
                print (format_xrsl(xrsl))
                fileio.write_file("%s" % xrsl, translated, logger)
                print 'done'
        except Exception, err:
            print 'Error.'
            print err.__str__()

예제 #18
0
def main(client_id, user_arguments_dict):
    """Main function used by front end.

    Handles an upload of a new resource configuration: validates input and
    certificate, enforces CSRF-filtered POST, verifies that the caller owns
    the resource and finally writes the proposed configuration to a pending
    file (config.tmp) under the resource home for later processing.

    NOTE(review): this excerpt ends right after the pending file is
    written; the acceptance/activation steps that presumably follow are
    outside this view - confirm against the full module.
    """

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    # last occurrence wins for repeated form fields
    unique_resource_name = accepted['unique_resource_name'][-1]
    resconfig = accepted['resconfig'][-1]

    # this operation modifies state, so require a CSRF-protected POST
    if not safe_handler(configuration, 'post', op_name, client_id,
                        get_csrf_limit(configuration), accepted):
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            '''Only accepting
CSRF-filtered POST requests to prevent unintended updates'''
        })
        return (output_objects, returnvalues.CLIENT_ERROR)

    output_objects.append({
        'object_type': 'header',
        'text': 'Trying to Update resource configuration'
    })

    # only resource owners may replace its configuration
    if not is_owner(client_id, unique_resource_name,
                    configuration.resource_home, logger):
        logger.error(client_id + ' is not an owner of ' +
                     unique_resource_name + ': update rejected!')
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            'You must be an owner of ' + unique_resource_name +
            ' to update the configuration!'
        })
        return (output_objects, returnvalues.CLIENT_ERROR)

    # TODO: race if two confs are uploaded concurrently!

    host_url, host_identifier = unique_resource_name.rsplit('.', 1)
    pending_file = os.path.join(configuration.resource_home,
                                unique_resource_name, 'config.tmp')

    # write new proposed config file to disk
    try:
        logger.info('write to file: %s' % pending_file)
        if not write_file(resconfig, pending_file, logger):
            output_objects.append({
                'object_type': 'error_text',
                'text': 'Could not write: %s' % pending_file
            })
            return (output_objects, returnvalues.SYSTEM_ERROR)
    except Exception, err:
        # write_file signals failure by return value above; this catches
        # unexpected errors (e.g. missing directory, permissions)
        logger.error('Resource conf %s could not be written: %s' % \
                     (pending_file, err))
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Could not write configuration!'
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)
예제 #19
0
    except Exception, err:
        # error during translation, pass a message
        logger.error('Error during xRSL translation: %s' % err.__str__())
        return (None, err.__str__())

        # we submit directly from here (the other version above does
        # copyFileToResource and gen_job_script generates all files)

    # we have to put the generated script somewhere..., and submit from there.
    # inputfiles are given by the user as relative paths from his home,
    # so we should use that location (and clean up afterwards).

    # write script (to user home)
    user_home = os.path.join(configuration.user_home, client_dir)
    script_path = os.path.abspath(os.path.join(user_home, script_name))
    write_file(script, script_path, logger)

    os.chdir(user_home)

    try:
        logger.debug('submitting job to ARC')
        session = arc.Ui(user_home)
        arc_job_ids = session.submit(xrsl)

        # if no exception occurred, we are done:

        job_dict['ARCID'] = arc_job_ids[0]
        job_dict['SESSIONID'] = sessionid

        msg = 'OK'
        result = job_dict
예제 #20
0
             })
        return (output_objects, returnvalues.CLIENT_ERROR)

    # create directory to store vgrid public_base files

    try:
        os.mkdir(public_base_dir)
        pub_readme = os.path.join(public_base_dir, 'README')
        if not os.path.exists(pub_readme):
            write_file("""= Public Web Page =
This directory is used for hosting the public web page for the %s %s.
It is accessible by the public from the %ss page or directly using the URL
%s/vgrid/%s/

Just update the index.html file to suit your wishes for an entry page. It can
link to any other material in this folder or subfolders with relative
addresses. So it is possible to create a full web site with multiple pages and
rich content like on other web hosting services. However, there's no support
for server side scripting with Python, ASP or PHP for security reasons.
""" % (vgrid_name, configuration.site_vgrid_label,
       configuration.site_vgrid_label, configuration.migserver_http_url,
       vgrid_name),
                       pub_readme, logger)
        pub_entry_page = os.path.join(public_base_dir, 'index.html')
        if not os.path.exists(pub_entry_page):
            write_file("""<!DOCTYPE html>
<html>
<head>
<meta http-equiv='Content-Type' content='text/html;charset=utf-8'/>
<title>Public entry page not created yet..</title>
</head>
<body>
예제 #21
0
파일: archives.py 프로젝트: ucphhpc/migrid
                status = False
                continue
            
            # TODO: can we detect and ignore symlinks?
            # Zip format is horribly designed/documented:
            # http://www.pkware.com/documents/casestudies/APPNOTE.TXT
            # I haven't managed to find a way to detect symlinks. Thus
            # they are simply created as files containing the name they
            # were supposed to link to: This is inconsistent but safe :-S

            # write file - symbolic links are written as files! (good for
            # security).

            # NB: Needs to use undecoded filename here

            if not write_file(zip_data, local_zip_entry_name, logger) and \
                   not os.path.exists(local_zip_entry_name):
                msg += 'Error unpacking %s to disk! ' % entry_filename
                status = False
                continue

            # get the size as the OS sees it

            try:
                __ = os.path.getsize(local_zip_entry_name)
            except Exception, exc:
                logger.warning("unpack may have failed: %s" % exc)
                msg += \
                    'File %s unpacked, but could not get file size %s! '\
                     % (entry_filename, exc)
                status = False
예제 #22
0
                logger.error("create_frozen_archive: failed: %s" % msg)
                remove_rec(frozen_dir, configuration)
                return (False, 'Error writing frozen archive')
        else:
            (status, msg) = move_file(real_source, freeze_path, configuration)
            if not status:
                logger.error("create_frozen_archive: failed: %s" % msg)
                remove_rec(frozen_dir, configuration)
                return (False, 'Error writing frozen archive')
    logger.info("create_frozen_archive: save %s for %s" % \
                              ([i[0] for i in freeze_upload], freeze_id))
    for (filename, contents) in freeze_upload:
        freeze_path = os.path.join(frozen_dir, filename)
        frozen_files.append(filename)
        logger.debug("create_frozen_archive: write %s" % freeze_path)
        if not write_file(contents, freeze_path, logger):
            logger.error("create_frozen_archive: failed: %s" % err)
            remove_rec(frozen_dir, configuration)
            return (False, 'Error writing frozen archive')

    if freeze_dict['PUBLISH']:
        published_id = public_freeze_id(freeze_dict)
        public_meta = [('CREATOR', 'Owner'), ('NAME', 'Name'),
                       ('DESCRIPTION', 'Description'),
                       ('CREATED_TIMESTAMP', 'Date')]

        # Use the default preamle to get style, skin and so on right
        
        contents = get_cgi_html_preamble(configuration, "Public Archive: %s" % \
                                         published_id, "", widgets=False)