Example #1
def refresh_user_map(configuration):
    """Refresh map of users and their configuration. Uses a pickled
    dictionary for efficiency. 
    User IDs are stored in their raw (non-anonymized form).
    Only update map for users that updated conf after last map save.
    """
    dirty = []
    map_path = os.path.join(configuration.mig_system_files, "user.map")
    lock_path = os.path.join(configuration.mig_system_files, "user.lock")
    lock_handle = open(lock_path, 'a')
    fcntl.flock(lock_handle.fileno(), fcntl.LOCK_EX)
    user_map, map_stamp = load_user_map(configuration, do_lock=False)

    # Find all users and their configurations
    
    all_users = list_users(configuration.user_home)
    real_map = real_to_anon_user_map(configuration.user_home)
    for user in all_users:
        settings_path = os.path.join(configuration.user_settings,
                                     client_id_dir(user), settings_filename)
        profile_path = os.path.join(configuration.user_settings,
                                    client_id_dir(user), profile_filename)
        settings_mtime, profile_mtime = 0, 0
        if os.path.isfile(settings_path):
            settings_mtime = os.path.getmtime(settings_path)
        if os.path.isfile(profile_path):
            profile_mtime = os.path.getmtime(profile_path)

        if settings_mtime + profile_mtime > 0:
            conf_mtime = max(settings_mtime, profile_mtime)
        else:
            conf_mtime = -1
        # init first time
        user_map[user] = user_map.get(user, {})
        if not user_map[user].has_key(CONF) or conf_mtime >= map_stamp:
            user_conf = get_user_conf(user, configuration, True)
            if not user_conf:
                user_conf = {}
            user_map[user][CONF] = user_conf
            public_id = user
            if user_conf.get('ANONYMOUS', True):
                public_id = real_map[user]
            user_map[user][USERID] = public_id
            user_map[user][MODTIME] = map_stamp
            dirty += [user]
    # Remove any missing users from map
    missing_user = [user for user in user_map.keys()
                    if user not in all_users]
    for user in missing_user:
        del user_map[user]
        dirty += [user]

    if dirty:
        try:
            dump(user_map, map_path)
        except Exception, exc:
            configuration.logger.error("Could not save user map: %s" % exc)
Example #2
def update_section_helper(client_id,
                          configuration,
                          section_filename,
                          changes,
                          defaults,
                          create_missing=True):
    """Update settings section in pickled file with values from changes
    dictionary. Optional create_missing can be used if the pickle should be
    created if not already there.
    The defaults dictionary is used to set any missing values.
    """

    client_dir = client_id_dir(client_id)
    section_path = os.path.join(configuration.user_settings, client_dir,
                                section_filename)
    if not os.path.exists(section_path):
        if create_missing:
            section_dict = {}
        else:
            raise Exception('no %s file to update!' % section_filename)
    else:
        section_dict = unpickle(section_path, configuration.logger)
    for (key, val) in defaults.items():
        section_dict[key] = section_dict.get(key, val)
    section_dict.update(changes)
    if not pickle(section_dict, section_path, configuration.logger):
        raise Exception('could not save updated %s file!' % section_filename)
    return section_dict
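
A hedged usage sketch: the section filename and keys below are purely illustrative, not names from the real settings module.

# Hypothetical call updating one value while backfilling defaults
section = update_section_helper(
    client_id,
    configuration,
    'widgets',                         # hypothetical section file
    {'PREMENU': 'custom banner'},      # changes to apply
    {'PREMENU': '', 'POSTMENU': ''},   # defaults for any missing keys
    create_missing=True)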
Example #3
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
                    initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    status = returnvalues.OK

    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = 'Job Manager'
    title_entry['style'] = css_tmpl(configuration)
    title_entry['javascript'] = js_tmpl()
  
    output_objects.append({'object_type': 'header', 'text': 'Job Manager'})
    output_objects.append({'object_type': 'html_form', 'text': html_pre()})
    output_objects.append({'object_type': 'table_pager', 'entry_name': 'jobs',
                           'default_entries': default_pager_entries,
                           'form_append': pager_append()})
    output_objects.append({'object_type': 'html_form', 'text': html_post()})
  
    return (output_objects, status)
Example #4
def tighten_key_perms(configuration, client_id, keys_dirname=ssh_conf_dir):
    """Make sure permissions on client home and keys_dirname there are tight
    enough for sshd not to complain. In practice umask 022 or larger must be
    enforced all the way up to first root owned parent dir.
    """
    _logger = configuration.logger
    client_dir = client_id_dir(client_id)
    # NOTE: first remove any trailing slashes for dirname to be consistent
    user_base_dir = configuration.user_home.rstrip(os.sep)
    state_base_dir = os.path.dirname(user_base_dir)
    user_home_dir = os.path.join(user_base_dir, client_dir)
    user_ssh_dir = os.path.join(user_home_dir, keys_dirname)

    # Check for correct permissions (umask >= 022) on ssh dir and parents
    check_dirs = [state_base_dir, user_base_dir, user_home_dir, user_ssh_dir]
    fixed_dirs = []
    for path in check_dirs:
        # check perms and limit if needed
        if os.path.exists(path) and os.stat(path).st_mode & 022:
            old_perm = os.stat(path).st_mode & 0777
            limit_perm = old_perm & 0755
            _logger.warning("%s has invalid ssh permissions %s, reset to %s" %
                            (path, oct(old_perm), oct(limit_perm)))
            os.chmod(path, limit_perm)
            fixed_dirs.append(path)
    return fixed_dirs
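
The check and repair above are plain bit masks on the mode word. A standalone illustration of the arithmetic, using the same Python 2 octal literals as the code:

old_perm = 0775                 # e.g. a group-writable home dir
print oct(old_perm & 022)       # 020: group write bit set, sshd complains
limit_perm = old_perm & 0755    # clear group/other write bits
print oct(limit_perm)           # 0755: tight enough for sshd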
Example #5
def handle_proxy(proxy_string, client_id, config):
    """If ARC-enabled server: store a proxy certificate.
       Arguments: proxy_string - text  extracted from given upload
                  client_id  - DN for user just being created
                  config     - global configuration
    """

    output = []
    client_dir = client_id_dir(client_id)
    proxy_dir = os.path.join(config.user_home, client_dir)
    proxy_path = os.path.join(config.user_home, client_dir, arc.Ui.proxy_name)

    if not config.arc_clusters:
        output.append({'object_type': 'error_text', 'text': 'No ARC support!'})
        return output

    # store the file

    try:
        write_file(proxy_string, proxy_path, config.logger)
        os.chmod(proxy_path, 0600)
    except Exception, exc:
        output.append({'object_type': 'error_text', 'text':
                       'Proxy file could not be written (%s)!' %
                       str(exc).replace(proxy_dir, '')})
        return output
Example #6
def _parse_and_save_auth_pw_keys(publickeys, password, client_id,
                                 configuration, proto, proto_conf_dir):
    """Validate and write publickey and password settings for proto
    (ssh/davs/ftps) in proto_conf_dir.
    """
    client_dir = client_id_dir(client_id)
    proto_conf_path = os.path.join(configuration.user_home, client_dir,
                                   proto_conf_dir)
    # Create proto conf dir for any old users
    try:
        os.mkdir(proto_conf_path)
    except:
        pass
    keys_path = os.path.join(proto_conf_path, authkeys_filename)
    key_status = parse_and_save_publickeys(keys_path, publickeys, client_id,
                                           configuration)
    pw_path = os.path.join(proto_conf_path, authpasswords_filename)
    pw_status = parse_and_save_passwords(pw_path, password, client_id,
                                         configuration)
    digest_path = os.path.join(proto_conf_path, authdigests_filename)
    if proto == 'davs':
        digest_status = parse_and_save_digests(digest_path, password, client_id,
                                               configuration)
    else:
        digest_status = (True, '')
    status = (key_status[0] and pw_status[0] and digest_status[0],
              key_status[1] + pw_status[1] + digest_status[1])
    if status[0]:
        mark_user_modified(configuration, client_id)
    return status
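
A sketch of a typical call for WebDAVS; the davs_conf_dir constant and the input variables here are assumptions in the spirit of the surrounding module:

# Hypothetical invocation from a settings handler
(status, msg) = _parse_and_save_auth_pw_keys(publickeys, password, client_id,
                                             configuration, 'davs',
                                             davs_conf_dir)
if not status:
    configuration.logger.error("auth settings save failed: %s" % msg)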
Example #7
def get_job_ids_with_specified_project_name(
    client_id,
    project_name,
    mrsl_files_dir,
    logger,
    ):
    """Helper for finding a job with a given project field"""

    client_dir = client_id_dir(client_id)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(mrsl_files_dir, client_dir)) \
         + os.sep

    # this is heavy :-/ we must loop all the mrsl files submitted by the user
    # to find the job ids belonging to the specified project

    matching_job_ids = []
    all_files = os.listdir(base_dir)

    for mrsl_file in all_files:
        job_dict = unpickle(base_dir + os.sep + mrsl_file, logger)
        if not job_dict:
            continue
        if job_dict.has_key('PROJECT'):
            if job_dict['PROJECT'] == project_name:
                matching_job_ids.append(job_dict['JOB_ID'])
    return matching_job_ids
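
Usage is straightforward; the project name here is made up:

job_ids = get_job_ids_with_specified_project_name(
    client_id, 'my-project', configuration.mrsl_files_dir,
    configuration.logger)
print "%d matching jobs" % len(job_ids)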
Example #8
def generate_user_key(configuration, client_id, key_filename, truncate=False):
    """Generate a new key and save it as key_filename in settings dir"""
    # TODO: switch to paramiko key generation?
    logger = configuration.logger
    key_dir = os.path.join(configuration.user_settings,
                           client_id_dir(client_id), user_keys_dir)
    key_path = os.path.join(key_dir, key_filename)
    makedirs_rec(key_dir, configuration)
    if os.path.exists(key_path) and not truncate:
        logger.error("user key %s already exists!" % key_path)
        return (False, 'user key %s already exists!' % key_filename)
    logger.debug("generating user key %s" % key_path)
    gen_proc = subprocess_popen([
        'ssh-keygen', '-t', default_key_type, '-b',
        '%d' % default_key_bits, '-f', key_path, '-N', '', '-C', key_filename
    ],
                                stdout=subprocess_pipe,
                                stderr=subprocess_pipe)
    # NOTE: communicate() first to avoid pipe deadlock on big output, then
    # read the exit code from returncode
    (out, err) = gen_proc.communicate()
    exit_code = gen_proc.returncode
    if exit_code != 0:
        logger.error("user key generation in %s failed: %s %s (%s)" % \
                     (key_path, out, err, exit_code))
        return (False, "user key generation in %s failed!" % key_filename)
    logger.info('done generating user key %s: %s : %s (%s)' % \
                (key_path, out, err, exit_code))
    pub_key = ''
    try:
        pub_fd = open(key_path + '.pub')
        pub_key = pub_fd.read()
        pub_fd.close()
    except Exception, exc:
        logger.error("user key generation %s did not create a pub key: %s" % \
                     (key_path, exc))
        return (False, "user key generation in %s failed!" % key_filename)
Example #9
def edit_vm(client_id, configuration, machine_name, machine_specs):
    """Updates the vm configuration for vm with given machine_name"""

    # Grab the base directory of the user

    client_dir = client_id_dir(client_id)
    user_home = os.path.abspath(os.path.join(configuration.user_home,
                                             client_dir))

    vms_conf_paths = glob(os.path.join(user_home, vm_base, machine_name,
                                       '*.cfg'))

    # Grab the configuration file defining the machine

    for conf_path in vms_conf_paths:
        vm_config = ConfigParser.ConfigParser()
        vm_config.read([conf_path])
        for (key, val) in machine_specs.items():
            if not isinstance(val, basestring) and isinstance(val, list):
                string_val = ''
                for entry in val:
                    string_val += '%s ' % entry
            else:
                string_val = val
            vm_config.set('MiG', key, string_val)
        conf_fd = open(conf_path, 'w')
        vm_config.write(conf_fd)
        conf_fd.close()
    return (True, '')
Example #10
def __cron_log(configuration, client_id, msg, level="info"):
    """Wrapper to send a single msg to user cron log file"""

    client_dir = client_id_dir(client_id)
    log_dir_path = os.path.join(configuration.user_home, client_dir,
                                cron_output_dir)
    log_path = os.path.join(log_dir_path, cron_log_name)
    if not os.path.exists(log_dir_path):
        try:
            os.makedirs(log_dir_path)
        except:
            pass
    cron_logger = logging.getLogger('cron')
    cron_logger.setLevel(logging.INFO)
    handler = logging.handlers.RotatingFileHandler(
        log_path, maxBytes=cron_log_size, backupCount=cron_log_cnt - 1)
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    handler.setFormatter(formatter)
    cron_logger.addHandler(handler)
    if level == 'error':
        cron_logger.error(msg)
    elif level == 'warning':
        cron_logger.warning(msg)
    else:
        cron_logger.info(msg)
    handler.flush()
    handler.close()
    cron_logger.removeHandler(handler)
Example #11
def get_allowed_path(configuration, client_id, path):
    """Check certificate data and path for either a valid user/server
    or a resource using a valid session id. If the check succeeds, the
    real path to the file is returned.
    """

    client_dir = client_id_dir(client_id)

    # Check cert and decide if it is a user, resource or server

    if not client_id:
        path_slash_stripped = path.lstrip("/")
        sessionid = path_slash_stripped[: path_slash_stripped.find("/")]

        # check that the sessionid is ok (does symlink exist?)

        if not os.path.islink(configuration.webserver_home + sessionid):
            raise Exception("Invalid session id!")

        target_dir = configuration.webserver_home + path_slash_stripped[: path_slash_stripped.rfind("/")]
        target_file = path_slash_stripped[path_slash_stripped.rfind("/") + 1 :]
    elif is_user(client_id, configuration.mig_server_home):
        real_path = os.path.normpath(os.path.join(configuration.user_home, client_dir, path))
        target_dir = os.path.dirname(real_path)
        target_file = os.path.basename(real_path)
    elif is_server(client_id, configuration.server_home):
        real_path = os.path.normpath(os.path.join(configuration.server_home, client_dir, path))
        target_dir = os.path.dirname(real_path)
        target_file = os.path.basename(real_path)
    else:
        raise Exception("Invalid credentials %s: no such user or server" % client_id)

    target_path = target_dir + "/" + target_file
    return target_path
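
The slicing in the anonymous-session branch is easy to misread; a standalone illustration of how it splits a session-prefixed path:

path = "/abc123sessionid/job_output/stdout.txt"
path_slash_stripped = path.lstrip("/")
print path_slash_stripped[: path_slash_stripped.find("/")]
# -> abc123sessionid
print path_slash_stripped[path_slash_stripped.rfind("/") + 1:]
# -> stdout.txt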
Example #12
def _parse_form_xfer(xfer, user_args, client_id, configuration):
    """Parse xfer request (i.e. copy, move or upload) file/dir entries from
    user_args.
    """
    _logger = configuration.logger
    files, rejected = [], []
    i = 0
    client_dir = client_id_dir(client_id)
    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                                            client_dir)) + os.sep
    xfer_pattern = 'freeze_%s_%%d' % xfer
    for i in xrange(max_freeze_files):
        if user_args.has_key(xfer_pattern % i):
            source_path = user_args[xfer_pattern % i][-1].strip()
            source_path = os.path.normpath(source_path).lstrip(os.sep)
            _logger.debug('found %s entry: %s' % (xfer, source_path))
            if not source_path:
                continue
            try:
                valid_path(source_path)
            except Exception, exc:
                rejected.append('invalid path: %s (%s)' % (source_path,
                                                           exc))
                continue
            # IMPORTANT: path must be expanded to abs for proper chrooting
            abs_path = os.path.abspath(
                os.path.join(base_dir, source_path))
            # Prevent out-of-bounds, and restrict some greedy targets
            if not valid_user_path(configuration, abs_path, base_dir, True):
                _logger.error('found illegal directory traversal %s entry: %s'
                              % (xfer, source_path))
                rejected.append('invalid path: %s (%s)' %
                                (source_path, 'illegal path!'))
                continue
            elif os.path.exists(abs_path) and os.path.samefile(abs_path,
                                                               base_dir):
                _logger.warning('refusing archival of entire user home %s: %s'
                                % (xfer, source_path))
                rejected.append('invalid path: %s (%s)' %
                                (source_path, 'entire home not allowed!'))
                continue
            elif in_vgrid_share(configuration, abs_path) == source_path:
                _logger.warning(
                    'refusing archival of entire vgrid shared folder %s: %s' %
                    (xfer, source_path))
                rejected.append('invalid path: %s (%s)' %
                                (source_path, 'entire %s share not allowed!'
                                 % configuration.site_vgrid_label))
                continue

            # expand any dirs recursively
            if os.path.isdir(abs_path):
                for (root, dirnames, filenames) in os.walk(abs_path):
                    for subname in filenames:
                        abs_sub = os.path.join(root, subname)
                        sub_base = root.replace(abs_path, source_path)
                        sub_path = os.path.join(sub_base, subname)
                        files.append((abs_sub, sub_path.lstrip(os.sep)))
            else:
                files.append((abs_path, source_path.lstrip(os.sep)))
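
The recursive expansion rebuilds archive-relative names with a plain string replace; a standalone illustration of that rewrite:

import os

abs_path = '/home/user/data'      # expanded form of the user's entry
source_path = 'data'              # what the user actually submitted
root = '/home/user/data/sub'      # a directory yielded by os.walk
subname = 'file.txt'
sub_base = root.replace(abs_path, source_path)
print os.path.join(sub_base, subname).lstrip(os.sep)
# -> data/sub/file.txt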
Example #13
def _load_auth_pw_keys(client_id,
                       configuration,
                       proto,
                       proto_conf_dir,
                       allow_missing=True):
    """Helper to load  keys and password for proto (ssh/davs/ftps/seafile)
    from user proto_conf_dir. Optional allow_missing is used to toggle the log
    errors about missing pw/keys files, which may not already exist.
    """
    section_dict = {}
    client_dir = client_id_dir(client_id)
    keys_path = os.path.join(configuration.user_home, client_dir,
                             proto_conf_dir, authkeys_filename)
    pw_path = os.path.join(configuration.user_home, client_dir, proto_conf_dir,
                           authpasswords_filename)
    digest_path = os.path.join(configuration.user_home, client_dir,
                               proto_conf_dir, authdigests_filename)
    try:
        keys_fd = open(keys_path)
        section_dict['authkeys'] = keys_fd.read()
        keys_fd.close()
    except Exception, exc:
        if not allow_missing:
            configuration.logger.error("load %s publickeys failed: %s" %
                                       (proto, exc))
Example #14
def clean_arc_job(job_dict,
                  status,
                  msg,
                  configuration,
                  logger,
                  kill=True,
                  timestamp=None):
    """Cleaning remainder of an executed ARC job:
        - delete from ARC (and possibly kill the job, parameter)
        - delete two symbolic links (user dir and mrsl file)
        - write status and timestamp into mrsl 
    """

    logger.debug('Cleanup for ARC job %s, status %s' %
                 (job_dict['JOB_ID'], status))

    if not status in ['FINISHED', 'CANCELED', 'FAILED']:
        logger.error('inconsistent cleanup request: %s for job %s' % \
                     (status, job_dict))
        return

    # done by the caller...
    # executing_queue.dequeue_job_by_id(job_dict['JOB_ID'])

    if not timestamp:
        timestamp = time.gmtime()
    client_dir = client_id_dir(job_dict['USER_CERT'])

    # clean up in ARC
    try:
        userdir = os.path.join(configuration.user_home, client_dir)
        arcsession = arc.Ui(userdir)
    except Exception, err:
        logger.error('Error cleaning up ARC job: %s' % err)
        logger.debug('Job was: %s' % job_dict)
Example #15
def handle_update(configuration,
                  client_id,
                  resource_id,
                  user_vars,
                  output_objects,
                  new_resource=False):
    """Update existing resource configuration from request"""

    logger = configuration.logger
    client_dir = client_id_dir(client_id)
    tmp_id = "%s.%s" % (user_vars['HOSTURL'], time.time())
    pending_file = os.path.join(configuration.resource_pending, client_dir,
                                tmp_id)
    conf_file = os.path.join(configuration.resource_home, resource_id,
                             'config.MiG')
    output = ''
    try:
        logger.info('write to file: %s' % pending_file)
        write_resource_config(configuration, user_vars, pending_file)
    except Exception, err:
        logger.error('Resource conf %s could not be written: %s' %
                     (pending_file, err))
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Could not write configuration!'
        })
        return False
Example #16
def lookup_full_user(username):
    """Look up the full user identity for username consisting of e.g. just an
    email address.
    The method to extract the full identity depends on the back end database.
    If username matches either the openid link, the full ID or the dir version
    from it, a tuple with the expanded username and the full user dictionary
    is returned.
    On no match a tuple with the unchanged username and an empty dictionary
    is returned.
    """
    # print "DEBUG: lookup full user for %s" % username
    
    db_path = os.path.join(configuration.mig_code_base, 'server', 
                           'MiG-users.db')
    # print "DEBUG: Loading user DB"
    id_map = load_user_db(db_path)

    login_url = os.path.join(configuration.user_openid_providers[0], username)
    distinguished_name = get_openid_user_dn(configuration, login_url)

    # print "DEBUG: compare against %s" % full_id
    if distinguished_name in id_map:
        url_friendly = client_id_dir(distinguished_name)
        return (url_friendly, id_map[distinguished_name])
    return (username, {})
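
A hedged usage sketch with a made-up email address:

(url_friendly, user_dict) = lookup_full_user('jane@example.org')
if user_dict:
    print "expanded to %s" % url_friendly
else:
    print "no matching user"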
Example #17
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not safe_handler(configuration, 'post', op_name, client_id,
                        get_csrf_limit(configuration), accepted):
        output_objects.append({
            'object_type': 'error_text',
            'text': '''Only accepting
CSRF-filtered POST requests to prevent unintended updates'''
        })
        return (output_objects, returnvalues.CLIENT_ERROR)

    if not configuration.site_enable_jobs:
        output_objects.append({
            'object_type': 'error_text',
            'text': '''Job execution is not enabled on this system'''
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    external_dict = get_keywords_dict(configuration)
    mrsl = fields_to_mrsl(configuration, user_arguments_dict, external_dict)

    tmpfile = None

    # save to temporary file

    try:
        (filehandle, real_path) = tempfile.mkstemp(text=True)
        relative_path = os.path.basename(real_path)
        os.write(filehandle, mrsl)
        os.close(filehandle)
    except Exception, err:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Failed to write temporary mRSL file: %s' % err
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)
Example #18
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text'
             : 'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    unique_resource_name = accepted['unique_resource_name'][-1]
    resconfig = accepted['resconfig'][-1]

    output_objects.append({'object_type': 'header', 'text'
                          : 'Trying to Update resource configuration'})

    if not is_owner(client_id, unique_resource_name,
                    configuration.resource_home, logger):
        logger.error(client_id + ' is not an owner of '
                      + unique_resource_name + ': update rejected!')
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'You must be an owner of '
                               + unique_resource_name
                               + ' to update the configuration!'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    # TODO: race if two confs are uploaded concurrently!

    host_url, host_identifier = unique_resource_name.rsplit('.', 1)
    pending_file = os.path.join(configuration.resource_home,
                                unique_resource_name, 'config.tmp')

    # write new proposed config file to disk
    try:
        logger.info('write to file: %s' % pending_file)
        if not write_file(resconfig, pending_file, logger):
            output_objects.append({'object_type': 'error_text', 'text':
                                   'Could not write: %s' % pending_file})
            return (output_objects, returnvalues.SYSTEM_ERROR)
    except Exception, err:
        logger.error('Resource conf %s could not be written: %s' % \
                     (pending_file, err))
        output_objects.append({'object_type': 'error_text', 'text':
                               'Could not write configuration!'})
        return (output_objects, returnvalues.SYSTEM_ERROR)
Example #19
def refresh_job_stats(configuration, client_id):
    """Refresh job stats for specified user"""
    dirty = False
    client_dir = client_id_dir(client_id)
    job_base = os.path.join(configuration.mrsl_files_dir, client_dir)
    stats_base = os.path.join(configuration.user_cache, client_dir)
    stats_path = os.path.join(stats_base, "job-stats.pck")
    lock_path = stats_path + ".lock"

    try:
        os.makedirs(stats_base)
    except:
        pass

    lock_handle = open(lock_path, 'a')

    fcntl.flock(lock_handle.fileno(), fcntl.LOCK_EX)

    job_stats = {PARSE: 0, QUEUED: 0, EXECUTING: 0, FINISHED: 0, RETRY: 0,
                 CANCELED: 0, EXPIRED: 0, FAILED: 0, FROZEN: 0}
    try:
        stats = load(stats_path)
        stats_stamp = os.path.getmtime(stats_path)
        # Backwards compatible update
        job_stats.update(stats[JOBS])
        stats[JOBS] = job_stats
    except IOError:
        configuration.logger.warn("No job stats to load - ok first time")
        stats = {JOBS: job_stats}
        stats_stamp = -1

    now = time.time()
    if now < stats_stamp + JOB_REFRESH_DELAY:
        lock_handle.close()
        return stats        

    # Inspect all jobs in user job dir and update the ones that changed
    # since last stats run
    for name in os.listdir(job_base):
        if stats.has_key(name) and stats[name]["STATUS"] in FINAL_STATES:
            continue

        job_path = os.path.join(job_base, name)
        job_stamp = os.path.getmtime(job_path)
        
        if stats.has_key(name) and job_stamp < stats_stamp:
            continue

        dirty = True
        job = load(job_path)
        update_job_stats(stats, name, job)

    if dirty:
        try:
            dump(stats, stats_path)
            stats_stamp = os.path.getmtime(stats_path)
        except Exception, exc:
            configuration.logger.error("Could not save stats cache: %s" % exc)
Example #20
def get_status_dir(configuration, client_id, transfer_id=''):
    """Lookup the status directory for transfers on behalf of client_id.
    The optional transfer_id is used to get the explicit status dir for that
    particular transfer rather than the parent status directory.
    This is used for writing the global transfer log as well as individual
    status, stdout, stderr and possibly transfer.log files for the transfers.
    """
    return os.path.join(configuration.user_home, client_id_dir(client_id),
                        transfer_output_dir, transfer_id).rstrip(os.sep)
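
Note the rstrip: with an empty transfer_id, os.path.join leaves a trailing separator, and stripping it makes the call return the parent status dir itself. A standalone illustration:

import os

base = os.path.join('/home/user', 'transfer_output', '')
print base                  # /home/user/transfer_output/
print base.rstrip(os.sep)   # /home/user/transfer_output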
Example #21
def migrate_job(config, job, peer):
    protocol = 'https'
    port = ''

    server = peer['fqdn']

    # Remove schedule hint from job before migration

    del job['SCHEDULE_HINT']

    # Make sure legacy jobs don't fail

    if not job.has_key('MIGRATE_COUNT'):
        job['MIGRATE_COUNT'] = str(0)

    # Add or increment migration counter

    migrate_count = int(job['MIGRATE_COUNT']) + 1
    job['MIGRATE_COUNT'] = str(migrate_count)

    # TODO: only upload if job is not already replicated at
    # remote server
    # TMP!

    steal_job = False

    if not steal_job:

        # upload pickled job to server

        client_dir = client_id_dir(job['USER_CERT'])
        # NOTE: os.path.join avoids relying on a trailing slash in
        # config.mrsl_files_dir
        mrsl_filename = os.path.join(config.mrsl_files_dir, client_dir,
                                     job['JOB_ID'] + '.mRSL')
        result = pickle(job, mrsl_filename, config.logger)
        if not result:
            config.logger.error('Aborting migration of job %s (%s)',
                                job['JOB_ID'], result)
            return False

        dest = mrsl_filename

        # TMP!
        # upload_reply = put_data(config, mrsl_filename, protocol, server, port, dest)

        config.logger.warning('Actual migration disabled until fully supported')
        upload_reply = (-1,
                        'Actual migration disabled until fully supported')
        if upload_reply[0] != http_success:
            return False

    # migration_msg = ""
    # migration_reply = put_data(config, protocol, server, port, migration_msg)

    return True
Example #22
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    user_dir = os.path.join(configuration.user_home, client_id_dir(client_id))

    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = 'ARC Queues'
    output_objects.append({
        'object_type': 'header',
        'text': 'Available ARC queues'
    })

    if not configuration.site_enable_griddk:
        output_objects.append({
            'object_type': 'text',
            'text': '''Grid.dk features are disabled on this site.
Please contact the site admins %s if you think they should be enabled.
''' % configuration.admin_email
        })
        return (output_objects, returnvalues.OK)

    # could factor out from here, to be usable from outside
    if not configuration.arc_clusters:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'No ARC support!'
        })
        return (output_objects, returnvalues.ERROR)
    try:
        session = arc.Ui(user_dir)
        queues = session.getQueues()

    except arc.NoProxyError, err:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Error while retrieving: %s' % err.what()
        })
        output_objects += arc.askProxy()
        return (output_objects, returnvalues.ERROR)
Example #23
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not configuration.site_enable_jobs:
        output_objects.append({
            'object_type': 'error_text',
            'text': '''Job execution is not enabled on this system'''
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    status = returnvalues.OK

    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = 'Job Manager'
    user_settings = title_entry.get('user_settings', {})
    title_entry['style'] = css_tmpl(configuration, user_settings)
    csrf_map = {}
    method = 'post'
    limit = get_csrf_limit(configuration)
    for target_op in csrf_backends:
        csrf_map[target_op] = make_csrf_token(configuration, method, target_op,
                                              client_id, limit)
    (add_import, add_init, add_ready) = js_tmpl_parts(csrf_map)
    title_entry['script']['advanced'] += add_import
    title_entry['script']['init'] += add_init
    title_entry['script']['ready'] += add_ready

    output_objects.append({'object_type': 'header', 'text': 'Job Manager'})
    output_objects.append({
        'object_type': 'table_pager',
        'entry_name': 'jobs',
        'default_entries': default_pager_entries,
        'form_append': pager_append()
    })
    output_objects.append({'object_type': 'html_form', 'text': html_post()})

    return (output_objects, status)
Example #24
def delete_user_key(configuration, client_id, key_filename):
    """Delete the user key key_filename in settings dir"""
    key_dir = os.path.join(configuration.user_settings,
                           client_id_dir(client_id), user_keys_dir)
    pub_filename = "%s.pub" % key_filename
    status, msg = True, ""
    for filename in (key_filename, pub_filename):
        path = os.path.join(key_dir, filename)
        if not delete_file(path, configuration.logger):
            msg += "removal of user key '%s' failed! \n" % filename
            status = False
    return (status, msg)
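
Usage sketch; the key filename is illustrative:

(status, msg) = delete_user_key(configuration, client_id, 'transfer_rsa')
if not status:
    configuration.logger.warning(msg)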
Example #25
def load_user_keys(configuration, client_id):
    """Load a list of generated/imported keys from settings dir. Each item is
    a dictionary with key details and the public key.
    """
    logger = configuration.logger
    user_keys = []
    keys_dir = os.path.join(configuration.user_settings,
                            client_id_dir(client_id), user_keys_dir)
    try:
        hits = os.listdir(keys_dir)
    except Exception, exc:
        # This is common for users without transfer keys
        logger.debug("could not find user keys in %s: %s" % (keys_dir, exc))
        return user_keys
Example #26
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    status = returnvalues.OK
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    if not correct_handler('POST'):
        output_objects.append(
            {'object_type': 'error_text', 'text'
             : 'Only accepting POST requests to prevent unintended updates'})
        return (output_objects, returnvalues.CLIENT_ERROR)

    save_as_default = (accepted['save_as_default'][-1] != 'False')
    external_dict = get_keywords_dict(configuration)
    mrsl = fields_to_mrsl(configuration, user_arguments_dict, external_dict)

    tmpfile = None

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep

    # save to temporary file

    try:
        (filehandle, real_path) = tempfile.mkstemp(text=True)
        relative_path = os.path.basename(real_path)
        os.write(filehandle, mrsl)
        os.close(filehandle)
    except Exception, err:
        output_objects.append({'object_type': 'error_text',
                               'text':
                               'Failed to write temporary mRSL file: %s' % \
                               err})
        return (output_objects, returnvalues.SYSTEM_ERROR)
Example #27
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False, op_title=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)
    job_id_list = accepted['job_id']
    external_dict = mrslkeywords.get_keywords_dict(configuration)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = \
        os.path.abspath(os.path.join(configuration.mrsl_files_dir,
                        client_dir)) + os.sep

    status = returnvalues.OK
    for job_id in job_id_list:

        # job = Job()

        filepath = os.path.join(base_dir, job_id)
        filepath += '.mRSL'

        (new_job_obj_status, new_job_obj) = \
            create_job_object_from_pickled_mrsl(filepath, logger,
                external_dict)
        if not new_job_obj_status:
            output_objects.append({'object_type': 'error_text', 'text'
                                  : new_job_obj})
            status = returnvalues.CLIENT_ERROR
        else:

            # return new_job_obj

            output_objects.append({'object_type': 'jobobj', 'jobobj'
                                  : new_job_obj})
    return (output_objects, status)
Example #28
def load_atjobs(client_id, configuration, allow_missing=True):
    """Load entries from plain user atjobs file"""
    _logger = configuration.logger
    client_dir = client_id_dir(client_id)
    atjobs_path = os.path.join(configuration.user_settings, client_dir,
                               atjobs_name)
    try:
        atjobs_fd = open(atjobs_path, "rb")
        atjobs_contents = atjobs_fd.read()
        atjobs_fd.close()
    except Exception, exc:
        if not allow_missing:
            _logger.error('failed reading %s atjobs file: %s' % (client_id,
                                                                 exc))
        atjobs_contents = ''
Example #29
def delete_vm(client_id, configuration, machine_name):
    """Deletes the vm dir with configuration and images for vm with given
    machine_name"""

    # Grab the base directory of the user

    client_dir = client_id_dir(client_id)
    user_home = os.path.abspath(os.path.join(configuration.user_home,
                                             client_dir))
    vms_machine_path = os.path.join(user_home, vm_base, machine_name)
    msg = ''
    success = remove_rec(vms_machine_path, configuration)
    if not success:
        msg = "Error while removing %s" % machine_name
    return (success, msg)
Example #30
def parse_and_save_pickle(source, destination, keywords, client_id,
                          configuration, strip_space, strip_comments):
    """Use conf parser to parse settings in mRSL file and save resulting
    dictionary in a pickled file in user_settings.
    """
    client_dir = client_id_dir(client_id)
    result = parser.parse(source, strip_space, strip_comments)

    (status, parsemsg) = parser.check_types(result, keywords, configuration)

    try:
        os.remove(source)
    except Exception, err:
        msg = 'Exception removing temporary file %s, %s'\
            % (source, err)
Example #31
def load_share_links(configuration, client_id):
    """Find all share links owned by user"""
    logger = configuration.logger
    logger.debug("load share links for %s" % client_id)
    try:
        sharelinks_path = os.path.join(configuration.user_settings,
                                       client_id_dir(client_id),
                                       sharelinks_filename)
        logger.debug("load sharelinks from %s" % sharelinks_path)
        if os.path.isfile(sharelinks_path):
            sharelinks = load(sharelinks_path)
        else:
            sharelinks = {}
    except Exception, exc:
        return (False, "could not load saved share links: %s" % exc)
Example #32
def load_data_transfers(configuration, client_id):
    """Find all data transfers owned by user"""
    logger = configuration.logger
    logger.debug("load transfers for %s" % client_id)
    try:
        transfers_path = os.path.join(configuration.user_settings,
                                      client_id_dir(client_id),
                                      datatransfers_filename)
        logger.debug("load transfers from %s" % transfers_path)
        if os.path.isfile(transfers_path):
            transfers = load(transfers_path)
        else:
            transfers = {}
    except Exception, exc:
        return (False, "could not load saved data transfers: %s" % exc)
Example #33
def load_crontab(client_id, configuration, allow_missing=True):
    """Load entries from plain user crontab file"""
    _logger = configuration.logger
    client_dir = client_id_dir(client_id)
    crontab_path = os.path.join(configuration.user_settings, client_dir,
                                crontab_name)
    try:
        crontab_fd = open(crontab_path, "rb")
        crontab_contents = crontab_fd.read()
        crontab_fd.close()
    except Exception, exc:
        if not allow_missing:
            _logger.error('failed reading %s crontab file: %s' % (client_id,
                                                                  exc))
        crontab_contents = ''
Example #34
def arc_job_status(job_dict, configuration, logger):
    """Retrieve status information for a job submitted to ARC.
       Status is returned as a string. In case of failure, returns 
       'UNKNOWN' and logs the error."""

    logger.debug('Checking ARC job status for %s' % job_dict['JOB_ID'])

    userdir = os.path.join(configuration.user_home, \
                           client_id_dir(job_dict['USER_CERT']))
    try:
        jobinfo = {'status': 'UNKNOWN(TO FINISH)'}
        session = arc.Ui(userdir)
        jobinfo = session.jobStatus(job_dict['EXE'])
    except arc.ARCWrapperError, err:
        logger.error('Error during ARC status retrieval: %s'\
                     % err.what())
Example #35
def load_section_helper(client_id, configuration, section_filename,
                        section_keys, include_meta=False):
    """Load settings section from pickled file. Optional include_meta
    controls the inclusion of meta data like creator and creation time.
    """

    client_dir = client_id_dir(client_id)
    section_path = os.path.join(configuration.user_settings, client_dir,
                                section_filename)
    section_dict = unpickle(section_path, configuration.logger)
    if section_dict and not include_meta:
        real_keys = section_keys
        for key in section_dict.keys():
            if not key in real_keys:
                del section_dict[key]
    return section_dict
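
A usage sketch of the filtering; the section name and keys are made up for illustration:

# Only the listed keys survive when include_meta is False
section = load_section_helper(client_id, configuration, 'notify',
                              ['EMAIL', 'JABBER'], include_meta=False)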
Example #36
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id, op_header=False)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    user_dir = os.path.join(configuration.user_home, 
                            client_id_dir(client_id))

    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = 'ARC Queues'
    output_objects.append({'object_type': 'header', 'text'
                          : 'Available ARC queues'})

    if not configuration.site_enable_griddk:
        output_objects.append({'object_type': 'text', 'text':
                               '''Grid.dk features are disabled on this site.
Please contact the Grid admins %s if you think they should be enabled.
''' % configuration.admin_email})
        return (output_objects, returnvalues.OK)

    # could factor out from here, to be usable from outside
    if not configuration.arc_clusters:
        output_objects.append({'object_type': 'error_text', 'text':
                               'No ARC support!'})
        return (output_objects, returnvalues.ERROR)
    try:
        session = arc.Ui(user_dir)
        queues = session.getQueues()

    except arc.NoProxyError, err:
        output_objects.append({'object_type': 'error_text', 'text'
                              : 'Error while retrieving: %s' % err.what()
                              })
        output_objects += arc.askProxy()
        return (output_objects, returnvalues.ERROR)
Example #37
def parse_and_save_crontab(crontab, client_id, configuration):
    """Validate and write the crontab for client_id"""
    client_dir = client_id_dir(client_id)
    crontab_path = os.path.join(configuration.user_settings, client_dir,
                                crontab_name)
    status, msg = True, ''
    crontab_entries = parse_crontab_contents(configuration, client_id,
                                             crontab.splitlines())
    try:
        crontab_fd = open(crontab_path, "wb")
        # TODO: filter out broken lines before write?
        crontab_fd.write(crontab)
        crontab_fd.close()
        msg = "Found and saved %d valid crontab entries" % len(crontab_entries)
    except Exception, exc:
        status = False
        msg = 'ERROR: writing %s crontab file: %s' % (client_id, exc)
Example #38
def parse_and_save_atjobs(atjobs, client_id, configuration):
    """Validate and write the atjobs for client_id"""
    client_dir = client_id_dir(client_id)
    atjobs_path = os.path.join(configuration.user_settings, client_dir,
                               atjobs_name)
    status, msg = True, ''
    atjobs_entries = parse_atjobs_contents(configuration, client_id,
                                           atjobs.splitlines())
    try:
        atjobs_fd = open(atjobs_path, "wb")
        # TODO: filter out broken lines before write?
        atjobs_fd.write(atjobs)
        atjobs_fd.close()
        msg = "Found and saved %d valid atjobs entries" % len(atjobs_entries)
    except Exception, exc:
        status = False
        msg = 'ERROR: writing %s atjobs file: %s' % (client_id, exc)
Example #39
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
            initialize_main_variables(client_id, op_header=False)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    status = returnvalues.OK

    all_paths = accepted['path']
    entry_path = all_paths[-1]
    title_entry = find_entry(output_objects, 'title')
    title_entry['text'] = 'File Manager'
    title_entry['style'] = css_tmpl(configuration)
    if 'submitjob' in extract_menu(configuration, title_entry):
        enable_submit = 'true'
    else:
        enable_submit = 'false'
    title_entry['javascript'] = js_tmpl(entry_path, enable_submit,
                                        str(configuration.site_enable_preview))
    
    output_objects.append({'object_type': 'header', 'text': 'File Manager' })
    output_objects.append({'object_type': 'html_form', 'text':
                           html_tmpl(configuration, title_entry)})

    if len(all_paths) > 1:
        output_objects.append({'object_type': 'sectionheader', 'text':
                               'All requested paths:'})
        for path in all_paths:
            output_objects.append({'object_type': 'link', 'text': path,
                                   'destination': 'fileman.py?path=%s' % path})
            output_objects.append({'object_type': 'text', 'text': ''})

    return (output_objects, status)
Example #40
def modify_data_transfers(configuration,
                          client_id,
                          transfer_dict,
                          action,
                          transfers=None):
    """Modify data transfers with given action and transfer_dict for client_id.
    In practice this a shared helper to add or remove transfers from the saved
    data transfers. The optional transfers argument can be used to pass an
    already loaded dictionary of saved transfers to avoid reloading.
    """
    logger = configuration.logger
    transfer_id = transfer_dict['transfer_id']
    if transfers is None:
        (load_status, transfers) = load_data_transfers(configuration,
                                                       client_id)
        if not load_status:
            logger.error("modify_data_transfers failed in load: %s" % \
                         transfers)
            return (load_status, transfers)

    if action == "create":
        now = datetime.datetime.now()
        transfer_dict.update({
            'created_timestamp': now,
            'updated_timestamp': now,
            'owner': client_id,
        })
        transfers[transfer_id] = transfer_dict
    elif action == "modify":
        transfer_dict['updated_timestamp'] = datetime.datetime.now()
        transfers[transfer_id].update(transfer_dict)
    elif action == "delete":
        del transfers[transfer_id]
    else:
        return (False, "Invalid action %s on data transfers" % action)

    try:
        transfers_path = os.path.join(configuration.user_settings,
                                      client_id_dir(client_id),
                                      datatransfers_filename)
        dump(transfers, transfers_path)
        res_dir = get_status_dir(configuration, client_id, transfer_id)
        makedirs_rec(res_dir, configuration)
    except Exception, err:
        logger.error("modify_data_transfers failed: %s" % err)
        return (False, 'Error updating data transfers: %s' % err)
    # NOTE: success return assumed; the original snippet was truncated here
    return (True, transfers)
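
A hedged call sketch for adding a transfer (only transfer_id is required by the code above; the protocol field is an illustrative extra):

new_transfer = {'transfer_id': 'transfer-0',  # required by the helper
                'protocol': 'sftp'}           # illustrative extra field
(ok, result) = modify_data_transfers(configuration, client_id, new_transfer,
                                     'create')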
Exemple #47
0
def parse_form_upload(user_args, client_id, configuration):
    """Parse upload file entries from user_args"""
    files, rejected = [], []
    i = 0
    client_dir = client_id_dir(client_id)
    for i in xrange(max_freeze_files):
        if user_args.has_key('freeze_upload_%d' % i):
            file_item = user_args['freeze_upload_%d' % i]
            filename = user_args.get('freeze_upload_%dfilename' % i,
                                     '')
            if not filename.strip():
                continue
            filename = strip_dir(filename)
            try:
                valid_path(filename)
            except Exception, exc:
                rejected.append('invalid filename: %s (%s)' % (filename, exc))
                continue
            files.append((filename, file_item[0]))
    return (files, rejected)
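
The helper expects multipart form fields following the freeze_upload_%d naming seen above; a hedged sketch of the input shape:

user_args = {
    'freeze_upload_0': ['file contents'],            # list-wrapped payload
    'freeze_upload_0filename': ['data/result.txt'],  # client-supplied path
}
(files, rejected) = parse_form_upload(user_args, client_id, configuration)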
Exemple #48
0
def enqueue_vm(client_id, configuration, machine_name, machine_req):
    """Submit a machine job based on machine definition file and overrides
    from machine_req.
    Returns the job submit result, a 3-tuple of (status, msg, job_id)
    """

    specs = default_vm_specs(configuration)
    specs.update(machine_req)

    # Setup paths - filter above prevents directory traversal

    client_dir = client_id_dir(client_id)
    user_home = os.path.abspath(os.path.join(configuration.user_home,
                                             client_dir))
    user_vms_home = os.path.join(user_home, vm_base)
    vm_home = os.path.join(user_vms_home, machine_name)
    location_fd = open(os.path.join(vm_home, sys_location), 'r')
    (sys_re, sys_base, sys_disk) = location_fd.read().split(':')
    location_fd.close()
    data_disk = '%(os)s-%(vm_arch)s-data.%(disk_format)s' % specs
    run_script = 'run%(hypervisor)svm.sh' % specs

    specs.update({'name': machine_name, 'data_disk': data_disk, 'run_script':
                  run_script, 'vm_base': vm_base, 'sys_re': sys_re, 'sys_base':
                  sys_base, 'sys_disk': sys_disk})
    if specs['hypervisor_re']:
        specs['runtime_env'].append(specs['hypervisor_re'])
    if specs['sys_re']:
        specs['runtime_env'].append(specs['sys_re'])
    
    # Generate the mrsl and write to a temp file which is removed on close

    mrsl = mig_vbox_deploy_job(client_id, configuration, machine_name,
                                 specs)
    mrsl_fd = NamedTemporaryFile()
    mrsl_fd.write(mrsl)
    mrsl_fd.flush()

    # Submit job and clean up

    res = new_job(mrsl_fd.name, client_id, configuration, False, True)
    mrsl_fd.close()
    return res
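
A hedged override example (the keys must match spec fields from default_vm_specs; the names below are assumptions):

machine_req = {'memory': 2048, 'cpu_count': 2}  # assumed spec field names
(status, msg, job_id) = enqueue_vm(client_id, configuration, 'mymachine',
                                   machine_req)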
Exemple #50
0
def _load_auth_pw_keys(client_id, configuration, proto, proto_conf_dir):
    """Helper to load  keys and password for proto (ssh/davs/ftps) from user
    proto_conf_dir.
    """
    section_dict = {}
    client_dir = client_id_dir(client_id)
    keys_path = os.path.join(configuration.user_home, client_dir,
                             proto_conf_dir, authkeys_filename)
    pw_path = os.path.join(configuration.user_home, client_dir,
                           proto_conf_dir, authpasswords_filename)
    digest_path = os.path.join(configuration.user_home, client_dir,
                               proto_conf_dir, authdigests_filename)
    try:
        keys_fd = open(keys_path)
        section_dict['authkeys'] = keys_fd.read()
        keys_fd.close()
    except Exception, exc:
        configuration.logger.error("load %s publickeys failed: %s" % (proto,
                                                                      exc))
    # NOTE: the password and digest loading below is assumed to mirror the key
    # loading above; the original snippet was truncated at this point
    try:
        pw_fd = open(pw_path)
        section_dict['authpasswords'] = pw_fd.read()
        pw_fd.close()
    except Exception, exc:
        configuration.logger.error("load %s passwords failed: %s" % (proto,
                                                                     exc))
    try:
        digest_fd = open(digest_path)
        section_dict['authdigests'] = digest_fd.read()
        digest_fd.close()
    except Exception, exc:
        configuration.logger.error("load %s digests failed: %s" % (proto, exc))
    return section_dict
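
Hedged usage, assuming the conventional ssh settings directory name:

ssh_conf = _load_auth_pw_keys(client_id, configuration, 'ssh', '.ssh')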
Exemple #52
0
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep

    status = returnvalues.OK

    pid = 0
    pidfile = os.path.join(base_dir, '.Xvnc4.pid')
    try:
        fd = open(pidfile, 'r')
        pid = int(fd.readline())
        fd.close()
        os.remove(pidfile)
        os.kill(pid, 9)
        output_objects.append({'object_type': 'text', 'text': 'stopped vnc'})
    except Exception, err:
        logger.error('Unable to extract pid and kill vnc process: %s'
                      % err)
        status = returnvalues.CLIENT_ERROR
        output_objects.append({'object_type': 'text', 'text':
                               'failed to stop vnc'})
    return (output_objects, status)
Exemple #53
0
def pack_archive(
    configuration,
    client_id,
    src,
    dst,
    ):
    """Inside the user home of client_id: pack the src_path into a zip or tar
    archive in dst. Both src and dst are expected to be relative
    paths.
    Please note that src and dst should be checked for illegal directory
    traversal attempts before getting here.
    """
    logger = configuration.logger
    msg = ''
    status = True
    client_dir = client_id_dir(client_id)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep
    real_src = os.path.join(base_dir, src.lstrip(os.sep))
    real_dst = os.path.join(base_dir, dst.lstrip(os.sep))

    # Pack in same path with zip extension unless dst is given

    if not dst:
        real_dst = real_src + '.zip'

    # create sub dir(s) if missing

    zip_entry_dir = os.path.dirname(real_dst)
    if not os.path.isdir(zip_entry_dir):
        logger.info("make zip parent dir: %s" % zip_entry_dir)
        msg += 'Creating dir %s . ' % zip_entry_dir
        try:
            os.makedirs(zip_entry_dir, 0775)
        except Exception, exc:
            logger.error("create directory failed: %s" % exc)
            msg += 'Error creating parent directory %s! ' % exc
            return (False, msg)
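
As the snippet shows, an empty dst makes the archive land next to the source; a hedged call sketch:

# dst='' selects real_src + '.zip', i.e. 'results' is packed to 'results.zip'
(ok, msg) = pack_archive(configuration, client_id, 'results', '')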
Exemple #54
0
def arc_job_status(
    job_dict,
    configuration,
    logger
    ):
    """Retrieve status information for a job submitted to ARC.
       Status is returned as a string. In case of failure, returns 
       'UNKNOWN' and logs the error."""
    
    logger.debug('Checking ARC job status for %s' % job_dict['JOB_ID'])

    userdir = os.path.join(configuration.user_home, \
                           client_id_dir(job_dict['USER_CERT']))
    try:
        jobinfo = {'status':'UNKNOWN(TO FINISH)'}
        session = arc.Ui(userdir)
        jobinfo = session.jobStatus(job_dict['EXE'])
    except arc.ARCWrapperError, err:
        logger.error('Error during ARC status retrieval: %s' % err.what())
    # NOTE: assumed final return, matching the docstring ('UNKNOWN' on failure)
    return jobinfo['status']
Exemple #55
0
def handle_update(configuration, client_id, resource_id, user_vars,
                    output_objects, new_resource=False):
    """Update existing resource configuration from request"""

    logger = configuration.logger
    client_dir = client_id_dir(client_id)
    tmp_id = "%s.%s" % (user_vars['HOSTURL'], time.time())
    pending_file = os.path.join(configuration.resource_pending, client_dir,
                                tmp_id)
    conf_file = os.path.join(configuration.resource_home, resource_id,
                             'config.MiG')
    output = ''
    try:
        logger.info('write to file: %s' % pending_file)
        write_resource_config(configuration, user_vars, pending_file)
    except Exception, err:
        logger.error('Resource conf %s could not be written: %s' % \
                     (pending_file, err))
        output_objects.append({'object_type': 'error_text', 'text':
                               'Could not write configuration!'})
        return False
Exemple #56
0
def unpack_archive(
    configuration,
    client_id,
    src,
    dst,
    ):
    """Inside the user home of client_id: unpack the src zip or tar
    archive into the dst dir. Both src and dst are expected to be relative
    paths.
    Please note that src and dst should be checked for illegal directory
    traversal attempts before getting here.
    """
    client_dir = client_id_dir(client_id)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep
    real_src = os.path.join(base_dir, src.lstrip(os.sep))
    return handle_package_upload(real_src, src, client_id,
                                 configuration, False, dst)
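
A hedged call sketch (both paths are relative to the user home and must be pre-validated, as the docstring warns):

result = unpack_archive(configuration, client_id, 'uploads/data.zip',
                        'extracted')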
Exemple #57
0
def _parse_form_xfer(xfer, user_args, client_id, configuration):
    """Parse xfer request (i.e. copy, move or upload) file/dir entries from
    user_args.
    """
    files, rejected = [], []
    i = 0
    client_dir = client_id_dir(client_id)
    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                               client_dir)) + os.sep
    xfer_pattern = 'freeze_%s_%%d' % xfer 
    for i in xrange(max_freeze_files):
        if user_args.has_key(xfer_pattern % i):
            source_path = user_args[xfer_pattern % i][-1].strip()
            configuration.logger.debug('found %s entry: %s' % (xfer,
                                                               source_path))
            if not source_path:
                continue
            try:
                valid_path(source_path)
            except Exception, exc:
                rejected.append('invalid path: %s (%s)' % (source_path,
                                                           exc))
                continue
            source_path = os.path.normpath(source_path).lstrip(os.sep)
            real_path = os.path.abspath(os.path.join(base_dir, source_path))
            if not valid_user_path(real_path, base_dir, True):
                rejected.append('invalid path: %s (%s)' % \
                                (source_path, 'illegal path!'))
                continue
            # expand any dirs recursively
            if os.path.isdir(real_path):
                for (root, dirnames, filenames) in os.walk(real_path):
                    for subname in filenames:
                        real_sub = os.path.join(root, subname)
                        sub_base = root.replace(real_path, source_path)
                        sub_path = os.path.join(sub_base, subname)
                        files.append((real_sub, sub_path))
            else:
                files.append((real_path, source_path))
    return (files, rejected)
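
A hedged input sketch for a copy transfer (field names follow the freeze_%s_%d pattern above):

user_args = {'freeze_copy_0': ['dir/subdir']}  # illustrative relative path
(files, rejected) = _parse_form_xfer('copy', user_args, client_id,
                                     configuration)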
Exemple #58
0
def clean_arc_job(
    job_dict, 
    status,
    msg,
    configuration,
    logger,
    kill=True,
    timestamp=None
    ):
    """Cleaning remainder of an executed ARC job:
        - delete from ARC (and possibly kill the job, parameter)
        - delete two symbolic links (user dir and mrsl file)
        - write status and timestamp into mrsl 
    """


    logger.debug('Cleanup for ARC job %s, status %s' % (job_dict['JOB_ID'], status))

    if not status in ['FINISHED', 'CANCELED', 'FAILED']:
        logger.error('inconsistent cleanup request: %s for job %s' % \
                     (status, job_dict))
        return

    # done by the caller...
    # executing_queue.dequeue_job_by_id(job_dict['JOB_ID'])

    if not timestamp:
        timestamp = time.gmtime()
    client_dir = client_id_dir(job_dict['USER_CERT'])

    # clean up in ARC
    try:
        userdir = os.path.join(configuration.user_home, client_dir)
        arcsession = arc.Ui(userdir)
    except Exception, err:
        logger.error('Error cleaning up ARC job: %s' % err)
        logger.debug('Job was: %s' % job_dict)
Exemple #60
0
def checkLogin(self, username, password):
    """Check username and password in MiG user DB"""
    db_path = os.path.join(configuration.mig_code_base, 'server',
                           'MiG-users.db')
    id_map = load_user_db(db_path)
    # username may be None here
    login_url = os.path.join(configuration.user_openid_providers[0],
                             username or '')
    distinguished_name = get_openid_user_dn(configuration, login_url)
    if distinguished_name in id_map:
        user = id_map[distinguished_name]
        print "looked up user %s in DB: %s" % (username, user)
        enc_pw = user.get('password', None)
        if password and enc_pw and base64.b64encode(password) == enc_pw:
            print "Correct password for user %s" % username
            self.user_dn = distinguished_name
            self.user_dn_dir = client_id_dir(distinguished_name)
            return True
        else:
            print "Failed password check for user %s" % username
    print "Invalid login for user %s" % username
    return False