Example #1
def handle_file(
    configuration,
    listing,
    filename,
    file_with_dir,
    actual_file,
    flags='',
    dest='',
    show_dest=False,
):
    """handle a file"""

    # Build entire line before printing to avoid newlines

    # Recursion can get here when called without explicit invisible files

    if invisible_path(file_with_dir):
        return
    file_obj = {
        'object_type': 'direntry',
        'type': 'file',
        'name': filename,
        'rel_path': file_with_dir,
        'rel_path_enc': quote(file_with_dir),
        'rel_dir_enc': quote(os.path.dirname(file_with_dir)),
        # NOTE: file_with_dir is kept for backwards compatibility
        'file_with_dir': file_with_dir,
        'flags': flags,
    }

    if show_dest:
        file_obj['file_dest'] = dest

    listing.append(file_obj)
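
A small, self-contained sketch of the rel_path_enc and rel_dir_enc fields built above. It assumes quote is the standard percent-encoder from urllib; the real module imports its own quote helper, so this is only illustrative.

import os
from urllib.parse import quote

# Hypothetical input path; handle_file receives it as file_with_dir.
file_with_dir = 'docs/my report.txt'
print(quote(file_with_dir))                   # docs/my%20report.txt
print(quote(os.path.dirname(file_with_dir)))  # docs
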
Example #2
def handle_file(
    configuration,
    listing,
    filename,
    file_with_dir,
    actual_file,
    flags='',
):
    """handle a file"""

    # Build entire line before printing to avoid newlines

    # Recursion can get here when called without explicit invisible files

    if invisible_path(file_with_dir):
        return
    special = ''
    file_obj = {
        'object_type': 'direntry',
        'type': 'file',
        'name': filename,
        'rel_path': file_with_dir,
        'rel_path_enc': quote(file_with_dir),
        'rel_dir_enc': quote(os.path.dirname(file_with_dir)),
        # NOTE: file_with_dir is kept for backwards compatibility
        'file_with_dir': file_with_dir,
        'flags': flags,
        'special': special,
    }
    if long_list(flags):
        file_obj['long_format'] = long_format(actual_file)

    if file_info(flags):
        file_obj['file_info'] = fileinfo_stat(actual_file)

    listing.append(file_obj)
Example #3
def main(client_id, user_arguments_dict):
    """Main function used by front end"""

    (configuration, logger, output_objects, op_name) = \
        initialize_main_variables(client_id)
    client_dir = client_id_dir(client_id)
    defaults = signature()[1]
    (validate_status, accepted) = validate_input_and_cert(
        user_arguments_dict,
        defaults,
        output_objects,
        client_id,
        configuration,
        allow_rejects=False,
        # NOTE: path can use wildcards, dst and current_dir cannot
        typecheck_overrides={'path': valid_path_pattern},
    )
    if not validate_status:
        return (accepted, returnvalues.CLIENT_ERROR)

    flags = ''.join(accepted['flags'])
    pattern_list = accepted['path']
    dst = accepted['dst'][-1]
    current_dir = accepted['current_dir'][-1].lstrip(os.sep)

    # All paths are relative to current_dir

    pattern_list = [os.path.join(current_dir, i) for i in pattern_list]
    if dst:
        dst = os.path.join(current_dir, dst)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(
        os.path.join(configuration.user_home, client_dir)) + os.sep

    status = returnvalues.OK

    if verbose(flags):
        for flag in flags:
            output_objects.append({
                'object_type': 'text',
                'text': '%s using flag: %s' % (op_name, flag)
            })

    # IMPORTANT: path must be expanded to abs for proper chrooting
    abs_dir = os.path.abspath(
        os.path.join(base_dir, current_dir.lstrip(os.sep)))
    if not valid_user_path(configuration, abs_dir, base_dir, True):
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            "You're not allowed to work in %s!" % current_dir
        })
        logger.warning('%s tried to %s restricted path %s ! (%s)' %
                       (client_id, op_name, abs_dir, current_dir))
        return (output_objects, returnvalues.CLIENT_ERROR)

    if verbose(flags):
        output_objects.append({
            'object_type': 'text',
            'text': "working in %s" % current_dir
        })

    if dst:
        if not safe_handler(configuration, 'post', op_name, client_id,
                            get_csrf_limit(configuration), accepted):
            output_objects.append({
                'object_type':
                'error_text',
                'text':
                '''Only accepting
                CSRF-filtered POST requests to prevent unintended updates'''
            })
            return (output_objects, returnvalues.CLIENT_ERROR)

        # NOTE: dst already incorporates current_dir prefix here
        # IMPORTANT: path must be expanded to abs for proper chrooting
        abs_dest = os.path.abspath(os.path.join(base_dir, dst))
        logger.info('%s output to %s' % (op_name, abs_dest))

        # Don't use abs_path in output as it may expose underlying
        # fs layout.

        relative_dest = abs_dest.replace(base_dir, '')
        if not valid_user_path(configuration, abs_dest, base_dir, True):
            output_objects.append({
                'object_type':
                'error_text',
                'text':
                "Invalid path! (%s expands to an illegal path)" % dst
            })
            logger.warning('%s tried to %s restricted path %s ! (%s)' %
                           (client_id, op_name, abs_dest, dst))
            return (output_objects, returnvalues.CLIENT_ERROR)
        if not check_write_access(abs_dest, parent_dir=True):
            logger.warning('%s called without write access: %s' %
                           (op_name, abs_dest))
            output_objects.append({
                'object_type':
                'error_text',
                'text':
                'cannot write disk use to "%s": inside a read-only location!' %
                relative_dest
            })
            return (output_objects, returnvalues.CLIENT_ERROR)

    all_lines = []
    for pattern in pattern_list:

        # Check directory traversal attempts before actual handling to avoid
        # leaking information about file system layout while allowing
        # consistent error messages

        unfiltered_match = glob.glob(base_dir + pattern)
        match = []
        for server_path in unfiltered_match:
            # IMPORTANT: path must be expanded to abs for proper chrooting
            abs_path = os.path.abspath(server_path)
            if not valid_user_path(configuration, abs_path, base_dir, True):

                # out of bounds - save user warning for later to allow
                # partial match:
                # ../*/* is technically allowed to match own files.

                logger.warning('%s tried to %s restricted path %s ! (%s)' %
                               (client_id, op_name, abs_path, pattern))
                continue
            match.append(abs_path)

        # Now actually treat list of allowed matchings and notify if no
        # (allowed) match

        if not match:
            output_objects.append({
                'object_type': 'file_not_found',
                'name': pattern
            })
            status = returnvalues.FILE_NOT_FOUND

        # NOTE: we produce output matching an invocation of:
        # du -aL --apparent-size --block-size=1 PATH [PATH ...]
        filedus = []
        summarize_output = summarize(flags)
        for abs_path in match:
            if invisible_path(abs_path):
                continue
            relative_path = abs_path.replace(base_dir, '')
            # cache accumulated sub dir sizes - du sums into parent dir size
            dir_sizes = {}
            try:
                # Assume a directory to walk
                for (root, dirs, files) in walk(abs_path,
                                                topdown=False,
                                                followlinks=True):
                    if invisible_path(root):
                        continue
                    dir_bytes = 0
                    for name in files:
                        real_file = os.path.join(root, name)
                        if invisible_path(real_file):
                            continue
                        relative_file = real_file.replace(base_dir, '')
                        size = os.path.getsize(real_file)
                        dir_bytes += size
                        if not summarize_output:
                            filedus.append({
                                'object_type': 'filedu',
                                'name': relative_file,
                                'bytes': size
                            })
                    for name in dirs:
                        real_dir = os.path.join(root, name)
                        if invisible_path(real_dir):
                            continue
                        dir_bytes += dir_sizes[real_dir]
                    relative_root = root.replace(base_dir, '')
                    dir_bytes += os.path.getsize(root)
                    dir_sizes[root] = dir_bytes
                    if root == abs_path or not summarize_output:
                        filedus.append({
                            'object_type': 'filedu',
                            'name': relative_root,
                            'bytes': dir_bytes
                        })
                if os.path.isfile(abs_path):
                    # Fall back to plain file where walk is empty
                    size = os.path.getsize(abs_path)
                    filedus.append({
                        'object_type': 'filedu',
                        'name': relative_path,
                        'bytes': size
                    })
            except Exception as exc:
                output_objects.append({
                    'object_type':
                    'error_text',
                    'text':
                    "%s: '%s': %s" % (op_name, relative_path, exc)
                })
                logger.error("%s: failed on '%s': %s" %
                             (op_name, relative_path, exc))
                status = returnvalues.SYSTEM_ERROR
                continue
        if dst:
            all_lines += [
                '%(bytes)d\t\t%(name)s\n' % entry for entry in filedus
            ]
        else:
            output_objects.append({
                'object_type': 'filedus',
                'filedus': filedus
            })

    if dst and not write_file(''.join(all_lines), abs_dest, logger):
        output_objects.append({
            'object_type':
            'error_text',
            'text':
            "failed to write disk use to %s" % relative_dest
        })
        logger.error("writing disk use to %s for %s failed" %
                     (abs_dest, client_id))
        status = returnvalues.SYSTEM_ERROR

    return (output_objects, status)
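
The dst branch above serializes each filedu entry into a line matching du -aL --apparent-size --block-size=1 output. A minimal, self-contained illustration of that formatting with made-up entries:

# Hypothetical entries; the real code collects these while walking the matches.
filedus = [
    {'object_type': 'filedu', 'name': 'data/sample.txt', 'bytes': 1234},
    {'object_type': 'filedu', 'name': 'data', 'bytes': 5330},
]
all_lines = ['%(bytes)d\t\t%(name)s\n' % entry for entry in filedus]
print(''.join(all_lines), end='')
# 1234		data/sample.txt
# 5330		data
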
Example #4
def pack_archive(
    configuration,
    client_id,
    src,
    dst,
):
    """Inside the user home of client_id: pack the src_path into a zip or tar
    archive in dst. Both src and dst are expected to be relative
    paths.
    Please note that src and dst should be checked for illegal directory
    traversal attempts *before* getting here.
    """
    logger = configuration.logger
    msg = ''
    status = True
    client_dir = client_id_dir(client_id)

    # Please note that base_dir must end in slash to avoid access to other
    # user dirs when own name is a prefix of another user name

    base_dir = os.path.abspath(os.path.join(configuration.user_home,
                                            client_dir)) + os.sep
    real_src = os.path.join(base_dir, src.lstrip(os.sep))
    real_dst = os.path.join(base_dir, dst.lstrip(os.sep))

    # Pack in same path with zip extension unless dst is given

    if not dst:
        real_dst = real_src + '.zip'

    # create sub dir(s) if missing

    zip_entry_dir = os.path.dirname(real_dst)
    if not os.path.isdir(zip_entry_dir):
        logger.debug("make zip parent dir: %s" % zip_entry_dir)
        msg += 'Creating dir %s . ' % zip_entry_dir
        try:
            os.makedirs(zip_entry_dir, 0o775)
        except Exception as exc:
            logger.error("create directory failed: %s" % exc)
            msg += 'Error creating parent directory %s! ' % exc
            return (False, msg)

    real_dst_lower = real_dst.lower()
    real_src_dir = os.path.dirname(real_src)
    open_mode = "w"
    if real_dst_lower.endswith('.zip'):

        # Handle .zip file

        msg += "Requested packing of %s in %s . " % (src, dst)
        try:
            # Force compression and allow files bigger than 2GB
            pack_file = zipfile.ZipFile(real_dst, open_mode,
                                        zipfile.ZIP_DEFLATED, allowZip64=True)
        except Exception as exc:
            logger.error("create zip failed: %s" % exc)
            msg += 'Could not create zipfile: %s! ' % exc
            return (False, msg)

        if os.path.isdir(real_src):
            walker = walk(real_src)
        else:
            (root, filename) = os.path.split(real_src)
            walker = ((root + os.sep, [], [filename]), )
        for (root, _, files) in walker:
            relative_root = root.replace(real_src_dir + os.sep, '')
            for entry in files:
                real_target = os.path.join(root, entry)
                relative_target = os.path.join(relative_root,
                                               entry)
                if invisible_path(real_target):
                    logger.warning('skipping hidden file: %s'
                                   % real_target)
                    continue
                elif real_dst == real_target:
                    msg += 'Skipping destination file %s . ' % dst
                    continue
                logger.debug("pack file %s" % relative_target)
                try:
                    pack_file.write(real_target, relative_target)
                except Exception as exc:
                    logger.error('write of %s failed: %s' %
                                 (real_target, exc))
                    msg += 'Failed to write file %s . ' % relative_target
                    status = False
                    continue

            if not files and not invisible_path(relative_root):
                logger.debug("pack dir %s" % relative_root)
                try:
                    dir_info = zipfile.ZipInfo(relative_root + os.sep)
                    pack_file.writestr(dir_info, '')
                except Exception as exc:
                    logger.error('write of %s failed: %s' %
                                 (relative_root, exc))
                    msg += 'Failed to write dir %s . ' % relative_root
                    status = False
                    continue
        pack_file.close()

        # Verify CRC

        try:
            pack_file = zipfile.ZipFile(real_dst, 'r', allowZip64=True)
            pack_file.testzip()
            pack_file.close()
        except Exception as exc:
            logger.error("verify zip failed: %s" % exc)
            msg += "Could not open and verify zip file: %s! " % exc
            status = False
    elif real_dst_lower.endswith('.tar') or \
            real_dst_lower.endswith('.tar.gz') or \
            real_dst_lower.endswith('.tgz') or \
            real_dst_lower.endswith('.tar.bz2') or \
            real_dst_lower.endswith('.tbz'):

        # Handle possibly compressed .tar files
        if real_dst_lower.endswith('.tar.gz') or \
                real_dst_lower.endswith('.tgz'):
            open_mode += ':gz'
        elif real_dst_lower.endswith('.tar.bz2') or \
                real_dst_lower.endswith('.tbz'):
            open_mode += ':bz2'
        else:
            # uncompressed tar
            pass

        try:
            pack_file = tarfile.open(real_dst, open_mode)
        except Exception as exc:
            logger.error("create tar (%s) failed: %s" % (open_mode, exc))
            msg += 'Could not open .tar file: %s! ' % exc
            return (False, msg)

        logger.info("pack entries of %s to %s" %
                    (real_src, real_dst))

        if os.path.isdir(real_src):
            walker = walk(real_src)
        else:
            (root, filename) = os.path.split(real_src)
            walker = ((root + os.sep, [], [filename]), )
        for (root, _, files) in walker:
            relative_root = root.replace(real_src_dir + os.sep, '')
            for entry in files:
                real_target = os.path.join(root, entry)
                relative_target = os.path.join(relative_root, entry)
                if invisible_path(real_target):
                    logger.warning('skipping hidden file: %s'
                                   % real_target)
                    continue
                elif real_dst == real_target:
                    msg += 'Skipping destination file %s . ' % dst
                    continue
                logger.debug("pack file %s" % entry)
                try:
                    pack_file.add(real_target, relative_target,
                                  recursive=False)
                except Exception as exc:
                    logger.error('write of %s failed: %s' %
                                 (real_target, exc))
                    msg += 'Failed to write file %s . ' % relative_target
                    status = False
                    continue

            if not files and not invisible_path(relative_root):
                logger.debug("pack dir %s" % relative_root)
                try:
                    pack_file.add(root, relative_root, recursive=False)
                except Exception as exc:
                    logger.error('write of %s failed: %s' %
                                 (relative_root, exc))
                    msg += 'Failed to write dir %s . ' % relative_root
                    status = False
                    continue

        pack_file.close()
    else:
        logger.error("Pack called with unsupported archive format: %s" % dst)
        msg += "Unknown/unsupported archive format: %s" % dst
        return (False, msg)

    if status:
        msg += 'Wrote archive in file %s . ' % dst
    else:
        msg = """Packed archive with one or more errors:
 %s""" % msg

    return (status, msg)
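
The tar branch above derives the tarfile open mode from the destination extension. A self-contained sketch of that mapping; tar_open_mode is a hypothetical helper name, not part of the original module:

# 'w:gz' and 'w:bz2' request gzip/bzip2 compression from tarfile, plain 'w'
# an uncompressed tar; pack_archive builds the same modes by appending to 'w'.
def tar_open_mode(dst):
    dst_lower = dst.lower()
    if dst_lower.endswith('.tar.gz') or dst_lower.endswith('.tgz'):
        return 'w:gz'
    if dst_lower.endswith('.tar.bz2') or dst_lower.endswith('.tbz'):
        return 'w:bz2'
    return 'w'

print(tar_open_mode('results.tar.gz'))  # w:gz
print(tar_open_mode('results.tbz'))     # w:bz2
print(tar_open_mode('results.tar'))     # w
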
Example #5
def handle_ls(
    configuration,
    output_objects,
    listing,
    base_dir,
    real_path,
    flags='',
    depth=0,
):
    """Recursive function to emulate GNU ls (-R)"""

    # Sanity check

    if depth > 255:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Error: file recursion maximum exceeded!'
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    # references to '.' or similar are stripped by abspath

    if real_path + os.sep == base_dir:
        base_name = relative_path = '.'
    else:
        base_name = os.path.basename(real_path)
        relative_path = real_path.replace(base_dir, '')

    # Recursion can get here when called without explicit invisible files

    if invisible_path(relative_path):
        return

    if os.path.isfile(real_path):
        handle_file(configuration, listing, base_name, relative_path,
                    real_path, flags)
    else:
        try:
            contents = os.listdir(real_path)
        except Exception as exc:
            output_objects.append({
                'object_type':
                'error_text',
                'text':
                'Failed to list contents of %s: %s' % (base_name, exc)
            })
            return (output_objects, returnvalues.SYSTEM_ERROR)

        # Filter out dot files unless '-a' is used

        if not all(flags):
            contents = [i for i in contents if not i.startswith('.')]
        contents.sort()

        if not recursive(flags) or depth < 0:

            # listdir does not include '.' and '..' - add manually
            # to ease navigation

            if all(flags):
                handle_dir(configuration, listing, '.', relative_path,
                           real_path, flags)
                handle_dir(configuration, listing, '..',
                           os.path.dirname(relative_path),
                           os.path.dirname(real_path), flags)
            for name in contents:
                path = real_path + os.sep + name
                rel_path = path.replace(base_dir, '')
                if os.path.isfile(path):
                    handle_file(configuration, listing, name, rel_path, path,
                                flags)
                else:
                    handle_dir(configuration, listing, name, rel_path, path,
                               flags)
        else:

            # Force pure content listing first by passing a negative depth

            handle_ls(
                configuration,
                output_objects,
                listing,
                base_dir,
                real_path,
                flags,
                -1,
            )

            for name in contents:
                path = real_path + os.sep + name
                rel_path = path.replace(base_dir, '')
                if os.path.isdir(path):
                    handle_ls(
                        configuration,
                        output_objects,
                        listing,
                        base_dir,
                        path,
                        flags,
                        depth + 1,
                    )
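
handle_ls derives user-relative names by stripping a base_dir that ends in a separator. A self-contained illustration of that path handling with made-up directories; the trailing separator prevents prefix collisions between user names (e.g. user-1 vs user-10), as the base_dir comments in the other examples note:

import os

base_dir = os.path.join('/home/mig', 'user-0001') + os.sep
real_path = '/home/mig/user-0001/projects/run42'
if real_path + os.sep == base_dir:
    relative_path = '.'
else:
    relative_path = real_path.replace(base_dir, '')
print(relative_path)  # projects/run42
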
Example #6
def handle_dir(
    configuration,
    listing,
    dirname,
    dirname_with_dir,
    actual_dir,
    flags='',
):
    """handle a dir"""

    # Recursion can get here when called without explicit invisible files

    if invisible_path(dirname_with_dir):
        return
    special = ''
    extra_class = ''
    real_dir = os.path.realpath(actual_dir)
    abs_dir = os.path.abspath(actual_dir)
    # If we followed a symlink it is not a plain dir
    if real_dir != abs_dir:
        access_type = configuration.site_vgrid_label
        parent_dir = os.path.basename(os.path.dirname(actual_dir))
        # configuration.logger.debug("checking link %s type (%s)" % \
        #                           (dirname_with_dir, real_dir))
        # Separate vgrid special dirs from plain ones
        if in_vgrid_share(configuration, actual_dir) == dirname_with_dir:
            dir_type = 'shared files'
            extra_class = 'vgridshared'
        elif in_vgrid_readonly(configuration, actual_dir) == dirname_with_dir:
            access_type = 'read-only'
            dir_type = 'shared files'
            extra_class = 'vgridshared readonly'
        elif in_vgrid_pub_web(configuration, actual_dir) == \
                dirname_with_dir[len('public_base/'):]:
            dir_type = 'public web page'
            extra_class = 'vgridpublicweb'
        elif in_vgrid_priv_web(configuration, actual_dir) == \
                dirname_with_dir[len('private_base/'):]:
            dir_type = 'private web page'
            extra_class = 'vgridprivateweb'
        # NOTE: in_vgrid_X returns None on miss, so don't use replace directly
        elif (in_vgrid_store_res(configuration, actual_dir)
              or '').replace(os.sep,
                             '_') == os.path.basename(dirname_with_dir):
            dir_type = 'storage resource files'
            if os.path.islink(actual_dir):
                extra_class = 'vgridstoreres'
        elif real_dir.startswith(configuration.seafile_mount):
            access_type = 'read-only'
            dir_type = 'Seafile library access'
            if os.path.islink(actual_dir):
                extra_class = 'seafilereadonly'
        elif real_dir.find(trash_destdir) != -1:
            access_type = 'recently deleted data'
            dir_type = 'sub'
        else:
            dir_type = 'sub'
        # TODO: improve this greedy matching here?
        # configuration.logger.debug("check real_dir %s vs %s" % (real_dir,
        #                                                        trash_destdir))
        if real_dir.endswith(trash_destdir):
            dir_type = ''
            extra_class = 'trashbin'
        special = ' - %s %s directory' % (access_type, dir_type)
    dir_obj = {
        'object_type': 'direntry',
        'type': 'directory',
        'name': dirname,
        'rel_path': dirname_with_dir,
        'rel_path_enc': quote(dirname_with_dir),
        'rel_dir_enc': quote(dirname_with_dir),
        # NOTE: dirname_with_dir is kept for backwards compatibility
        'dirname_with_dir': dirname_with_dir,
        'flags': flags,
        'special': special,
        'extra_class': extra_class,
    }

    if long_list(flags):
        dir_obj['actual_dir'] = long_format(actual_dir)

    if file_info(flags):
        dir_obj['file_info'] = fileinfo_stat(actual_dir)

    listing.append(dir_obj)
Example #7
def listdir(self, path):
    """List the content of a directory with MiG restrictions"""
    return [
        i for i in AbstractedFS.listdir(self, path)
        if not invisible_path(i)
    ]
Example #8
def handle_expand(
    configuration,
    output_objects,
    listing,
    base_dir,
    real_path,
    flags='',
    dest='',
    depth=0,
    show_dest=False,
):
    """Recursive function to expand paths in a way not unlike ls, but only
    files are interesting in this context. The order of recursively expanded
    paths is different from that in ls since it simplifies the code and
    doesn't really matter to the clients.
    """

    # Sanity check

    if depth > 255:
        output_objects.append({
            'object_type': 'error_text',
            'text': 'Error: file recursion maximum exceeded!'
        })
        return (output_objects, returnvalues.SYSTEM_ERROR)

    # references to '.' or similar are stripped by abspath

    if real_path + os.sep == base_dir:
        base_name = relative_path = '.'
    else:
        base_name = os.path.basename(real_path)
        relative_path = real_path.replace(base_dir, '')

    # Recursion can get here when called without explicit invisible files

    if invisible_path(relative_path):
        return

    if os.path.isfile(real_path):
        handle_file(configuration, listing, relative_path, relative_path,
                    real_path, flags, dest, show_dest)
    else:
        try:
            contents = os.listdir(real_path)
        except Exception as exc:
            output_objects.append({
                'object_type': 'error_text',
                'text': 'Failed to list contents of %s: %s' % (base_name, exc)
            })
            return (output_objects, returnvalues.SYSTEM_ERROR)

        # Filter out dot files unless '-a' is used

        if not all(flags):
            contents = [i for i in contents if not i.startswith('.')]
        contents.sort()

        if not recursive(flags) or depth < 0:

            for name in contents:
                path = real_path + os.sep + name
                rel_path = path.replace(base_dir, '')
                if os.path.isfile(path):
                    handle_file(configuration, listing, rel_path, rel_path,
                                path, flags,
                                os.path.join(dest, os.path.basename(rel_path)),
                                show_dest)
        else:

            # Force pure content listing first by passing a negative depth

            handle_expand(
                configuration,
                output_objects,
                listing,
                base_dir,
                real_path,
                flags,
                dest,
                -1,
                show_dest,
            )

            for name in contents:
                path = real_path + os.sep + name
                rel_path = path.replace(base_dir, '')
                if os.path.isdir(path):
                    handle_expand(
                        configuration,
                        output_objects,
                        listing,
                        base_dir,
                        path,
                        flags,
                        os.path.join(dest, name),
                        depth + 1,
                        show_dest,
                    )
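
When show_dest is set, handle_expand pairs each expanded file with a destination built from the caller's dest prefix. A minimal, self-contained sketch of that mapping for a flat listing, using hypothetical paths:

import os

dest = 'backup'
expanded = ['docs/a.txt', 'docs/b.txt']
pairs = [(rel_path, os.path.join(dest, os.path.basename(rel_path)))
         for rel_path in expanded]
print(pairs)  # [('docs/a.txt', 'backup/a.txt'), ('docs/b.txt', 'backup/b.txt')]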