def finalizeUpload(self, upload, file):
        """
        Move an uploaded temp file into its permanent, content-addressed
        location under the assetstore root. Files are bucketed into a
        two-level directory tree (256^2 buckets) keyed on the first four hex
        digits of the SHA-512 checksum.
        """
        checksum = hash_state.restoreHex(
            upload['sha512state'], 'sha512').hexdigest()

        # Relative layout: <aa>/<bb>/<full hash>
        relDir = os.path.join(checksum[0:2], checksum[2:4])
        relPath = os.path.join(relDir, checksum)
        absPath = os.path.join(self.assetstore['root'], relPath)

        mkdir(os.path.join(self.assetstore['root'], relDir))

        if not os.path.exists(absPath):
            # First copy of this content: move it into place.
            # shutil.move works across filesystems
            shutil.move(upload['tempFile'], absPath)
            try:
                os.chmod(absPath, stat.S_IRUSR | stat.S_IWUSR)
            except OSError:
                # some filesystems may not support POSIX permissions
                pass
        else:
            # Identical content already stored; discard the temp file.
            os.remove(upload['tempFile'])

        file['sha512'] = checksum
        file['path'] = relPath

        return file
Beispiel #2
0
def _setupLogger():
    """
    Configure and return the 'girder' logger with rotating error and info
    file handlers.
    """
    logger = logging.getLogger('girder')
    logger.setLevel(logging.DEBUG)

    logPaths = getLogPaths()

    # Make sure every directory that will receive a log file exists.
    for logDir in (logPaths['root'],
                   os.path.dirname(logPaths['info']),
                   os.path.dirname(logPaths['error'])):
        mkdir(logDir)

    fmt = LogFormatter('[%(asctime)s] %(levelname)s: %(message)s')

    # Warnings and above go to the error log.
    eh = logging.handlers.RotatingFileHandler(
        logPaths['error'], maxBytes=MAX_LOG_SIZE, backupCount=LOG_BACKUP_COUNT)
    eh.setLevel(logging.WARNING)
    eh.addFilter(LogLevelFilter(min=logging.WARNING, max=logging.CRITICAL))
    eh.setFormatter(fmt)

    # Debug and info messages go to the info log.
    ih = logging.handlers.RotatingFileHandler(
        logPaths['info'], maxBytes=MAX_LOG_SIZE, backupCount=LOG_BACKUP_COUNT)
    ih.setLevel(logging.INFO)
    ih.addFilter(LogLevelFilter(min=logging.DEBUG, max=logging.INFO))
    ih.setFormatter(fmt)

    logger.addHandler(eh)
    logger.addHandler(ih)
    return logger
    def finalizeUpload(self, upload, file):
        """
        Move the uploaded temp file to its permanent content-addressed
        location inside the assetstore. The directory layout spreads files
        over 256^2 buckets based on the leading hex digits of the SHA-512.
        """
        digest = hash_state.restoreHex(upload['sha512state'],
                                       'sha512').hexdigest()
        root = self.assetstore['root']

        bucket = os.path.join(digest[0:2], digest[2:4])
        relPath = os.path.join(bucket, digest)
        absPath = os.path.join(root, relPath)

        mkdir(os.path.join(root, bucket))

        if os.path.exists(absPath):
            # Content already stored; the temp copy is redundant.
            os.remove(upload['tempFile'])
        else:
            # shutil.move works across filesystems
            shutil.move(upload['tempFile'], absPath)
            try:
                os.chmod(absPath, self.assetstore.get('perms', DEFAULT_PERMS))
            except OSError:
                # some filesystems may not support POSIX permissions
                pass

        file['sha512'] = digest
        file['path'] = relPath

        return file
Beispiel #4
0
    def finalizeUpload(self, upload, file):
        """
        Moves the file into its permanent content-addressed location within the
        assetstore. Directory hierarchy yields 256^2 buckets.

        :param upload: the upload document; must contain 'sha512state' and
            'tempFile', and may contain '_id'.
        :param file: the file document being finalized; mutated in place.
        :returns: the file document with 'sha512' and 'path' set.
        """
        hash = _hash_state.restoreHex(upload['sha512state'],
                                      'sha512').hexdigest()
        # Two-level bucket directory (e.g. 'ab/cd') with the full hash as the
        # file name.
        dir = os.path.join(hash[0:2], hash[2:4])
        absdir = os.path.join(self.assetstore['root'], dir)

        path = os.path.join(dir, hash)
        abspath = os.path.join(self.assetstore['root'], path)

        # Store the hash in the upload so that deleting a file won't delete
        # this file
        if '_id' in upload:
            upload['sha512'] = hash
            Upload().update({'_id': upload['_id']},
                            update={'$set': {
                                'sha512': hash
                            }})

        mkdir(absdir)

        # Only maintain the lock while checking if the file exists.  The only
        # other place the lock is used is checking if an upload task has
        # reserved the file, so this is sufficient.
        with filelock.FileLock(abspath + '.deleteLock'):
            pathExists = os.path.exists(abspath)
        if pathExists:
            # Already have this file stored, just delete temp file.
            os.unlink(upload['tempFile'])
        else:
            # Move the temp file to permanent location in the assetstore.
            # shutil.move works across filesystems
            shutil.move(upload['tempFile'], abspath)
            try:
                os.chmod(abspath, self.assetstore.get('perms', DEFAULT_PERMS))
            except OSError:
                # some filesystems may not support POSIX permissions
                pass

        file['sha512'] = hash
        file['path'] = path

        return file
 def __init__(self, assetstore):
     """
     Initialize the filesystem adapter; flag it unavailable rather than
     failing when its directories cannot be created or written.
     """
     super(FilesystemAssetstoreAdapter, self).__init__(assetstore)
     # If we can't create the temp directory, the assetstore still needs to
     # be initialized so that it can be deleted or modified.  The validation
     # prevents invalid new assetstores from being created, so this only
     # happens to existing assetstores that no longer can access their temp
     # directories.
     root = self.assetstore['root']
     self.tempDir = os.path.join(root, 'temp')
     try:
         mkdir(self.tempDir)
     except OSError:
         self.unavailable = True
         logger.exception('Failed to create filesystem assetstore '
                          'directories %s' % self.tempDir)
     if not os.access(root, os.W_OK):
         self.unavailable = True
         logger.error('Could not write to assetstore root: %s', root)
Beispiel #6
0
 def __init__(self, assetstore):
     """
     Set up the adapter's temp directory and mark the adapter unavailable
     when the assetstore's directories cannot be created or written.
     """
     super().__init__(assetstore)
     # If we can't create the temp directory, the assetstore still needs to
     # be initialized so that it can be deleted or modified.  The validation
     # prevents invalid new assetstores from being created, so this only
     # happens to existing assetstores that no longer can access their temp
     # directories.
     assetstoreRoot = self.assetstore['root']
     self.tempDir = os.path.join(assetstoreRoot, 'temp')
     try:
         mkdir(self.tempDir)
     except OSError:
         self.unavailable = True
         logger.exception('Failed to create filesystem assetstore '
                          'directories %s' % self.tempDir)
     if not os.access(assetstoreRoot, os.W_OK):
         self.unavailable = True
         logger.error('Could not write to assetstore root: %s',
                      assetstoreRoot)
Beispiel #7
0
def startFromConfig():
    """
    Check if the config file has a section [server_fuse] and key "path".  If
    so, mount a FUSE at the specified path without using access validation.

    :returns: True if a mount was made.  False if an error was raised.  None
        if no mount was attempted.
    """
    cfg = config.getConfig().get('server_fuse', {})
    path = cfg.get('path')
    # Always unmount any mounted FUSE filesystems on server stop, even when
    # no mount is made below.
    cherrypy.engine.subscribe('stop', server_fuse.unmountAll)

    if path:
        try:
            mkdir(path)
            return server_fuse.mountServerFuse(MAIN_FUSE_KEY, path, force=True)
        except Exception:
            # Best-effort: log the failure rather than aborting startup.
            logger.exception('Can\'t mount resource fuse: %s' % path)
            return False
    def finalizeUpload(self, upload, file):
        """
        Move the uploaded temp file to its permanent content-addressed home
        in the assetstore; files are spread over 256^2 bucket directories
        derived from the first four hex digits of their SHA-512 checksum.
        """
        digest = _hash_state.restoreHex(upload['sha512state'], 'sha512').hexdigest()
        root = self.assetstore['root']
        bucket = os.path.join(digest[0:2], digest[2:4])
        relPath = os.path.join(bucket, digest)
        absPath = os.path.join(root, relPath)

        # Record the hash on the upload document so that deleting another
        # file with the same content cannot remove this one.
        if '_id' in upload:
            upload['sha512'] = digest
            Upload().update({'_id': upload['_id']}, update={'$set': {'sha512': digest}})

        mkdir(os.path.join(root, bucket))

        # Hold the lock only while checking for existence.  The only other
        # use of this lock is checking whether an upload task has reserved
        # the file, so this is sufficient.
        with filelock.FileLock(absPath + '.deleteLock'):
            alreadyStored = os.path.exists(absPath)

        if not alreadyStored:
            # shutil.move works across filesystems
            shutil.move(upload['tempFile'], absPath)
            try:
                os.chmod(absPath, self.assetstore.get('perms', DEFAULT_PERMS))
            except OSError:
                # some filesystems may not support POSIX permissions
                pass
        else:
            # Identical content is already present; discard the temp file.
            os.unlink(upload['tempFile'])

        file['sha512'] = digest
        file['path'] = relPath

        return file
Beispiel #9
0
    def validateInfo(doc):
        """
        Ensure the root field is an absolute, writeable path, expanding a
        leading ``~`` or ``~user`` component to the home directory of the
        user running the server.  Also normalize the ``perms`` field to an
        integer mode that keeps the file readable and writeable by owner.
        """
        doc['root'] = os.path.expanduser(doc['root'])

        if not os.path.isabs(doc['root']):
            raise ValidationException(
                'You must provide an absolute path '
                'for the root directory.', 'root')

        try:
            mkdir(doc['root'])
        except OSError:
            msg = 'Could not make directory "%s".' % doc['root']
            logger.exception(msg)
            raise ValidationException(msg)
        if not os.access(doc['root'], os.W_OK):
            raise ValidationException('Unable to write into directory "%s".' %
                                      doc['root'])

        if not doc.get('perms'):
            doc['perms'] = DEFAULT_PERMS
            return

        perms = doc['perms']
        try:
            if not isinstance(perms, int):
                perms = int(doc['perms'], 8)
        except ValueError:
            raise ValidationException(
                'File permissions must be an octal integer.')

        # Make sure that mode is still rw for user
        if not perms & stat.S_IRUSR or not perms & stat.S_IWUSR:
            raise ValidationException(
                'File permissions must allow "rw" for user.')
        doc['perms'] = perms
    def validateInfo(doc):
        """
        Validate that ``doc['root']`` is an absolute, writeable directory
        (expanding a leading ``~``/``~user``), and coerce ``doc['perms']``
        to an integer mode that preserves user read/write access.
        """
        root = os.path.expanduser(doc['root'])
        doc['root'] = root

        if not os.path.isabs(root):
            raise ValidationException('You must provide an absolute path '
                                      'for the root directory.', 'root')

        try:
            mkdir(root)
        except OSError:
            msg = 'Could not make directory "%s".' % root
            logger.exception(msg)
            raise ValidationException(msg)
        if not os.access(root, os.W_OK):
            raise ValidationException(
                'Unable to write into directory "%s".' % root)

        if doc.get('perms'):
            try:
                perms = doc['perms']
                if not isinstance(perms, int):
                    perms = int(doc['perms'], 8)

                # The owner must retain both read and write access.
                if not perms & stat.S_IRUSR or not perms & stat.S_IWUSR:
                    raise ValidationException(
                        'File permissions must allow "rw" for user.')
                doc['perms'] = perms
            except ValueError:
                raise ValidationException(
                    'File permissions must be an octal integer.')
        else:
            doc['perms'] = DEFAULT_PERMS
    def validateInfo(doc):
        """
        Verify that the root field is an absolute, writeable path, expanding
        a leading ``~`` or ``~user`` component to the home directory of the
        user running the server.
        """
        root = os.path.expanduser(doc['root'])
        doc['root'] = root

        if not os.path.isabs(root):
            raise ValidationException('You must provide an absolute path '
                                      'for the root directory.', 'root')

        try:
            mkdir(root)
        except OSError:
            msg = 'Could not make directory "%s".' % root
            logger.exception(msg)
            raise ValidationException(msg)

        if not os.access(root, os.W_OK):
            raise ValidationException(
                'Unable to write into directory "%s".' % root)
Beispiel #12
0
def getPluginDirs(curConfig=None):
    """Return an ordered list of directories that plugins can live in."""
    if curConfig is None:
        curConfig = _config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        pluginDirs = curConfig['plugins']['plugin_directory'].split(':')
    else:
        pluginDirs = [defaultPluginDir()]

    # Try to create each directory and drop any that cannot be created.
    failedPluginDirs = set()
    for pluginDir in pluginDirs:
        try:
            mkdir(pluginDir)
        except OSError:
            print(TerminalColor.warning(
                'Could not create plugin directory %s.' % pluginDir))

            failedPluginDirs.add(pluginDir)

    return [d for d in pluginDirs if d not in failedPluginDirs]
Beispiel #13
0
def getPluginDirs(curConfig=None):
    """Return an ordered list of directories that plugins can live in."""
    if curConfig is None:
        curConfig = _config.getConfig()

    pluginsCfg = curConfig.get('plugins') if 'plugins' in curConfig else None
    if pluginsCfg and 'plugin_directory' in pluginsCfg:
        pluginDirs = pluginsCfg['plugin_directory'].split(':')
    else:
        pluginDirs = [defaultPluginDir()]

    # Attempt to create each directory; exclude any that fail.
    failedPluginDirs = set()
    for pluginDir in pluginDirs:
        try:
            mkdir(pluginDir)
        except OSError:
            logprint.warning(
                'Could not create plugin directory %s.' % pluginDir)

            failedPluginDirs.add(pluginDir)

    return [d for d in pluginDirs if d not in failedPluginDirs]
    def validateInfo(doc):
        """
        Check that the root directory is an absolute, creatable, writeable
        path; a leading ``~``/``~user`` component is expanded in place.
        """
        rootDir = os.path.expanduser(doc['root'])
        doc['root'] = rootDir

        # Relative roots are ambiguous once the server's cwd changes.
        if not os.path.isabs(rootDir):
            raise ValidationException(
                'You must provide an absolute path '
                'for the root directory.', 'root')

        try:
            mkdir(rootDir)
        except OSError:
            msg = 'Could not make directory "%s".' % rootDir
            logger.exception(msg)
            raise ValidationException(msg)

        if not os.access(rootDir, os.W_OK):
            raise ValidationException('Unable to write into directory "%s".' %
                                      rootDir)
Beispiel #15
0
def _setupLogger():
    """
    Create and configure the 'girder' logger with rotating file handlers
    for info- and error-level output, and return it.
    """
    logger = logging.getLogger('girder')
    logger.setLevel(logging.DEBUG)

    logPaths = getLogPaths()

    # Make sure each directory that will receive a log file exists.
    for logDir in [logPaths['root'],
                   os.path.dirname(logPaths['info']),
                   os.path.dirname(logPaths['error'])]:
        mkdir(logDir)

    def makeHandler(path, handlerLevel, minLevel, maxLevel):
        # Build a size-rotated handler restricted to [minLevel, maxLevel].
        handler = logging.handlers.RotatingFileHandler(
            path, maxBytes=MAX_LOG_SIZE, backupCount=LOG_BACKUP_COUNT)
        handler.setLevel(handlerLevel)
        handler.addFilter(LogLevelFilter(min=minLevel, max=maxLevel))
        handler.setFormatter(
            LogFormatter('[%(asctime)s] %(levelname)s: %(message)s'))
        return handler

    # Warnings and above go to the error log; debug/info to the info log.
    logger.addHandler(makeHandler(
        logPaths['error'], logging.WARNING, logging.WARNING,
        logging.CRITICAL))
    logger.addHandler(makeHandler(
        logPaths['info'], logging.INFO, logging.DEBUG, logging.INFO))
    return logger
Beispiel #16
0
def _setupLogger():
    """
    Sets up the Girder logger from the [logging] section of the config.

    Configures level, rotation size, and backup count; installs rotating
    info and error file handlers (removing any previously-installed Girder
    handlers so this can be called repeatedly); and wires cherrypy's error
    and access logs into the same handlers where configured.

    :returns: the configured 'girder' logger.
    """
    global _quiet

    logger = logging.getLogger('girder')
    cfg = config.getConfig()
    logCfg = cfg.get('logging', {})

    # If we are asked to be quiet, set a global flag so that logprint doesn't
    # have to get the configuration settings every time it is used.
    if logCfg.get('log_quiet') is True:
        _quiet = True

    logPaths = getLogPaths()
    # Ensure log paths are valid
    logDirs = [
        logPaths['root'],
        os.path.dirname(logPaths['info']),
        os.path.dirname(logPaths['error'])
    ]
    for logDir in logDirs:
        mkdir(logDir)

    # Set log level from config; fall back to INFO when unset or not a
    # recognized logging level name.
    level = logging.INFO
    if logCfg.get('log_level') and isinstance(
            getattr(logging, logCfg['log_level'], None), int):
        level = getattr(logging, logCfg['log_level'])
    logger.setLevel(logging.DEBUG if level is None else level)

    logSize = MAX_LOG_SIZE
    if logCfg.get('log_max_size'):
        sizeValue = logCfg['log_max_size']
        # NOTE(review): unit suffixes are case-sensitive here ('kb', 'Mb',
        # 'Gb'); other casings fall through to the plain-integer parse.
        sizeUnits = {'kb': 1024, 'Mb': 1024**2, 'Gb': 1024**3}
        if sizeValue[-2:] in sizeUnits:
            logSize = int(sizeValue[:-2].strip()) * sizeUnits[sizeValue[-2:]]
        else:
            logSize = int(sizeValue)
    backupCount = int(logCfg.get('log_backup_count', LOG_BACKUP_COUNT))

    # Remove extant log handlers (this allows this function to be called
    # multiple times)
    for handler in list(logger.handlers):
        if hasattr(handler, '_girderLogHandler'):
            logger.removeHandler(handler)
            cherrypy.log.access_log.removeHandler(handler)

    fmt = LogFormatter('[%(asctime)s] %(levelname)s: %(message)s')
    infoMaxLevel = logging.INFO
    # Create log handlers.  A separate error handler is only created when the
    # error log is a distinct file; otherwise the info handler covers all
    # levels.
    if logPaths['error'] != logPaths['info']:
        eh = logging.handlers.RotatingFileHandler(logPaths['error'],
                                                  maxBytes=logSize,
                                                  backupCount=backupCount)
        eh.setLevel(level)
        eh.addFilter(LogLevelFilter(min=logging.WARNING, max=logging.CRITICAL))
        eh._girderLogHandler = 'error'
        eh.setFormatter(fmt)
        logger.addHandler(eh)
        # Record cherrypy errors in our logs, too
        cherrypy.log.error_log.addHandler(eh)
    else:
        infoMaxLevel = logging.CRITICAL

    if isinstance(getattr(logging, logCfg.get('log_max_info_level', ''), None),
                  int):
        infoMaxLevel = getattr(logging, logCfg['log_max_info_level'])
    ih = logging.handlers.RotatingFileHandler(logPaths['info'],
                                              maxBytes=logSize,
                                              backupCount=backupCount)
    ih.setLevel(level)
    ih.addFilter(LogLevelFilter(min=logging.DEBUG, max=infoMaxLevel))
    ih._girderLogHandler = 'info'
    ih.setFormatter(fmt)
    logger.addHandler(ih)
    # Record cherrypy errors in our logs, too
    cherrypy.log.error_log.addHandler(ih)

    # Log http accesses to the screen and/or the info log.
    accessLog = logCfg.get('log_access', 'screen')
    if not isinstance(accessLog, (tuple, list, set)):
        accessLog = [accessLog]
    if _quiet or ('screen' not in accessLog and 'stdout' not in accessLog):
        cherrypy.config.update({'log.screen': False})
    if 'info' in accessLog:
        cherrypy.log.access_log.addHandler(ih)

    return logger
Beispiel #17
0
def _setupLogger():
    """
    Sets up the Girder logger from the [logging] section of the config.

    Configures level, rotation size, and backup count; installs rotating
    info and error file handlers (removing any previously-installed Girder
    handlers so this can be called repeatedly); redirects stdout/stderr
    through the logger; and optionally routes cherrypy access logging to
    the info handler.

    :returns: the configured 'girder' logger.
    """
    global _quiet

    logger = logging.getLogger('girder')
    cfg = config.getConfig()
    logCfg = cfg.get('logging', {})

    # If we are asked to be quiet, set a global flag so that logprint doesn't
    # have to get the configuration settings every time it is used.
    if logCfg.get('log_quiet') is True:
        _quiet = True

    logPaths = getLogPaths()
    # Ensure log paths are valid
    logDirs = [
        logPaths['root'],
        os.path.dirname(logPaths['info']),
        os.path.dirname(logPaths['error'])
    ]
    for logDir in logDirs:
        mkdir(logDir)

    # Set log level from config; fall back to INFO when unset or not a
    # recognized logging level name.
    level = logging.INFO
    if logCfg.get('log_level') and isinstance(getattr(logging, logCfg['log_level'], None), int):
        level = getattr(logging, logCfg['log_level'])
    logger.setLevel(logging.DEBUG if level is None else level)

    logSize = MAX_LOG_SIZE
    if logCfg.get('log_max_size'):
        sizeValue = logCfg['log_max_size']
        # NOTE(review): unit suffixes are case-sensitive here ('kb', 'Mb',
        # 'Gb'); other casings fall through to the plain-integer parse.
        sizeUnits = {'kb': 1024, 'Mb': 1024 ** 2, 'Gb': 1024 ** 3}
        if sizeValue[-2:] in sizeUnits:
            logSize = int(sizeValue[:-2].strip()) * sizeUnits[sizeValue[-2:]]
        else:
            logSize = int(sizeValue)
    backupCount = int(logCfg.get('log_backup_count', LOG_BACKUP_COUNT))

    # Remove extant log handlers (this allows this function to be called
    # multiple times)
    for handler in list(logger.handlers):
        if hasattr(handler, '_girderLogHandler'):
            logger.removeHandler(handler)
            cherrypy.log.access_log.removeHandler(handler)

    fmt = LogFormatter('[%(asctime)s] %(levelname)s: %(message)s')
    infoMaxLevel = logging.INFO
    # Create log handlers.  A separate error handler is only created when the
    # error log is a distinct file; otherwise the info handler covers all
    # levels.
    if logPaths['error'] != logPaths['info']:
        eh = logging.handlers.RotatingFileHandler(
            logPaths['error'], maxBytes=logSize, backupCount=backupCount)
        eh.setLevel(level)
        eh.addFilter(LogLevelFilter(min=logging.WARNING, max=logging.CRITICAL))
        eh._girderLogHandler = 'error'
        eh.setFormatter(fmt)
        logger.addHandler(eh)
    else:
        infoMaxLevel = logging.CRITICAL

    if isinstance(getattr(logging, logCfg.get('log_max_info_level', ''), None), int):
        infoMaxLevel = getattr(logging, logCfg['log_max_info_level'])
    ih = logging.handlers.RotatingFileHandler(
        logPaths['info'], maxBytes=logSize, backupCount=backupCount)
    ih.setLevel(level)
    ih.addFilter(LogLevelFilter(min=logging.DEBUG, max=infoMaxLevel))
    ih._girderLogHandler = 'info'
    ih.setFormatter(fmt)
    logger.addHandler(ih)

    # Route process stdout/stderr through the logger, preserving the original
    # streams captured at import time.
    sys.stdout = StreamToLogger(_originalStdOut, logger, logging.INFO)
    sys.stderr = StreamToLogger(_originalStdErr, logger, logging.ERROR)

    # Log http accesses to the screen and/or the info log.
    accessLog = logCfg.get('log_access', 'screen')
    if not isinstance(accessLog, (tuple, list, set)):
        accessLog = [accessLog]
    if _quiet or ('screen' not in accessLog and 'stdout' not in accessLog):
        cherrypy.config.update({'log.screen': False})
    if 'info' in accessLog:
        cherrypy.log.access_log.addHandler(ih)

    return logger