def process_annotations(event):
    """Add annotations to an image on a ``data.process`` event"""
    info = event.info
    if 'anot' in info.get('file', {}).get('exts', []):
        reference = info.get('reference', None)

        try:
            reference = json.loads(reference)
        except (ValueError, TypeError):
            print(TerminalColor.error(
                'Warning: Could not get reference from the annotation param. '
                'Make sure you have ctk-cli>=1.3.1 installed.'
            ))
            return

        if 'userId' not in reference or 'itemId' not in reference:
            print(TerminalColor.error(
                'Annotation reference does not contain required information.'
            ))
            return

        userId = reference['userId']
        imageId = reference['itemId']

        # load model classes
        Item = ModelImporter.model('item')
        File = ModelImporter.model('file')
        User = ModelImporter.model('user')
        Annotation = ModelImporter.model('annotation', plugin='large_image')

        # load models from the database
        user = User.load(userId, force=True)
        image = File.load(imageId, level=AccessType.READ, user=user)
        item = Item.load(image['itemId'], level=AccessType.WRITE, user=user)
        file = File.load(
            info.get('file', {}).get('_id'),
            level=AccessType.READ, user=user
        )

        if not (item and user and file):
            print(TerminalColor.error(
                'Could not load models from the database'
            ))
            return

        try:
            data = json.loads(
                ''.join(File.download(file)())
            )
        except Exception:
            print(TerminalColor.error(
                'Could not parse annotation file'
            ))
            return

        Annotation.createAnnotation(
            item,
            user,
            data
        )
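A handler like this only runs once it is bound to the ``data.process`` event. A minimal sketch of that wiring, assuming girder's standard ``events`` module and a conventional plugin ``load`` entry point (neither is shown in the snippet):

from girder import events


def load(info):
    # Fire process_annotations whenever a processed file is registered.
    events.bind('data.process', 'annotation_handler', process_annotations)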
Example #2
def getDbConnection(uri=None, replicaSet=None):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally.

    :param uri: if specified, connect to this mongo db rather than the one in
                the config.
    :param replicaSet: if uri is specified, use this replica set.
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    if uri is None or uri == '':
        dbConf = getDbConfig()
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')
    clientOptions = {
        'connectTimeoutMS': 15000,
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect.  If it
        # isn't set, we have issues with replica sets when the primary goes
        # down.  This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
    }
    if uri is None:
        # The default URI (and its accompanying warning) was redacted in this
        # snippet; girder falls back to a local default here.
        dbUriRedacted = 'mongodb://*****:*****'
        client = pymongo.MongoClient(dbUriRedacted, **clientOptions)
    else:
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

        if replicaSet:
            client = pymongo.MongoReplicaSetClient(
                uri,
                replicaSet=replicaSet,
                read_preference=ReadPreference.SECONDARY_PREFERRED,
                **clientOptions)
        else:
            client = pymongo.MongoClient(uri, **clientOptions)
    client = MongoProxy(client, logger=logger)
    _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client
    desc = ''
    if replicaSet:
        desc += ', replica set: %s' % replicaSet
    print(
        TerminalColor.info('Connected to MongoDB: %s%s' %
                           (dbUriRedacted, desc)))
    return client
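A usage sketch for the helper above; the URI is illustrative and assumes a reachable local MongoDB. The second call shows the per-``(uri, replicaSet)`` caching, and ``get_default_database`` is the stock pymongo accessor for the database named in the URI:

client = getDbConnection('mongodb://localhost:27017/girder')
assert client is getDbConnection('mongodb://localhost:27017/girder')  # cached

db = client.get_default_database()
print(db.collection_names())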
Example #3
    def route(self, method, route, handler, nodoc=False, resource=None):
        """
        Define a route for your REST resource.

        :param method: The HTTP method, e.g. 'GET', 'POST', 'PUT', 'PATCH'
        :type method: str
        :param route: The route, as a list of path params relative to the
            resource root. Elements of this list starting with ':' are assumed
            to be wildcards.
        :type route: tuple
        :param handler: The method to be called if the route and method are
            matched by a request. Wildcards in the route will be expanded and
            passed as kwargs with the same name as the wildcard identifier.
        :type handler: function
        :param nodoc: If your route intentionally provides no documentation,
            set this to True to disable the warning on startup.
        :type nodoc: bool
        :param resource: The name of the resource at the root of this route.
        """
        if not hasattr(self, '_routes'):
            self._routes = collections.defaultdict(
                lambda: collections.defaultdict(list))

        # Insertion sort to maintain routes in required order.
        nLengthRoutes = self._routes[method.lower()][len(route)]
        for i in range(len(nLengthRoutes)):
            if self._shouldInsertRoute(route, nLengthRoutes[i][0]):
                nLengthRoutes.insert(i, (route, handler))
                break
        else:
            nLengthRoutes.append((route, handler))

        # Now handle the api doc if the handler has any attached
        if resource is None and hasattr(self, 'resourceName'):
            resource = self.resourceName
        elif resource is None:
            resource = handler.__module__.rsplit('.', 1)[-1]

        if hasattr(handler, 'description'):
            if handler.description is not None:
                docs.addRouteDocs(resource=resource,
                                  route=route,
                                  method=method,
                                  info=handler.description.asDict(),
                                  handler=handler)
        elif not nodoc:
            routePath = '/'.join([resource] + list(route))
            print(
                TerminalColor.warning(
                    'WARNING: No description docs present for route {} {}'.
                    format(method, routePath)))

        # Warn if there is no access decorator on the handler function
        if not hasattr(handler, 'accessLevel'):
            routePath = '/'.join([resource] + list(route))
            print(
                TerminalColor.warning(
                    'WARNING: No access level specified for route {} {}'.
                    format(method, routePath)))
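A sketch of how a ``Resource`` subclass typically registers routes with this method; the resource, route, and handler names are illustrative, and the handler signature follows the older girder convention of receiving expanded wildcards plus ``params``:

class Widget(Resource):
    def __init__(self):
        super(Widget, self).__init__()
        self.resourceName = 'widget'
        self.route('GET', (':id',), self.getWidget)
        self.route('PUT', (':id', 'access'), self.updateAccess)

    def getWidget(self, id, params):
        # 'id' arrives as a kwarg because ':id' is a wildcard in the route.
        return {'_id': id}

    def updateAccess(self, id, params):
        return {'_id': id, 'access': 'updated'}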
Example #4
    def route(self, method, route, handler, nodoc=False, resource=None):
        """
        Define a route for your REST resource.

        :param method: The HTTP method, e.g. 'GET', 'POST', 'PUT'
        :type method: str
        :param route: The route, as a list of path params relative to the
        resource root. Elements of this list starting with ':' are assumed to
        be wildcards.
        :type route: list
        :param handler: The method to be called if the route and method are
        matched by a request. Wildcards in the route will be expanded and
        passed as kwargs with the same name as the wildcard identifier.
        :type handler: function
        :param nodoc: If your route intentionally provides no documentation,
                      set this to True to disable the warning on startup.
        :type nodoc: bool
        """
        if not hasattr(self, '_routes'):
            self._routes = collections.defaultdict(
                lambda: collections.defaultdict(list))

        # Insertion sort to maintain routes in required order.
        def shouldInsert(a, b):
            """
            Return bool representing whether route a should go before b. Checks
            by comparing each token in order and making sure routes with
            literals in forward positions come before routes with wildcards
            in those positions.
            """
            for i in xrange(0, len(a)):
                if a[i][0] != ':' and b[i][0] == ':':
                    return True
            return False

        nLengthRoutes = self._routes[method.lower()][len(route)]
        for i in xrange(0, len(nLengthRoutes)):
            if shouldInsert(route, nLengthRoutes[i][0]):
                nLengthRoutes.insert(i, (route, handler))
                break
        else:
            nLengthRoutes.append((route, handler))

        # Now handle the api doc if the handler has any attached
        if resource is None and hasattr(self, 'resourceName'):
            resource = self.resourceName
        elif resource is None:
            resource = handler.__module__.rsplit('.', 1)[-1]

        if hasattr(handler, 'description'):
            if handler.description is not None:
                docs.addRouteDocs(
                    resource=resource, route=route, method=method,
                    info=handler.description.asDict(), handler=handler)
        elif not nodoc:
            routePath = '/'.join([resource] + list(route))
            print(TerminalColor.warning(
                'WARNING: No description docs present for route {} {}'
                .format(method, routePath)))
Example #5
def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None,
                buildDag=True):
    """
    Loads a set of plugins into the application.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :type root: object
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :param apiRoot: The cherrypy api root object.
    :type apiRoot: object or None
    :param curConfig: A girder config object to use.
    :type curConfig: dict or None
    :param buildDag: If the ``plugins`` parameter is already a topo-sorted list
        with all dependencies resolved, set this to False and it will skip
        rebuilding the DAG. Otherwise the dependency resolution and sorting
        will occur within this method.
    :type buildDag: bool
    :returns: A 3-tuple containing the modified root, config, and apiRoot
        objects.
    :rtype tuple:
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if curConfig is None:
        curConfig = _config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        print(TerminalColor.warning(
            'Warning: the plugin_directory setting is deprecated. Please use '
            'the `girder-install plugin` command and remove this setting from '
            'your config file.'))

    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        module = imp.new_module(ROOT_PLUGINS_PACKAGE)
        girder.plugins = module
        sys.modules[ROOT_PLUGINS_PACKAGE] = module

    print(TerminalColor.info('Resolving plugin dependencies...'))

    if buildDag:
        plugins = getToposortedPlugins(plugins, curConfig, ignoreMissing=True)

    for plugin in plugins:
        try:
            root, appconf, apiRoot = loadPlugin(
                plugin, root, appconf, apiRoot, curConfig=curConfig)
            print(TerminalColor.success('Loaded plugin "%s"' % plugin))
        except Exception:
            print(TerminalColor.error(
                'ERROR: Failed to load plugin "%s":' % plugin))
            girder.logger.exception('Plugin load failure: %s' % plugin)
            traceback.print_exc()

    return root, appconf, apiRoot
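Two ways this loader might be invoked, following the ``buildDag`` docstring above (plugin names are placeholders): let the function resolve and sort dependencies itself, or hand it an already topo-sorted list and skip rebuilding the DAG:

curConfig = _config.getConfig()

# Default: dependency resolution and sorting happen inside loadPlugins.
root, appconf, apiRoot = loadPlugins(
    ['jobs', 'thumbnails'], root, appconf, apiRoot, curConfig=curConfig)

# Pre-sorted: skip the DAG rebuild.
ordered = getToposortedPlugins(['jobs', 'thumbnails'], curConfig, ignoreMissing=True)
root, appconf, apiRoot = loadPlugins(
    ordered, root, appconf, apiRoot, curConfig=curConfig, buildDag=False)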
Example #6
def getDbConnection(uri=None, replicaSet=None):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally.

    :param uri: if specified, connect to this mongo db rather than the one in
                the config.
    :param replicaSet: if uri is specified, use this replica set.
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    if uri is None or uri == '':
        dbConf = getDbConfig()
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')
    clientOptions = {
        'connectTimeoutMS': 15000,
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect.  If it
        # isn't set, we have issues with replica sets when the primary goes
        # down.  This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
    }
    if uri is None:
        # The default URI (and its accompanying warning) was redacted in this
        # snippet; girder falls back to a local default here.
        dbUriRedacted = 'mongodb://*****:*****'
        client = pymongo.MongoClient(dbUriRedacted, **clientOptions)
    else:
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

        if replicaSet:
            client = pymongo.MongoReplicaSetClient(
                uri, replicaSet=replicaSet,
                read_preference=ReadPreference.SECONDARY_PREFERRED,
                **clientOptions)
        else:
            client = pymongo.MongoClient(uri, **clientOptions)
    client = MongoProxy(client, logger=logger)
    _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client
    desc = ''
    if replicaSet:
        desc += ', replica set: %s' % replicaSet
    print(TerminalColor.info('Connected to MongoDB: %s%s' % (dbUriRedacted,
                                                             desc)))
    return client
Example #7
    def route(self, method, route, handler, nodoc=False, resource=None):
        """
        Define a route for your REST resource.

        :param method: The HTTP method, e.g. 'GET', 'POST', 'PUT', 'PATCH'
        :type method: str
        :param route: The route, as a list of path params relative to the
            resource root. Elements of this list starting with ':' are assumed
            to be wildcards.
        :type route: tuple
        :param handler: The method to be called if the route and method are
            matched by a request. Wildcards in the route will be expanded and
            passed as kwargs with the same name as the wildcard identifier.
        :type handler: function
        :param nodoc: If your route intentionally provides no documentation,
            set this to True to disable the warning on startup.
        :type nodoc: bool
        :param resource: The name of the resource at the root of this route.
        """
        if not hasattr(self, '_routes'):
            self._routes = collections.defaultdict(
                lambda: collections.defaultdict(list))

        # Insertion sort to maintain routes in required order.
        nLengthRoutes = self._routes[method.lower()][len(route)]
        for i in xrange(0, len(nLengthRoutes)):
            if self._shouldInsertRoute(route, nLengthRoutes[i][0]):
                nLengthRoutes.insert(i, (route, handler))
                break
        else:
            nLengthRoutes.append((route, handler))

        # Now handle the api doc if the handler has any attached
        if resource is None and hasattr(self, 'resourceName'):
            resource = self.resourceName
        elif resource is None:
            resource = handler.__module__.rsplit('.', 1)[-1]

        if hasattr(handler, 'description'):
            if handler.description is not None:
                docs.addRouteDocs(
                    resource=resource, route=route, method=method,
                    info=handler.description.asDict(), handler=handler)
        elif not nodoc:
            routePath = '/'.join([resource] + list(route))
            print(TerminalColor.warning(
                'WARNING: No description docs present for route {} {}'
                .format(method, routePath)))

        # Warn if there is no access decorator on the handler function
        if not hasattr(handler, 'accessLevel'):
            routePath = '/'.join([resource] + list(route))
            print(TerminalColor.warning(
                'WARNING: No access level specified for route {} {}'
                .format(method, routePath)))
Example #8
def findAllPlugins(curConfig=None):
    """
    Walks the plugins directory to find all of the plugins. If the plugin has
    a plugin.json file, this reads that file to determine dependencies.
    """
    allPlugins = {}
    pluginsDir = getPluginDir(curConfig)
    if not pluginsDir:
        print(
            TerminalColor.warning('Plugin directory not found. No plugins '
                                  'loaded.'))
        return allPlugins
    dirs = [
        dir for dir in os.listdir(pluginsDir)
        if os.path.isdir(os.path.join(pluginsDir, dir))
    ]

    for plugin in dirs:
        data = {}
        configJson = os.path.join(pluginsDir, plugin, 'plugin.json')
        configYml = os.path.join(pluginsDir, plugin, 'plugin.yml')
        if os.path.isfile(configJson):
            with open(configJson) as conf:
                try:
                    data = json.load(conf)
                except ValueError as e:
                    print(
                        TerminalColor.error(
                            'ERROR: Plugin "%s": plugin.json is not valid JSON.'
                            % plugin))
                    print(e)
                    continue
        elif os.path.isfile(configYml):
            with open(configYml) as conf:
                try:
                    data = yaml.safe_load(conf)
                except yaml.YAMLError as e:
                    print(
                        TerminalColor.error(
                            'ERROR: Plugin "%s": plugin.yml is not valid YAML.'
                            % plugin))
                    print(e)
                    continue

        allPlugins[plugin] = {
            'name': data.get('name', plugin),
            'description': data.get('description', ''),
            'version': data.get('version', ''),
            'dependencies': set(data.get('dependencies', []))
        }

    return allPlugins
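For reference, a sketch of what ``json.load(conf)`` would need to yield for one of these plugin.json files; only the four keys read above matter, and each has a fallback:

data = {
    'name': 'my_plugin',            # defaults to the directory name
    'description': 'Adds widgets.',
    'version': '0.1.0',
    'dependencies': ['jobs'],       # converted to a set by the caller
}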
Example #9
def findAllPlugins(curConfig=None):
    """
    Walks the plugins directories to find all of the plugins. If the plugin has
    a plugin.json file, this reads that file to determine dependencies.
    """
    allPlugins = {}
    pluginDirs = getPluginDirs(curConfig)
    if not pluginDirs:
        print(TerminalColor.warning('Plugin directory not found. No plugins '
              'loaded.'))
        return allPlugins

    for pluginDir in pluginDirs:
        dirs = [dir for dir in os.listdir(pluginDir) if os.path.isdir(
            os.path.join(pluginDir, dir))]

        for plugin in dirs:
            data = {}
            configJson = os.path.join(pluginDir, plugin, 'plugin.json')
            configYml = os.path.join(pluginDir, plugin, 'plugin.yml')
            if os.path.isfile(configJson):
                with open(configJson) as conf:
                    try:
                        data = json.load(conf)
                    except ValueError as e:
                        print(
                            TerminalColor.error(
                                ('ERROR: Plugin "%s": '
                                 'plugin.json is not valid JSON.') % plugin))
                        print(e)
                        continue
            elif os.path.isfile(configYml):
                with open(configYml) as conf:
                    try:
                        data = yaml.safe_load(conf)
                    except yaml.YAMLError as e:
                        print(
                            TerminalColor.error(
                                ('ERROR: Plugin "%s": '
                                 'plugin.yml is not valid YAML.') % plugin))
                        print(e)
                        continue

            allPlugins[plugin] = {
                'name': data.get('name', plugin),
                'description': data.get('description', ''),
                'version': data.get('version', ''),
                'dependencies': set(data.get('dependencies', []))
            }

    return allPlugins
Example #10
def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None):
    """
    Loads a set of plugins into the application. The list passed in should not
    already contain dependency information; dependent plugins will be loaded
    automatically.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :returns: A list of plugins that were actually loaded, once dependencies
              were resolved and topological sort was performed.
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if curConfig is None:
        curConfig = config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        print(TerminalColor.warning(
            'Warning: the plugin_directory setting is deprecated. Please use '
            'the `girder-install plugin` command and remove this setting from '
            'your config file.'))

    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        module = imp.new_module(ROOT_PLUGINS_PACKAGE)
        girder.plugins = module
        sys.modules[ROOT_PLUGINS_PACKAGE] = module

    print(TerminalColor.info('Resolving plugin dependencies...'))

    filteredDepGraph = {
        pluginName: info['dependencies']
        for pluginName, info in six.viewitems(findAllPlugins(curConfig))
        if pluginName in plugins
    }

    for pset in toposort(filteredDepGraph):
        for plugin in pset:
            try:
                root, appconf, apiRoot = loadPlugin(
                    plugin, root, appconf, apiRoot, curConfig=curConfig)
                print(TerminalColor.success('Loaded plugin "{}"'
                                            .format(plugin)))
            except Exception:
                print(TerminalColor.error(
                    'ERROR: Failed to load plugin "{}":'.format(plugin)))
                traceback.print_exc()

    return root, appconf, apiRoot
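How the ``toposort`` call behaves on the filtered dependency graph, assuming the standard ``toposort`` package: it yields sets of plugins whose dependencies are satisfied by the sets that came before.

from toposort import toposort

filteredDepGraph = {'a': set(), 'b': {'a'}, 'c': {'a', 'b'}}
print(list(toposort(filteredDepGraph)))
# [{'a'}, {'b'}, {'c'}] -- 'a' loads first, then 'b', then 'c'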
Example #11
def process_annotations(event):
    """Add annotations to an image on a ``data.process`` event"""
    info = event.info
    if 'anot' in info.get('file', {}).get('exts', []):
        reference = info.get('reference', None)

        try:
            reference = json.loads(reference)
        except (ValueError, TypeError):
            print(
                TerminalColor.error(
                    'Warning: Could not get reference from the annotation param. '
                    'Make sure you have ctk-cli>=1.3.1 installed.'))
            return

        if 'userId' not in reference or 'itemId' not in reference:
            print(
                TerminalColor.error(
                    'Annotation reference does not contain required information.'
                ))
            return

        userId = reference['userId']
        imageId = reference['itemId']

        # load model classes
        Item = ModelImporter.model('item')
        File = ModelImporter.model('file')
        User = ModelImporter.model('user')
        Annotation = ModelImporter.model('annotation', plugin='large_image')

        # load models from the database
        user = User.load(userId, force=True)
        image = File.load(imageId, level=AccessType.READ, user=user)
        item = Item.load(image['itemId'], level=AccessType.WRITE, user=user)
        file = File.load(info.get('file', {}).get('_id'),
                         level=AccessType.READ,
                         user=user)

        if not (item and user and file):
            print(
                TerminalColor.error('Could not load models from the database'))
            return

        try:
            data = json.loads(''.join(File.download(file)()))
        except Exception:
            print(TerminalColor.error('Could not parse annotation file'))
            return

        Annotation.createAnnotation(item, user, data)
Example #12

def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None):
    """
    Loads a set of plugins into the application. The list passed in should not
    already contain dependency information; dependent plugins will be loaded
    automatically.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :returns: A list of plugins that were actually loaded, once dependencies
              were resolved and topological sort was performed.
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if curConfig is None:
        curConfig = config.getConfig()

    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        module = imp.new_module(ROOT_PLUGINS_PACKAGE)
        girder.plugins = module
        sys.modules[ROOT_PLUGINS_PACKAGE] = module

    print(TerminalColor.info('Resolving plugin dependencies...'))

    filteredDepGraph = {
        pluginName: info['dependencies']
        for pluginName, info in six.iteritems(findAllPlugins(curConfig))
        if pluginName in plugins
    }

    for pset in toposort(filteredDepGraph):
        for plugin in pset:
            try:
                root, appconf, apiRoot = loadPlugin(plugin,
                                                    root,
                                                    appconf,
                                                    apiRoot,
                                                    curConfig=curConfig)
                print(
                    TerminalColor.success('Loaded plugin "{}"'.format(plugin)))
            except Exception:
                print(
                    TerminalColor.error(
                        'ERROR: Failed to load plugin "{}":'.format(plugin)))
                traceback.print_exc()

    return root, appconf, apiRoot
Example #13
def loadPlugins(plugins, root, appconf, apiRoot=None, curConfig=None):
    """
    Loads a set of plugins into the application. The list passed in should not
    already contain dependency information; dependent plugins will be loaded
    automatically.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :returns: A list of plugins that were actually loaded, once dependencies
              were resolved and topological sort was performed.
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if curConfig is None:
        curConfig = config.getConfig()
    if "plugins" in curConfig and "plugin_directory" in curConfig["plugins"]:
        pluginDir = curConfig["plugins"]["plugin_directory"]
    elif os.path.exists(os.path.join(PACKAGE_DIR, "plugins")):
        pluginDir = os.path.join(PACKAGE_DIR, "plugins")
    else:
        pluginDir = os.path.join(ROOT_DIR, "plugins")

    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        sys.modules[ROOT_PLUGINS_PACKAGE] = type(
            "", (), {"__path__": pluginDir, "__package__": ROOT_PLUGINS_PACKAGE, "__name__": ROOT_PLUGINS_PACKAGE}
        )()

    print(TerminalColor.info("Resolving plugin dependencies..."))

    filteredDepGraph = {
        pluginName: info["dependencies"]
        for pluginName, info in six.iteritems(findAllPlugins(curConfig))
        if pluginName in plugins
    }

    for pset in toposort(filteredDepGraph):
        for plugin in pset:
            try:
                root, appconf, apiRoot = loadPlugin(plugin, root, appconf, apiRoot, curConfig=curConfig)
                print(TerminalColor.success('Loaded plugin "{}"'.format(plugin)))
            except Exception:
                print(TerminalColor.error('ERROR: Failed to load plugin "{}":'.format(plugin)))
                traceback.print_exc()

    return root, appconf, apiRoot
Example #14
def getPluginDir(curConfig=None):
    """
    Returns the path to the currently configured plugin directory.
    """
    if curConfig is None:
        curConfig = config.getConfig()

    # This uses the plugin directory specified in the config first.
    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        pluginsDir = curConfig['plugins']['plugin_directory']

    # If none is specified, it looks if there is a plugin directory next
    # to the girder python package.  This is the case when running from the
    # git repository.
    elif os.path.isdir(os.path.join(ROOT_DIR, 'plugins')):
        pluginsDir = os.path.join(ROOT_DIR, 'plugins')

    # As a last resort, use plugins inside the girder python package.
    # This is intended to occur when girder is pip installed.
    else:
        pluginsDir = os.path.join(PACKAGE_DIR, 'plugins')
    if not os.path.exists(pluginsDir):
        try:
            os.makedirs(pluginsDir)
        except OSError:
            if not os.path.exists(pluginsDir):
                print(
                    TerminalColor.warning('Could not create plugin directory.')
                )
                pluginsDir = None
    return pluginsDir
Example #15
    def __init__(self):
        self.name = None
        self._indices = []
        self._textIndex = None
        self._textLanguage = None

        self._filterKeys = {
            AccessType.READ: set(),
            AccessType.WRITE: set(),
            AccessType.ADMIN: set(),
            AccessType.SITE_ADMIN: set()
        }

        self.initialize()

        db_connection = getDbConnection()
        self.database = db_connection.get_default_database()
        self.collection = MongoProxy(self.database[self.name])

        for index in self._indices:
            if isinstance(index, (list, tuple)):
                self.collection.ensure_index(index[0], **index[1])
            else:
                self.collection.ensure_index(index)

        if type(self._textIndex) is dict:
            textIdx = [(k, 'text') for k in self._textIndex.keys()]
            try:
                self.collection.ensure_index(
                    textIdx,
                    weights=self._textIndex,
                    default_language=self._textLanguage)
            except pymongo.errors.OperationFailure:
                print(
                    TerminalColor.warning('WARNING: Text search not enabled.'))
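An illustrative (made-up) subclass showing the state ``initialize()`` is expected to populate before the index loop above runs; the base class name ``Model`` is assumed. A plain string becomes a single-field index, a ``(spec, kwargs)`` pair is splatted into ``ensure_index``, and ``_textIndex`` maps fields to text-search weights:

class Widget(Model):
    def initialize(self):
        self.name = 'widget'
        self._indices = [
            'created',
            ([('folderId', 1), ('lowerName', 1)], {'sparse': True}),
        ]
        self._textIndex = {'name': 10, 'description': 1}
        self._textLanguage = 'english'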
Example #16

def _finish_3d_coords_gen(inchikey, user, future):

    resp = future.result()

    query = {'inchikey': inchikey}

    updates = {}
    updates.setdefault('$unset', {})['generating_3d_coords'] = ''

    if resp.status_code == 200:
        sdf_data = resp.text
        cjson = json.loads(avogadro.convert_str(sdf_data, 'sdf', 'cjson'))
        cjson = whitelist_cjson(cjson)
        updates.setdefault('$set', {})['cjson'] = cjson
    else:
        print('Generating SDF failed!')
        print('Status code was:', resp.status_code)
        print('Reason was:', resp.reason)

    update_result = super(MoleculeModel,
                          MoleculeModel()).update(query, updates)

    if update_result.matched_count == 0:
        raise ValidationException('Invalid inchikey (%s)' % inchikey)

    # Upload the molecule to virtuoso
    try:
        semantic.upload_molecule(MoleculeModel().findOne(query))
    except requests.ConnectionError:
        print(TerminalColor.warning('WARNING: Couldn\'t connect to Jena.'))
Example #17
    def reconnect(self):
        """
        Reconnect to the database and rebuild indices if necessary. Users should
        typically not have to call this method.
        """
        db_connection = getDbConnection()
        self.database = db_connection.get_default_database()
        self.collection = MongoProxy(self.database[self.name])

        for index in self._indices:
            if isinstance(index, (list, tuple)):
                self.collection.ensure_index(index[0], **index[1])
            else:
                self.collection.ensure_index(index)

        if type(self._textIndex) is dict:
            textIdx = [(k, 'text') for k in self._textIndex.keys()]
            try:
                self.collection.ensure_index(
                    textIdx,
                    weights=self._textIndex,
                    default_language=self._textLanguage)
            except pymongo.errors.OperationFailure:
                print(
                    TerminalColor.warning('WARNING: Text search not enabled.'))
Example #18

def getPluginDir(curConfig=None):
    """
    Returns the path to the currently configured plugin directory.
    """
    if curConfig is None:
        curConfig = config.getConfig()

    # This uses the plugin directory specified in the config first.
    if "plugins" in curConfig and "plugin_directory" in curConfig["plugins"]:
        pluginsDir = curConfig["plugins"]["plugin_directory"]

    # If none is specified, it looks if there is a plugin directory next
    # to the girder python package.  This is the case when running from the
    # git repository.
    elif os.path.isdir(os.path.join(ROOT_DIR, "plugins")):
        pluginsDir = os.path.join(ROOT_DIR, "plugins")

    # As a last resort, use plugins inside the girder python package.
    # This is intended to occur when girder is pip installed.
    else:
        pluginsDir = os.path.join(PACKAGE_DIR, "plugins")
    if not os.path.exists(pluginsDir):
        try:
            os.makedirs(pluginsDir)
        except OSError:
            if not os.path.exists(pluginsDir):
                print(TerminalColor.warning("Could not create plugin directory."))
                pluginsDir = None
    return pluginsDir
Example #19
    def __init__(self):
        self.name = None
        self._indices = []
        self._textIndex = None
        self._textLanguage = None

        self.initialize()

        db_cfg = getDbConfig()
        db_connection = getDbConnection()
        dbName = db_cfg['database']
        self.database = db_connection[dbName]
        self.collection = self.database[self.name]

        for index in self._indices:
            if isinstance(index, (list, tuple)):
                self.collection.ensure_index(index[0], **index[1])
            else:
                self.collection.ensure_index(index)

        if type(self._textIndex) is dict:
            textIdx = [(k, 'text') for k in self._textIndex.keys()]
            try:
                self.collection.ensure_index(
                    textIdx,
                    weights=self._textIndex,
                    default_language=self._textLanguage)
            except pymongo.errors.OperationFailure:
                print(
                    TerminalColor.warning('WARNING: Text search not enabled.'))
Example #20
def getPluginDirs(curConfig=None):
    """Return an ordered list of directories that plugins can live in."""
    failedPluginDirs = set()

    if curConfig is None:
        curConfig = config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        pluginDirs = curConfig['plugins']['plugin_directory'].split(':')
    else:
        pluginDirs = [defaultPluginDir()]

    for pluginDir in pluginDirs:
        if not os.path.exists(pluginDir):
            try:
                os.makedirs(pluginDir)
            except OSError:
                if not os.path.exists(pluginDir):
                    print(
                        TerminalColor.warning(
                            'Could not create plugin directory %s.' %
                            pluginDir))

                    failedPluginDirs.add(pluginDir)

    return [dir for dir in pluginDirs if dir not in failedPluginDirs]
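A sketch of the config shape this reads (paths are illustrative): multiple directories are colon-separated, each is created if missing, and any directory that cannot be created is filtered out of the result.

curConfig = {'plugins': {'plugin_directory': '/opt/girder_plugins:/srv/more_plugins'}}
print(getPluginDirs(curConfig))
# ['/opt/girder_plugins', '/srv/more_plugins']  (assuming both could be created)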
Example #21
    def errorResponse(self, reason='A parameter was invalid.', code=400):
        """
        This helper will build an errorResponse declaration for you. Many
        endpoints will be able to use the default parameter values for one of
        their responses.

        :param reason: The reason or list of reasons why the error occurred.
        :type reason: str, list, or tuple
        :param code: HTTP status code.
        :type code: int
        """
        code = str(code)

        # Combine list of reasons into a single string.
        # swagger-ui renders the description using Markdown.
        if not isinstance(reason, six.string_types):
            reason = '\n\n'.join(reason)

        if code in self._responses:
            print(TerminalColor.warning(
                "WARNING: Error response for code '%s' is already defined "
                "(old: '%s', new: '%s')"
                % (code, self._responses[code]['description'], reason)))

        self._responses[code] = {
            'description': reason
        }

        return self
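A sketch of the usual chained use on an endpoint's ``Description``; the ``param`` call and the handler name are the customary girder idiom and are assumed here rather than taken from the snippet:

getWidget.description = (
    Description('Get a widget by ID.')
    .param('id', 'The ID of the widget.', paramType='path')
    .errorResponse()                # default: 400, 'A parameter was invalid.'
    .errorResponse('Read access was denied on the widget.', 403))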
Example #22
    def __init__(self):
        self.name = None
        self._indices = []
        self._textIndex = None
        self._textLanguage = None

        self._filterKeys = {
            AccessType.READ: set(),
            AccessType.WRITE: set(),
            AccessType.ADMIN: set(),
            AccessType.SITE_ADMIN: set()
        }

        self.initialize()

        db_connection = getDbConnection()
        self.database = db_connection.get_default_database()
        self.collection = MongoProxy(self.database[self.name])

        for index in self._indices:
            if isinstance(index, (list, tuple)):
                self.collection.ensure_index(index[0], **index[1])
            else:
                self.collection.ensure_index(index)

        if type(self._textIndex) is dict:
            textIdx = [(k, 'text') for k in self._textIndex.keys()]
            try:
                self.collection.ensure_index(
                    textIdx, weights=self._textIndex,
                    default_language=self._textLanguage)
            except pymongo.errors.OperationFailure:
                print(
                    TerminalColor.warning('WARNING: Text search not enabled.'))
Example #23
    def __init__(self):
        self.name = None
        self._indices = []
        self._textIndex = None
        self._textLanguage = None

        self.initialize()

        db_cfg = getDbConfig()
        db_connection = getDbConnection()
        dbName = db_cfg['database']
        self.database = db_connection[dbName]
        self.collection = self.database[self.name]

        for index in self._indices:
            if isinstance(index, (list, tuple)):
                self.collection.ensure_index(index[0], **index[1])
            else:
                self.collection.ensure_index(index)

        if type(self._textIndex) is dict:
            textIdx = [(k, 'text') for k in self._textIndex.keys()]
            try:
                self.collection.ensure_index(
                    textIdx, weights=self._textIndex,
                    default_language=self._textLanguage)
            except pymongo.errors.OperationFailure:
                print(
                    TerminalColor.warning('WARNING: Text search not enabled.'))
Example #24

    def _on_complete(mol):
        # Upload the molecule to Jena
        try:
            semantic.upload_molecule(mol)
        except requests.ConnectionError:
            print(
                TerminalColor.warning(
                    'WARNING: Couldn\'t connect to Jena.'))
Example #25
    def _validateParamInfo(self, dataType, paramType, name):
        """
        Helper to convert and validate the dataType and paramType.
        Prints warnings if invalid values were passed.
        """
        # Legacy data type conversions
        if dataType == 'int':
            dataType = 'integer'

        # Parameter Object spec:
        # If type is "file", then the swagger "consumes" field MUST be either
        # "multipart/form-data", "application/x-www-form-urlencoded" or both
        # and the parameter MUST be in "formData".
        if dataType == 'file':
            paramType = 'formData'

        # Get type and format from common name
        dataTypeFormat = None
        if dataType in self._dataTypeMap:
            dataType, dataTypeFormat = self._dataTypeMap[dataType]
        # If we are dealing with the body then the dataType might be defined
        # by a schema added using addModel(...), we don't know for sure as we
        # don't know the resource name here to look it up.
        elif paramType != 'body':
            print(
                TerminalColor.warning(
                    'WARNING: Invalid dataType "%s" specified for parameter "%s"'
                    % (dataType, name)))

        # Parameter Object spec:
        # Since the parameter is not located at the request body, it is limited
        # to simple types (that is, not an object).
        if paramType != 'body' and dataType not in (
                'string', 'number', 'integer', 'long', 'boolean', 'array',
                'file', 'float', 'double', 'date', 'dateTime'):
            print(
                TerminalColor.warning(
                    'WARNING: Invalid dataType "%s" specified for parameter "%s"'
                    % (dataType, name)))

        if paramType == 'form':
            paramType = 'formData'

        return dataType, dataTypeFormat, paramType
Example #26
def findEntryPointPlugins(allPlugins):
    # look for plugins enabled via setuptools `entry_points`
    for entry_point in iter_entry_points(group='girder.plugin'):
        # set defaults
        allPlugins[entry_point.name] = {
            'name': entry_point.name,
            'description': '',
            'version': '',
            'dependencies': set()
        }
        configJson = os.path.join('girder', 'plugin.json')
        configYml = os.path.join('girder', 'plugin.yml')
        data = {}
        try:
            if pkg_resources.resource_exists(entry_point.name, configJson):
                with pkg_resources.resource_stream(entry_point.name,
                                                   configJson) as conf:
                    try:
                        data = json.load(codecs.getreader('utf8')(conf))
                    except ValueError as e:
                        print(
                            TerminalColor.error(
                                'ERROR: Plugin "%s": plugin.json is not valid '
                                'JSON.' % entry_point.name))
                        print(e)
            elif pkg_resources.resource_exists(entry_point.name, configYml):
                with pkg_resources.resource_stream(entry_point.name,
                                                   configYml) as conf:
                    try:
                        data = yaml.safe_load(conf)
                    except yaml.YAMLError as e:
                        print(
                            TerminalColor.error(
                                'ERROR: Plugin "%s": plugin.yml is not valid '
                                'YAML.' % entry_point.name))
                        print(e)
        except ImportError:
            pass
        if data == {}:
            data = getattr(entry_point.load(), 'config', {})
        allPlugins[entry_point.name].update(data)
        allPlugins[entry_point.name]['dependencies'] = set(
            allPlugins[entry_point.name]['dependencies'])
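For context, a sketch of how a pip-installed plugin surfaces in this scan: its setup.py registers an entry point in the ``girder.plugin`` group (package and plugin names are placeholders).

from setuptools import setup

setup(
    name='girder-my-plugin',
    version='0.1.0',
    packages=['my_plugin'],
    entry_points={
        'girder.plugin': [
            'my_plugin = my_plugin',   # entry_point.name becomes the plugin key
        ],
    },
)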
Example #27
def findEntryPointPlugins(allPlugins):
    # look for plugins enabled via setuptools `entry_points`
    for entry_point in iter_entry_points(group='girder.plugin'):
        # set defaults
        allPlugins[entry_point.name] = {
            'name': entry_point.name,
            'description': '',
            'version': '',
            'dependencies': set()
        }
        configJson = os.path.join('girder', 'plugin.json')
        configYml = os.path.join('girder', 'plugin.yml')
        data = {}
        try:
            if pkg_resources.resource_exists(entry_point.name, configJson):
                with pkg_resources.resource_stream(
                        entry_point.name, configJson) as conf:
                    try:
                        data = json.load(codecs.getreader('utf8')(conf))
                    except ValueError as e:
                        print(
                            TerminalColor.error(
                                'ERROR: Plugin "%s": plugin.json is not valid '
                                'JSON.' % entry_point.name))
                        print(e)
            elif pkg_resources.resource_exists(entry_point.name, configYml):
                with pkg_resources.resource_stream(
                        entry_point.name, configYml) as conf:
                    try:
                        data = yaml.safe_load(conf)
                    except yaml.YAMLError as e:
                        print(
                            TerminalColor.error(
                                'ERROR: Plugin "%s": plugin.yml is not valid '
                                'YAML.' % entry_point.name))
                        print(e)
        except ImportError:
            pass
        if data == {}:
            data = getattr(entry_point.load(), 'config', {})
        allPlugins[entry_point.name].update(data)
        allPlugins[entry_point.name]['dependencies'] = set(
            allPlugins[entry_point.name]['dependencies'])
Example #28

def findAllPlugins(curConfig=None):
    """
    Walks the plugins directory to find all of the plugins. If the plugin has
    a plugin.json file, this reads that file to determine dependencies.
    """
    allPlugins = {}
    pluginsDir = getPluginDir(curConfig)
    if not pluginsDir:
        print(TerminalColor.warning("Plugin directory not found. No plugins " "loaded."))
        return allPlugins
    dirs = [dir for dir in os.listdir(pluginsDir) if os.path.isdir(os.path.join(pluginsDir, dir))]

    for plugin in dirs:
        data = {}
        configJson = os.path.join(pluginsDir, plugin, "plugin.json")
        configYml = os.path.join(pluginsDir, plugin, "plugin.yml")
        if os.path.isfile(configJson):
            with open(configJson) as conf:
                try:
                    data = json.load(conf)
                except ValueError as e:
                    print(TerminalColor.error('ERROR: Plugin "%s": plugin.json is not valid JSON.' % plugin))
                    print(e)
                    continue
        elif os.path.isfile(configYml):
            with open(configYml) as conf:
                try:
                    data = yaml.safe_load(conf)
                except yaml.YAMLError as e:
                    print(TerminalColor.error('ERROR: Plugin "%s": plugin.yml is not valid YAML.' % plugin))
                    print(e)
                    continue

        allPlugins[plugin] = {
            "name": data.get("name", plugin),
            "description": data.get("description", ""),
            "version": data.get("version", ""),
            "dependencies": set(data.get("dependencies", [])),
        }

    return allPlugins
Example #29
    def route(self, method, route, handler, nodoc=False):
        """
        Define a route for your REST resource.

        :param method: The HTTP method, e.g. 'GET', 'POST', 'PUT'
        :type method: str
        :param route: The route, as a list of path params relative to the
        resource root. Elements of this list starting with ':' are assumed to
        be wildcards.
        :type route: list
        :param handler: The method to be called if the route and method are
        matched by a request. Wildcards in the route will be expanded and
        passed as kwargs with the same name as the wildcard identifier.
        :type handler: function
        :param nodoc: If your route intentionally provides no documentation,
                      set this to True to disable the warning on startup.
        :type nodoc: bool
        """
        if not hasattr(self, '_routes'):
            self._routes = {
                'get': [],
                'post': [],
                'put': [],
                'delete': []
            }
        self._routes[method.lower()].append((route, handler))

        # Now handle the api doc if the handler has any attached
        resourceName = handler.im_class.__module__.rsplit('.', 1)[-1]
        if hasattr(handler, 'description'):
            docs.addRouteDocs(
                resource=resourceName, route=route, method=method,
                info=handler.description.asDict(), handler=handler)
        elif not nodoc:
            routePath = '/'.join([resourceName] + list(route))
            print(TerminalColor.warning(
                'WARNING: No description docs present for route {} {}'
                .format(method, routePath)))
Example #30
    def _ensureInit(self):
        """
        Calls ``Resource.__init__`` if the subclass constructor did not already
        do so.

        In the past, Resource subclasses were not expected to call their
        superclass constructor.
        """
        if not hasattr(self, '_routes'):
            Resource.__init__(self)
            print(TerminalColor.warning(
                'WARNING: Resource subclass "%s" did not call '
                '"Resource__init__()" from its constructor.' %
                self.__class__.__name__))
Example #31
    def _ensureInit(self):
        """
        Calls ``Resource.__init__`` if the subclass constructor did not already
        do so.

        In the past, Resource subclasses were not expected to call their
        superclass constructor.
        """
        if not hasattr(self, '_routes'):
            Resource.__init__(self)
            print(
                TerminalColor.warning(
                    'WARNING: Resource subclass "%s" did not call '
                    '"Resource__init__()" from its constructor.' %
                    self.__class__.__name__))
Example #32
def createNotebooks(event):

    # If there is no current asset store, just return
    try:
        Assetstore().getCurrent()
    except GirderException:
        print(
            TerminalColor.warning('WARNING: no current asset store. '
                                  'Notebook will not be created.'))
        return

    user = event.info
    folder_model = Folder()

    result = lookUpPath('user/%s/Private' % user['login'], force=True)
    private_folder = result['document']

    oc_folder = folder_model.createFolder(private_folder,
                                          'oc',
                                          parentType='folder',
                                          creator=user,
                                          public=True,
                                          reuseExisting=True)

    notebook_folder = folder_model.createFolder(oc_folder,
                                                'notebooks',
                                                parentType='folder',
                                                creator=user,
                                                public=True,
                                                reuseExisting=True)

    notebooks_dir = os.path.join(os.path.dirname(__file__), 'notebooks')

    upload_model = Upload()
    for file in glob.glob('%s/*.ipynb' % notebooks_dir):
        size = os.path.getsize(file)
        name = os.path.basename(file)
        with open(file, 'rb') as fp:
            upload_model.uploadFromFile(
                fp,
                size=size,
                name=name,
                parentType='folder',
                parent={'_id': ObjectId(notebook_folder['_id'])},
                user=user,
                mimeType='application/x-ipynb+json')
Example #33
    def addDeps(plugin):
        if plugin not in allPlugins:
            message = 'Required plugin %s does not exist.' % plugin
            if ignoreMissing:
                print(TerminalColor.error(message))
                girder.logger.error(message)
                return
            else:
                raise ValidationException(message)

        deps = allPlugins[plugin]['dependencies']
        dag[plugin] = deps

        for dep in deps:
            if dep in visited:
                return
            visited.add(dep)
            addDeps(dep)
Example #35
def addModel(name, model, resources=None, silent=False):
    """
    Add a model to the Swagger documentation.

    :param resources: The type(s) of resource(s) to add the model to. New
        resource types may be implicitly defined, with the expectation that
        routes will be added for them at some point. If no resources are
        passed, the model will be exposed for every resource type.
    :type resources: str or tuple/list[str]
    :param name: The name of the model.
    :type name: str
    :param model: The model to add.
    :type model: dict
    :param silent: Set this to True to suppress warnings.
    :type silent: bool

    .. warning:: This is a low-level API which does not validate the format of
       ``model``. See the `Swagger Model documentation`_ for a complete
       specification of the correct format for ``model``.

    .. versionchanged:: The syntax and behavior of this function was modified
        after v1.3.2. The previous implementation did not include a resources
        parameter.

    .. _Swagger Model documentation: https://github.com/swagger-api/
       swagger-spec/blob/d79c205485d702302003d4de2f2c980d1caf10f9/
       versions/1.2.md#527-model-object
    """
    if resources:
        if isinstance(resources, six.string_types):
            resources = (resources, )
        for resource in resources:
            models[resource][name] = model
    else:
        if not silent:
            print(
                TerminalColor.warning(
                    'WARNING: adding swagger models without specifying resources '
                    'to bind to is discouraged (%s).' % name))
        models[None][name] = model
Example #36
def addModel(name, model, resources=None, silent=False):
    """
    Add a model to the Swagger documentation.

    :param resources: The type(s) of resource(s) to add the model to. New
        resource types may be implicitly defined, with the expectation that
        routes will be added for them at some point. If no resources are
        passed, the model will be exposed for every resource type.
    :type resources: str or tuple/list[str]
    :param name: The name of the model.
    :type name: str
    :param model: The model to add.
    :type model: dict
    :param silent: Set this to True to suppress warnings.
    :type silent: bool

    .. warning:: This is a low-level API which does not validate the format of
       ``model``. See the `Swagger Model documentation`_ for a complete
       specification of the correct format for ``model``.

    .. versionchanged:: The syntax and behavior of this function was modified
        after v1.3.2. The previous implementation did not include a resources
        parameter.

    .. _Swagger Model documentation: https://github.com/swagger-api/
       swagger-spec/blob/d79c205485d702302003d4de2f2c980d1caf10f9/
       versions/1.2.md#527-model-object
    """
    if resources:
        if isinstance(resources, six.string_types):
            resources = (resources,)
        for resource in resources:
            models[resource][name] = model
    else:
        if not silent:
            print(TerminalColor.warning(
                'WARNING: adding swagger models without specifying resources '
                'to bind to is discouraged (%s).' % name))
        models[None][name] = model
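A usage sketch: binding an illustrative Swagger 1.2 model object to the 'item' resource so the warning branch above is not taken. As the docstring notes, ``addModel`` does not validate the schema body.

addModel('WidgetSpec', {
    'id': 'WidgetSpec',
    'properties': {
        'name': {'type': 'string'},
        'count': {'type': 'integer'},
    },
}, resources='item')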
Example #37
def getPluginDirs(curConfig=None):
    """Return an ordered list of directories that plugins can live in."""
    failedPluginDirs = set()

    if curConfig is None:
        curConfig = _config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        pluginDirs = curConfig['plugins']['plugin_directory'].split(':')
    else:
        pluginDirs = [defaultPluginDir()]

    for pluginDir in pluginDirs:
        try:
            mkdir(pluginDir)
        except OSError:
            print(TerminalColor.warning(
                'Could not create plugin directory %s.' % pluginDir))

            failedPluginDirs.add(pluginDir)

    return [dir for dir in pluginDirs if dir not in failedPluginDirs]
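# Illustrative call (the config dict and paths are hypothetical): a
# colon-separated plugin_directory yields one candidate directory per path,
# and only those that exist or could be created are returned.
dirs = getPluginDirs(
    {'plugins': {'plugin_directory': '/opt/girder/plugins:/srv/plugins'}})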
Beispiel #38
0
    def reconnect(self):
        """
        Reconnect to the database and rebuild indices if necessary. Users should
        typically not have to call this method.
        """
        db_connection = getDbConnection()
        self.database = db_connection.get_default_database()
        self.collection = MongoProxy(self.database[self.name])

        for index in self._indices:
            if isinstance(index, (list, tuple)):
                self.collection.create_index(index[0], **index[1])
            else:
                self.collection.create_index(index)

        if type(self._textIndex) is dict:
            textIdx = [(k, 'text') for k in six.viewkeys(self._textIndex)]
            try:
                self.collection.create_index(
                    textIdx, weights=self._textIndex,
                    default_language=self._textLanguage)
            except pymongo.errors.OperationFailure:
                print(
                    TerminalColor.warning('WARNING: Text search not enabled.'))
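# Illustrative shape of the index declarations consumed by reconnect()
# (field names and options are hypothetical):
# self._indices = [
#     'lowerName',                               # simple single-field index
#     ([('meta.someKey', 1)], {'sparse': True})  # (spec, kwargs) passed through
# ]
# self._textIndex = {'name': 10, 'description': 1}  # weighted text index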
Beispiel #39
0
def loadPlugins(plugins, root, appconf):
    """
    Loads a set of plugins into the application. The list passed in should not
    already contain dependency information; dependent plugins will be loaded
    automatically.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :returns: A list of plugins that were actually loaded, once dependencies
              were resolved and topological sort was performed.
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        sys.modules[ROOT_PLUGINS_PACKAGE] = type(
            '', (), {
                '__path__': os.path.join(ROOT_DIR, 'plugins'),
                '__package__': ROOT_PLUGINS_PACKAGE,
                '__name__': ROOT_PLUGINS_PACKAGE
            })()

    print TerminalColor.info('Resolving plugin dependencies...')

    filteredDepGraph = {
        pluginName: info['dependencies']
        for pluginName, info in findAllPlugins().iteritems()
        if pluginName in plugins
    }

    for pset in toposort(filteredDepGraph):
        for plugin in pset:
            try:
                loadPlugin(plugin, root, appconf)
                print TerminalColor.success(
                    'Loaded plugin "{}"'.format(plugin))
            except Exception:
                print TerminalColor.error(
                    'ERROR: Failed to load plugin "{}":'.format(plugin))
                traceback.print_exc()
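# Sketch of the dependency graph consumed above (plugin names are made up):
# filteredDepGraph = {'worker': set(), 'large_image': {'worker'}}
# toposort() then yields sets level by level, e.g. [{'worker'}, {'large_image'}],
# so a plugin's dependencies are always loaded before the plugin itself.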
Beispiel #40
0
def loadPlugins(plugins, root):
    """
    Loads a set of plugins into the application. The list passed in should not
    already contain dependency information; dependent plugins will be loaded
    automatically.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :returns: A list of plugins that were actually loaded, once dependencies
              were resolved and topological sort was performed.
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        sys.modules[ROOT_PLUGINS_PACKAGE] = type(
            "",
            (),
            {
                "__path__": os.path.join(ROOT_DIR, "plugins"),
                "__package__": ROOT_PLUGINS_PACKAGE,
                "__name__": ROOT_PLUGINS_PACKAGE,
            },
        )()

    print TerminalColor.info("Resolving plugin dependencies...")

    filteredDepGraph = {
        pluginName: info["dependencies"] for pluginName, info in findAllPlugins().iteritems() if pluginName in plugins
    }

    for pset in toposort(filteredDepGraph):
        for plugin in pset:
            try:
                loadPlugin(plugin, root)
                print TerminalColor.success('Loaded plugin "{}"'.format(plugin))
            except Exception:
                print TerminalColor.error('ERROR: Failed to load plugin "{}":'.format(plugin))
                traceback.print_exc()
Beispiel #41
0
    def create(self, params):
        body = self.getBodyJson()
        user = self.getCurrentUser()
        public = body.get('public', False)
        if 'fileId' in body:
            file_id = body['fileId']
            calc_id = body.get('calculationId')
            file = self.model('file').load(file_id, user=user)
            parts = file['name'].split('.')
            input_format = parts[-1]
            name = '.'.join(parts[:-1])

            if input_format not in Molecule.input_formats:
                raise RestException('Input format not supported.', code=400)

            contents = functools.reduce(
                lambda x, y: x + y,
                self.model('file').download(file, headers=False)())
            data_str = contents.decode()

            # For now we are piggybacking experimental results upload here!
            # This should be refactored ...
            json_data = json.loads(data_str)
            if 'experiment' in json_data:
                return self._process_experimental(json_data)

            # Use the SDF format as it is the one with bonding that 3Dmol uses.
            output_format = 'sdf'

            if input_format == 'pdb':
                (output, _) = openbabel.convert_str(data_str, input_format,
                                                    output_format)
            else:
                output = avogadro.convert_str(data_str, input_format,
                                              output_format)

            # Get some basic molecular properties we want to add to the database.
            props = avogadro.molecule_properties(data_str, input_format)
            pieces = props['spacedFormula'].strip().split(' ')
            atomCounts = {}
            for i in range(0, int(len(pieces) / 2)):
                atomCounts[pieces[2 * i]] = int(pieces[2 * i + 1])

            cjson = []
            if input_format == 'cjson':
                cjson = json.loads(data_str)
            elif input_format == 'pdb':
                cjson = json.loads(avogadro.convert_str(
                    output, 'sdf', 'cjson'))
            else:
                cjson = json.loads(
                    avogadro.convert_str(data_str, input_format, 'cjson'))

            atom_count = openbabel.atom_count(data_str, input_format)

            if atom_count > 1024:
                raise RestException(
                    'Unable to generate InChI, molecule has more than 1024 atoms.',
                    code=400)

            (inchi, inchikey) = openbabel.to_inchi(output, 'sdf')

            if not inchi:
                raise RestException('Unable to extract inchi', code=400)

            # Check if the molecule exists; only create it if it does not.
            molExists = self._model.find_inchikey(inchikey)
            mol = {}
            if molExists:
                mol = molExists
            else:
                # Whitelist parts of the CJSON that we store at the top level.
                cjsonmol = {}
                cjsonmol['atoms'] = cjson['atoms']
                cjsonmol['bonds'] = cjson['bonds']
                cjsonmol['chemical json'] = cjson['chemical json']
                mol = self._model.create_xyz(
                    user, {
                        'name': chemspider.find_common_name(
                            inchikey, props['formula']),
                        'inchi': inchi,
                        'inchikey': inchikey,
                        output_format: output,
                        'cjson': cjsonmol,
                        'properties': props,
                        'atomCounts': atomCounts
                    }, public)

                # Upload the molecule to virtuoso
                try:
                    semantic.upload_molecule(mol)
                except requests.ConnectionError:
                    print(
                        TerminalColor.warning(
                            'WARNING: Couldn\'t connect to virtuoso.'))

            if 'vibrations' in cjson or 'basisSet' in cjson:
                # We have some calculation data, let's add it to the calcs.
                sdf = output
                moleculeId = mol['_id']
                calc_props = {}

                if calc_id is not None:
                    calc = self._calc_model.load(calc_id,
                                                 user=user,
                                                 level=AccessType.ADMIN)
                    calc_props = calc['properties']
                    # The calculation is no longer pending
                    if 'pending' in calc_props:
                        del calc_props['pending']

                if input_format == 'json':
                    jsonInput = json.loads(data_str)
                    # Don't override existing properties
                    new_calc_props = avogadro.calculation_properties(jsonInput)
                    new_calc_props.update(calc_props)
                    calc_props = new_calc_props

                # Use basisSet from cjson if we don't already have one.
                if 'basisSet' in cjson and 'basisSet' not in calc_props:
                    calc_props['basisSet'] = cjson['basisSet']

                # Use functional from cjson properties if we don't already have
                # one.
                functional = parse('properties.functional').find(cjson)
                if functional and 'functional' not in calc_props:
                    calc_props['functional'] = functional[0].value

                # Add theory priority to 'sort' calculations
                theory = calc_props.get('theory')
                functional = calc_props.get('functional')
                if theory in constants.theory_priority:
                    priority = constants.theory_priority[theory]
                    calc_props['theoryPriority'] = priority

                if calc_id is not None:
                    calc['properties'] = calc_props
                    calc['cjson'] = cjson
                    calc['fileId'] = file_id
                    self._calc_model.save(calc)
                else:
                    self._calc_model.create_cjson(user, cjson, calc_props,
                                                  moleculeId, file_id, public)

        elif 'xyz' in body or 'sdf' in body:

            if 'xyz' in body:
                input_format = 'xyz'
                data = body['xyz']
            else:
                input_format = 'sdf'
                data = body['sdf']

            (inchi, inchikey) = openbabel.to_inchi(data, input_format)

            mol = {'inchi': inchi, 'inchikey': inchikey, input_format: data}

            if 'name' in body:
                mol['name'] = body['name']

            mol = self._model.create_xyz(user, mol, public)
        elif 'inchi' in body:
            inchi = body['inchi']
            mol = self._model.create(user, inchi, public)
        else:
            raise RestException('Invalid request', code=400)

        return self._clean(mol)
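# Illustrative request bodies this endpoint accepts (IDs and values are
# placeholders, not from the original source):
#   {"fileId": "<girder file id>", "calculationId": "<calculation id>", "public": true}
#   {"xyz": "3\n\nO 0 0 0\nH 0.96 0 0\nH -0.24 0.93 0", "name": "water"}
#   {"sdf": "<sdf file contents>"}
#   {"inchi": "InChI=1S/H2O/h1H2"}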
Beispiel #42
0
    def param(self, name, description, paramType='query', dataType='string',
              required=True, enum=None, default=None):
        """
        This helper will build a parameter declaration for you. It has the most
        common options as defaults, so you won't have to repeat yourself as much
        when declaring the APIs.

        Note that we could expose more parameters from the Parameter Object
        spec, for example: format, allowEmptyValue, minimum, maximum, pattern,
        uniqueItems.

        :param name: name of the parameter used in the REST query.
        :param description: explanation of the parameter.
        :param paramType: how is the parameter sent.  One of 'query', 'path',
                          'body', 'header', or 'formData'.
        :param dataType: the data type expected in the parameter. This is one
                         of 'integer', 'long', 'float', 'double', 'string',
                         'byte', 'binary', 'boolean', 'date', 'dateTime',
                         'password', or 'file'.
        :param required: True if the request will fail if this parameter is not
                         present, False if the parameter is optional.
        :param enum: a fixed list of possible values for the field.
        :param default: a default value to use if the parameter is not
                        provided.
        """
        # Legacy data type conversions
        if dataType == 'int':
            dataType = 'integer'
        elif dataType == 'File':
            print(TerminalColor.warning(
                "WARNING: dataType 'File' should be updated to 'file'"))
            dataType = 'file'

        # Get type and format from common name
        dataTypeFormat = None
        if dataType in self._dataTypeMap:
            (dataType, dataTypeFormat) = self._dataTypeMap[dataType]
        else:
            print(TerminalColor.warning(
                "WARNING: Invalid dataType '%s' specified for parameter "
                "named '%s'" % (dataType, name)))

        # Parameter Object spec:
        # Since the parameter is not located at the request body, it is limited
        # to simple types (that is, not an object).
        if paramType != 'body':
            if dataType not in ('string', 'number', 'integer', 'boolean',
                                'array', 'file'):
                print(TerminalColor.warning(
                    "WARNING: Invalid dataType '%s' specified for parameter "
                    "named '%s'" % (dataType, name)))

        if paramType == 'form':
            print(TerminalColor.warning(
                "WARNING: paramType 'form' should be updated to 'formData'"))
            paramType = 'formData'

        # Parameter Object spec:
        # If type is "file", then consumes MUST be either
        # "multipart/form-data", "application/x-www-form-urlencoded" or both
        # and the parameter MUST be in "formData".
        if dataType == 'file':
            if paramType != 'formData':
                print(TerminalColor.warning(
                    "WARNING: Invalid paramType '%s' specified for dataType "
                    "'file' in parameter named '%s'"
                    % (paramType, name)))
                paramType = 'formData'

        param = {
            'name': name,
            'description': description,
            'in': paramType,
            'required': required
        }

        if paramType == 'body':
            param['schema'] = {
                'type': dataType
            }
        else:
            param['type'] = dataType

        if dataTypeFormat is not None:
            param['format'] = dataTypeFormat

        if enum:
            param['enum'] = enum

        if default is not None:
            param['default'] = default

        self._params.append(param)
        return self
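# Usage sketch, assuming this method lives on a Description-style builder
# object (the endpoint and parameter names below are illustrative). Because
# param() returns self, declarations can be chained:
# desc = (Description('List the items in a folder.')
#         .param('folderId', 'The ID of the folder.', paramType='path')
#         .param('limit', 'Result set size limit.', dataType='integer',
#                required=False, default=50))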
Beispiel #43
0
def create_molecule(data_str,
                    input_format,
                    user,
                    public,
                    gen3d=True,
                    provenance='uploaded by user'):

    using_2d_format = (input_format in openbabel_2d_formats)
    inchi_format = 'inchi'

    if using_2d_format or input_format in openbabel_3d_formats:
        inchi, inchikey = openbabel.to_inchi(data_str, input_format)
    else:
        sdf_data = avogadro.convert_str(data_str, input_format, 'sdf')
        inchi, inchikey = openbabel.to_inchi(sdf_data, 'sdf')

    if not inchi:
        raise RestException('Unable to extract InChI', code=400)

    # Check if the molecule exists; only create it if it does not.
    molExists = MoleculeModel().find_inchikey(inchikey)
    mol = {}
    if molExists:
        mol = molExists
    else:
        # Get some basic molecular properties we want to add to the
        # database.
        # Use sdf without 3d generation for avogadro's molecule properties
        sdf_no_3d = openbabel.gen_sdf_no_3d(inchi, inchi_format)[0]
        props = avogadro.molecule_properties(sdf_no_3d, 'sdf')
        smiles = openbabel.to_smiles(inchi, inchi_format)

        pieces = props['spacedFormula'].strip().split(' ')
        atomCounts = {}
        for i in range(0, int(len(pieces) / 2)):
            atomCounts[pieces[2 * i]] = int(pieces[2 * i + 1])

        mol_dict = {
            'inchi': inchi,
            'inchikey': inchikey,
            'smiles': smiles,
            'properties': props,
            'atomCounts': atomCounts,
            'provenance': provenance
        }

        # Generate an svg file for an image
        schedule_svg_gen(mol_dict, user)

        # Set a name if we find one
        name = chemspider.find_common_name(inchikey)
        if name is not None:
            mol_dict['name'] = name

        cjson = {}
        if input_format == 'cjson':
            cjson = json.loads(data_str)

        if not cjson and using_2d_format:
            # Generate 3d coordinates in a background thread
            if gen3d:
                schedule_3d_coords_gen(mol_dict, user)
            # The molecule record will be complete except for the cjson
            return MoleculeModel().create(user, mol_dict, public)
        else:
            if input_format in openbabel_3d_formats:
                sdf_data, mime = openbabel.convert_str(data_str, input_format,
                                                       'sdf')
                cjson = json.loads(
                    avogadro.convert_str(sdf_data, 'sdf', 'cjson'))
            else:
                cjson = json.loads(
                    avogadro.convert_str(data_str, input_format, 'cjson'))

        mol_dict['cjson'] = whitelist_cjson(cjson)

        mol = MoleculeModel().create(user, mol_dict, public)

        # Upload the molecule to virtuoso
        try:
            semantic.upload_molecule(mol)
        except requests.ConnectionError:
            print(TerminalColor.warning('WARNING: Couldn\'t connect to Jena.'))

    return mol
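# Illustrative call (the XYZ payload and the user document are placeholders):
# mol = create_molecule('3\n\nO 0 0 0\nH 0.96 0 0\nH -0.24 0.93 0',
#                       'xyz', user, public=False, gen3d=True,
#                       provenance='uploaded by user')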
Beispiel #44
0
import os
from chemspipy import ChemSpider
from girder.constants import TerminalColor

try:
    chemspikey = os.environ['chemspikey']
except KeyError:
    chemspikey = None
    print(
        TerminalColor.warning(
            'WARNING: chemspikey not set, common names will not be resolved.'))


def find_common_name(inchikey):
    # Try to find the common name for the compound, if not, return None.

    name = None

    if chemspikey:
        cs = ChemSpider(chemspikey)

        if len(inchikey) > 0:
            result = cs.search(inchikey)
            if len(result) == 1:
                name = result[0].common_name

    return name
Beispiel #45
0
def findAllPlugins(curConfig=None):
    """
    Walks the plugins directories to find all of the plugins. If the plugin has
    a plugin.json file, this reads that file to determine dependencies.
    """
    allPlugins = {}

    # look for plugins enabled via setuptools `entry_points`
    for entry_point in iter_entry_points(group='girder.plugin'):
        # set defaults
        allPlugins[entry_point.name] = {
            'name': entry_point.name,
            'description': '',
            'version': '',
            'dependencies': set()
        }
        allPlugins[entry_point.name].update(
            getattr(entry_point.load(), 'config', {})
        )

    pluginDirs = getPluginDirs(curConfig)
    if not pluginDirs:
        print(TerminalColor.warning('Plugin directory not found.'))
        return allPlugins

    for pluginDir in pluginDirs:
        dirs = [dir for dir in os.listdir(pluginDir) if os.path.isdir(
            os.path.join(pluginDir, dir))]

        for plugin in dirs:
            data = {}
            configJson = os.path.join(pluginDir, plugin, 'plugin.json')
            configYml = os.path.join(pluginDir, plugin, 'plugin.yml')
            if os.path.isfile(configJson):
                with open(configJson) as conf:
                    try:
                        data = json.load(conf)
                    except ValueError as e:
                        print(
                            TerminalColor.error(
                                ('ERROR: Plugin "%s": '
                                 'plugin.json is not valid JSON.') % plugin))
                        print(e)
            elif os.path.isfile(configYml):
                with open(configYml) as conf:
                    try:
                        data = yaml.safe_load(conf)
                    except yaml.YAMLError as e:
                        print(
                            TerminalColor.error(
                                ('ERROR: Plugin "%s": '
                                 'plugin.yml is not valid YAML.') % plugin))
                        print(e)

            allPlugins[plugin] = {
                'name': data.get('name', plugin),
                'description': data.get('description', ''),
                'version': data.get('version', ''),
                'dependencies': set(data.get('dependencies', []))
            }

    return allPlugins
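# Illustrative plugin.json that findAllPlugins() reads from each plugin
# directory (all values are made up):
# {
#     "name": "My Plugin",
#     "description": "Adds an example endpoint.",
#     "version": "0.1.0",
#     "dependencies": ["jobs"]
# }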
Beispiel #46
0
from .. import yaml_importer  # noqa

# This is the list of jobs we want to load.  It could be changed to search the
# local directory for .yml files
_jobList = ['kmeans', 'surv', 'iGPSe', 'iGPSePart2', 'silhouette']

# This is the list of jobs we succeeded in loading.  Other modules can use this
# list to determine which jobs are available
jobList = []

# from each job, import doc as (job)
for job in _jobList:
    try:
        _temp = __import__(job, globals(), locals(), ['doc'], -1)
        if 'task' not in _temp.doc:
            print(TerminalColor.error(
                  'ERROR: Job not specified properly "%s":' % job))
            girder.logger.info('Job not specified properly: %s' % job)
            continue
        globals()[job] = _temp.doc
        jobList.append(job)
    except yaml.parser.ParserError:
        print(TerminalColor.error(
              'ERROR: Failed to parse job "%s":' % job))
        girder.logger.exception('Job yaml parse error: %s' % job)
    except Exception:
        print(TerminalColor.error(
              'ERROR: Failed to load job "%s":' % job))
        girder.logger.exception('Job load failure: %s' % job)

__all__ = jobList
Beispiel #47
0
    def route(self, method, route, handler, nodoc=False, resource=None):
        """
        Define a route for your REST resource.

        :param method: The HTTP method, e.g. 'GET', 'POST', 'PUT'
        :type method: str
        :param route: The route, as a list of path params relative to the
        resource root. Elements of this list starting with ':' are assumed to
        be wildcards.
        :type route: list
        :param handler: The method to be called if the route and method are
        matched by a request. Wildcards in the route will be expanded and
        passed as kwargs with the same name as the wildcard identifier.
        :type handler: function
        :param nodoc: If your route intentionally provides no documentation,
                      set this to True to disable the warning on startup.
        :type nodoc: bool
        """
        if not hasattr(self, '_routes'):
            self._routes = collections.defaultdict(
                lambda: collections.defaultdict(list))

        # Insertion sort to maintain routes in required order.
        def shouldInsert(a, b):
            """
            Return bool representing whether route a should go before b. Checks
            by comparing each token in order and making sure routes with
            literals in forward positions come before routes with wildcards
            in those positions.
            """
            for i in xrange(0, len(a)):
                if a[i][0] != ':' and b[i][0] == ':':
                    return True
            return False

        nLengthRoutes = self._routes[method.lower()][len(route)]
        for i in xrange(0, len(nLengthRoutes)):
            if shouldInsert(route, nLengthRoutes[i][0]):
                nLengthRoutes.insert(i, (route, handler))
                break
        else:
            nLengthRoutes.append((route, handler))

        # Now handle the api doc if the handler has any attached
        if resource is None and hasattr(self, 'resourceName'):
            resource = self.resourceName
        elif resource is None:
            resource = handler.__module__.rsplit('.', 1)[-1]

        if hasattr(handler, 'description'):
            if handler.description is not None:
                docs.addRouteDocs(resource=resource,
                                  route=route,
                                  method=method,
                                  info=handler.description.asDict(),
                                  handler=handler)
        elif not nodoc:
            routePath = '/'.join([resource] + list(route))
            print TerminalColor.warning(
                'WARNING: No description docs present for route {} {}'.format(
                    method, routePath))
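# Usage sketch (handler names are illustrative). Among routes of the same
# length, literal tokens sort ahead of wildcards, so ('me',) is matched
# before (':id',):
# self.route('GET', ('me',), self.getMe)
# self.route('GET', (':id',), self.getUser)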
Beispiel #48
0
def create_molecule(data_str, input_format, user, public):
    # Use the SDF format as it is the one with bonding that 3Dmol uses.
    sdf_format = 'sdf'

    if input_format == 'pdb':
        (sdf_data, _) = openbabel.convert_str(data_str, input_format, sdf_format)
    elif input_format == 'inchi':
        (sdf_data, _) = openbabel.from_inchi(data_str, sdf_format)
    elif input_format == 'smi' or input_format == 'smiles':
        (sdf_data, _) = openbabel.from_smiles(data_str, sdf_format)
    else:
        sdf_data = avogadro.convert_str(data_str, input_format, sdf_format)

    atom_count = openbabel.atom_count(sdf_data, sdf_format)

    if atom_count > 1024:
        raise RestException(
            'Unable to generate InChI, molecule has more than 1024 atoms.',
            code=400)

    (inchi, inchikey) = openbabel.to_inchi(sdf_data, sdf_format)

    if not inchi:
        raise RestException('Unable to extract inchi', code=400)

    # Check if the molecule exists; only create it if it does not.
    molExists = MoleculeModel().find_inchikey(inchikey)
    mol = {}
    if molExists:
        mol = molExists
    else:
        # Get some basic molecular properties we want to add to the
        # database.
        props = avogadro.molecule_properties(sdf_data, sdf_format)
        pieces = props['spacedFormula'].strip().split(' ')
        atomCounts = {}
        for i in range(0, int(len(pieces) / 2)):
            atomCounts[pieces[2 * i]] = int(pieces[2 * i + 1])

        cjson = {}
        if input_format == 'cjson':
            cjson = json.loads(data_str)
        else:
            cjson = json.loads(avogadro.convert_str(sdf_data, sdf_format,
                                                    'cjson'))

        smiles = openbabel.to_smiles(sdf_data, sdf_format)

        # Generate an svg file for an image
        svg_data = openbabel.to_svg(smiles, 'smiles')

        # Find the cjson version key
        version_key = 'chemicalJson'
        if version_key not in cjson:
            if 'chemical json' in cjson:
                version_key = 'chemical json'
            else:
                raise RestException('No "chemicalJson" key found', 400)

        # Whitelist parts of the CJSON that we store at the top level.
        cjsonmol = {}
        cjsonmol['atoms'] = cjson['atoms']
        cjsonmol['bonds'] = cjson['bonds']
        cjsonmol['chemicalJson'] = cjson[version_key]
        mol_dict = {
            'inchi': inchi,
            'inchikey': inchikey,
            'smiles': smiles,
            sdf_format: sdf_data,
            'cjson': cjsonmol,
            'properties': props,
            'atomCounts': atomCounts,
            'svg': svg_data
        }

        # Set a name if we find one
        name = chemspider.find_common_name(inchikey)
        if name is not None:
            mol_dict['name'] = name

        mol = MoleculeModel().create(user, mol_dict, public)

        # Upload the molecule to virtuoso
        try:
            semantic.upload_molecule(mol)
        except requests.ConnectionError:
            print(TerminalColor.warning('WARNING: Couldn\'t connect to Jena.'))

    return mol
Beispiel #49
0
def loadPlugins(plugins,
                root,
                appconf,
                apiRoot=None,
                curConfig=None,
                buildDag=True):
    """
    Loads a set of plugins into the application.

    :param plugins: The set of plugins to load, by directory name.
    :type plugins: list
    :param root: The root node of the server tree.
    :type root: object
    :param appconf: The server's cherrypy configuration object.
    :type appconf: dict
    :param apiRoot: The cherrypy api root object.
    :type apiRoot: object or None
    :param curConfig: A girder config object to use.
    :type curConfig: dict or None
    :param buildDag: If the ``plugins`` parameter is already a topo-sorted list
        with all dependencies resolved, set this to False and it will skip
        rebuilding the DAG. Otherwise the dependency resolution and sorting
        will occur within this method.
    :type buildDag: bool
    :returns: A 3-tuple containing the modified root, config, and apiRoot
        objects.
    :rtype tuple:
    """
    # Register a pseudo-package for the root of all plugins. This must be
    # present in the system module list in order to avoid import warnings.
    if curConfig is None:
        curConfig = config.getConfig()

    if 'plugins' in curConfig and 'plugin_directory' in curConfig['plugins']:
        print(
            TerminalColor.warning(
                'Warning: the plugin_directory setting is deprecated. Please use '
                'the `girder-install plugin` command and remove this setting from '
                'your config file.'))

    if ROOT_PLUGINS_PACKAGE not in sys.modules:
        module = imp.new_module(ROOT_PLUGINS_PACKAGE)
        girder.plugins = module
        sys.modules[ROOT_PLUGINS_PACKAGE] = module

    print(TerminalColor.info('Resolving plugin dependencies...'))

    if buildDag:
        plugins = getToposortedPlugins(plugins, curConfig, ignoreMissing=True)

    for plugin in plugins:
        try:
            root, appconf, apiRoot = loadPlugin(plugin,
                                                root,
                                                appconf,
                                                apiRoot,
                                                curConfig=curConfig)
            print(TerminalColor.success('Loaded plugin "%s"' % plugin))
        except Exception:
            print(
                TerminalColor.error('ERROR: Failed to load plugin "%s":' %
                                    plugin))
            girder.logger.exception('Plugin load failure: %s' % plugin)
            traceback.print_exc()

    return root, appconf, apiRoot
Beispiel #50
0
            continue
        # For each of our sources, try to import the named class from the
        # source module
        className = source['className']
        sourceModule = __import__(
            source['moduleName'].lstrip('.'), globals(), locals(), [className],
            len(source['moduleName']) - len(source['moduleName'].lstrip('.')))
        sourceClass = getattr(sourceModule, className)
        # Add the source class to the locals name so that it can be reached by
        # importing the tilesource module
        locals().update({className: sourceClass})
        # add it to our list of exports
        all.append(sourceClass)
        # add it to our dictionary of available sources if it has a name
        if getattr(sourceClass, 'name', None):
            AvailableTileSources[sourceClass.name] = sourceClass
    except ImportError:
        if girder:
            print(TerminalColor.error('Error: Could not import %s' % className))
            logger.exception('Error: Could not import %s' % className)
        else:
            logger.warning('Error: Could not import %s' % className)

# Create a partial function that will work through the known functions to get a
# tile source.
getTileSource = functools.partial(getTileSourceFromDict,
                                  AvailableTileSources)
all.append(getTileSource)

__all__ = all
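# Illustrative use of the registry assembled above (the path is a
# placeholder): getTileSource() forwards to getTileSourceFromDict() with
# AvailableTileSources already bound, and sources can also be looked up by
# name, e.g. AvailableTileSources.get('tiff').
# ts = getTileSource('/data/example.svs')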
Beispiel #51
0
def getDbConnection(uri=None, replicaSet=None, autoRetry=True, **kwargs):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally. Any extra kwargs you pass to
    this method will be passed through to the MongoClient.

    :param uri: if specified, connect to this mongo db rather than the one in
                the config.
    :param replicaSet: if uri is specified, use this replica set.
    :param autoRetry: if this connection should automatically retry operations
        in the event of an AutoReconnect exception. If you're testing the
        connection, set this to False. If disabled, this also will not cache
        the mongo client, so make sure to only disable if you're testing a
        connection.
    :type autoRetry: bool
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    if uri is None or uri == '':
        dbConf = getDbConfig()
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')
    clientOptions = {
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect.  If it
        # isn't set, we have issues with replica sets when the primary goes
        # down.  This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
        'connectTimeoutMS': 20000,
        'read_preference': ReadPreference.SECONDARY_PREFERRED,
        'replicaSet': replicaSet
    }
    clientOptions.update(kwargs)

    if uri is None:
        dbUriRedacted = 'mongodb://unknown'
        raise Exception('Could not connect to database: no URI specified')
    else:
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

        client = pymongo.MongoClient(uri, **clientOptions)

    if autoRetry:
        client = MongoProxy(client, logger=logger)
        _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client

    desc = ''
    if replicaSet:
        desc += ', replica set: %s' % replicaSet
    print(TerminalColor.info('Connected to MongoDB: %s%s' % (dbUriRedacted,
                                                             desc)))
    return client
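# Illustrative calls (the URI is a placeholder):
# client = getDbConnection()  # connect using the uri from the girder config
# client = getDbConnection('mongodb://localhost:27017/girder', autoRetry=False)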
Beispiel #52
0
import os
from chemspipy import ChemSpider
from girder.constants import TerminalColor

try:
    chemspikey = os.environ['chemspikey']
except KeyError:
    chemspikey = None
    print(TerminalColor.warning('WARNING: chemspikey not set, common names will not be resolved.'))


def find_common_name(inchikey):
    # Try to find the common name for the compound, if not, return None.

    name = None

    if chemspikey:
        cs = ChemSpider(chemspikey)

        if len(inchikey) > 0:
            result = cs.search(inchikey)
            if len(result) == 1:
                name = result[0].common_name

    return name
Beispiel #53
0
#  limitations under the License.
###############################################################################

import datetime
import six
import string

from girder.constants import AccessType, TerminalColor, TokenScope
from .model_base import AccessControlledModel

try:
    from random import SystemRandom
    random = SystemRandom()
    random.random()  # potentially raises NotImplementedError
except NotImplementedError:  # pragma: no cover
    print(TerminalColor.warning(
        'WARNING: using non-cryptographically secure PRNG.'))
    import random


def genToken(length=64):
    """
    Use this utility function to generate a random string of
    a desired length.
    """
    return ''.join(random.choice(string.ascii_letters + string.digits)
                   for x in range(length))
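# e.g. genToken() yields a 64-character alphanumeric string suitable for a
# session token id; genToken(40) yields a 40-character one.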


class Token(AccessControlledModel):
    """
    This model stores session tokens for user authentication.
Beispiel #54
0
def getDbConnection(uri=None, replicaSet=None, autoRetry=True, **kwargs):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally. Any extra kwargs you pass to
    this method will be passed through to the MongoClient.

    :param uri: if specified, connect to this mongo db rather than the one in
                the config.
    :param replicaSet: if uri is specified, use this replica set.
    :param autoRetry: if this connection should automatically retry operations
        in the event of an AutoReconnect exception. If you're testing the
        connection, set this to False. If disabled, this also will not cache
        the mongo client, so make sure to only disable if you're testing a
        connection.
    :type autoRetry: bool
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    if uri is None or uri == '':
        dbConf = getDbConfig()
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')
    clientOptions = {
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect.  If it
        # isn't set, we have issues with replica sets when the primary goes
        # down.  This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
        'connectTimeoutMS': 20000,
        'serverSelectionTimeoutMS': 20000,
        'read_preference': ReadPreference.SECONDARY_PREFERRED,
        'replicaSet': replicaSet
    }
    clientOptions.update(kwargs)

    if uri is None:
        dbUriRedacted = 'mongodb://unknown'
        raise Exception('Could not connect to database: no URI specified')
    else:
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

        client = pymongo.MongoClient(uri, **clientOptions)

    # Make sure we can connect to the mongo server at startup
    client.server_info()

    if autoRetry:
        client = MongoProxy(client, logger=logger)
        _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client

    desc = ''
    if replicaSet:
        desc += ', replica set: %s' % replicaSet
    print(
        TerminalColor.info('Connected to MongoDB: %s%s' %
                           (dbUriRedacted, desc)))
    return client
Beispiel #55
0
###############################################################################

import datetime
import six
import string

from girder.constants import AccessType, TerminalColor, TokenScope
from .model_base import AccessControlledModel

try:
    from random import SystemRandom
    random = SystemRandom()
    random.random()  # potentially raises NotImplementedError
except NotImplementedError:  # pragma: no cover
    print(
        TerminalColor.warning(
            'WARNING: using non-cryptographically secure PRNG.'))
    import random


def genToken(length=64):
    """
    Use this utility function to generate a random string of
    a desired length.
    """
    return ''.join(
        random.choice(string.ascii_letters + string.digits)
        for x in range(length))


class Token(AccessControlledModel):
    """