Example #1
File: base.py Project: opadron/video
def _postUpload(event):
    """
    Called when a file is uploaded. If the file was created by the video
    plugin's initial processing job, we register this file as such.
    """
    reference = event.info.get('reference', '')
    if not reference.startswith('videoPlugin'):
        return

    file = event.info['file']
    itemModel = ModelImporter.model('item')

    item = itemModel.load(file['itemId'], force=True, exc=True)
    itemVideoData = item.get('video', {})
    createdFiles = set(itemVideoData.get('createdFiles', []))
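    # A set keeps the same file id from being recorded twice.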

    createdFiles.add(str(file['_id']))

    itemVideoData['createdFiles'] = list(createdFiles)

    if reference == 'videoPluginMeta':
        fileModel = ModelImporter.model('file')
        with fileModel.open(file) as f:
            itemVideoData['meta'] = json.load(f)

    item['video'] = itemVideoData

    itemModel.save(item)
Example #2
File: search.py Project: satra/girder
def _commonSearchModeHandler(mode, query, types, user, level, limit, offset):
    """
    The common handler for `text` and `prefix` search modes.
    """
    # Avoid circular import
    from girder.api.v1.resource import allowedSearchTypes

    method = '%sSearch' % mode
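    # e.g. mode 'text' -> textSearch, 'prefix' -> prefixSearch; invoked via getattr below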
    results = {}

    for modelName in types:
        if modelName not in allowedSearchTypes:
            continue

        if '.' in modelName:
            name, plugin = modelName.rsplit('.', 1)
            model = ModelImporter.model(name, plugin)
        else:
            model = ModelImporter.model(modelName)

        if model is not None:
            results[modelName] = [
                model.filter(d, user) for d in getattr(model, method)(
                    query=query, user=user, limit=limit, offset=offset, level=level)
            ]
    return results
Example #3
File: base.py Project: kotfic/large_image
def _postUpload(event):
    """
    Called when a file is uploaded. We check the parent item to see if it is
    expecting a large image upload, and if so we register this file as the
    result image.
    """
    fileObj = event.info['file']
    # There may not be an itemId (on thumbnails, for instance)
    if not fileObj.get('itemId'):
        return

    Item = ModelImporter.model('item')
    item = Item.load(fileObj['itemId'], force=True, exc=True)

    if item.get('largeImage', {}).get('expected') and (
            fileObj['name'].endswith('.tiff') or
            fileObj.get('mimeType') == 'image/tiff'):
        if fileObj.get('mimeType') != 'image/tiff':
            fileObj['mimeType'] = 'image/tiff'
            ModelImporter.model('file').save(fileObj)
        del item['largeImage']['expected']
        item['largeImage']['fileId'] = fileObj['_id']
        item['largeImage']['sourceName'] = 'tiff'
        Item.save(item)
Example #4
def checkForLargeImageFiles(event):
    file = event.info
    possible = False
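    # A file is a candidate large image if its MIME type or its last
    # extension looks like a TIFF or whole-slide format.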
    mimeType = file.get('mimeType')
    if mimeType in ('image/tiff', 'image/x-tiff', 'image/x-ptif'):
        possible = True
    exts = file.get('exts')
    if exts and exts[-1] in ('svs', 'ptif', 'tif', 'tiff', 'ndpi'):
        possible = True
    if not file.get('itemId') or not possible:
        return
    if not ModelImporter.model('setting').get(
            constants.PluginSettings.LARGE_IMAGE_AUTO_SET):
        return
    item = ModelImporter.model('item').load(
        file['itemId'], force=True, exc=False)
    if not item or item.get('largeImage'):
        return
    imageItemModel = ModelImporter.model('image_item', 'large_image')
    try:
        imageItemModel.createImageItem(item, file, createJob=False)
    except Exception:
        # We couldn't automatically set this as a large image
        logger.info('Saved file %s cannot be automatically used as a '
                    'largeImage' % str(file['_id']))
Example #5
def _commonSearchModeHandler(mode, query, types, user, level, limit, offset):
    """
    The common handler for `text` and `prefix` search modes.
    """
    # Avoid circular import
    from girder.api.v1.resource import allowedSearchTypes

    method = '%sSearch' % mode
    results = {}

    for modelName in types:
        if modelName not in allowedSearchTypes:
            continue

        if '.' in modelName:
            name, plugin = modelName.rsplit('.', 1)
            model = ModelImporter.model(name, plugin)
        else:
            model = ModelImporter.model(modelName)

        if model is not None:
            results[modelName] = [
                model.filter(d, user)
                for d in getattr(model, method)(query=query,
                                                user=user,
                                                limit=limit,
                                                offset=offset,
                                                level=level)
            ]
    return results
Example #6
File: base.py Project: kotfic/large_image
def checkForLargeImageFiles(event):
    file = event.info
    possible = False
    mimeType = file.get('mimeType')
    if mimeType in ('image/tiff', 'image/x-tiff', 'image/x-ptif'):
        possible = True
    exts = file.get('exts')
    if exts and exts[-1] in ('svs', 'ptif', 'tif', 'tiff', 'ndpi'):
        possible = True
    if not file.get('itemId') or not possible:
        return
    if not ModelImporter.model('setting').get(
            constants.PluginSettings.LARGE_IMAGE_AUTO_SET):
        return
    item = ModelImporter.model('item').load(file['itemId'],
                                            force=True,
                                            exc=False)
    if not item or item.get('largeImage'):
        return
    imageItemModel = ModelImporter.model('image_item', 'large_image')
    try:
        imageItemModel.createImageItem(item, file, createJob=False)
    except Exception:
        # We couldn't automatically set this as a large image
        logger.info('Saved file %s cannot be automatically used as a '
                    'largeImage' % str(file['_id']))
Example #7
File: base.py Project: kotfic/large_image
def load(info):
    from .rest import TilesItemResource, LargeImageResource, AnnotationResource

    TilesItemResource(info['apiRoot'])
    info['apiRoot'].large_image = LargeImageResource()
    info['apiRoot'].annotation = AnnotationResource()

    ModelImporter.model('item').exposeFields(level=AccessType.READ,
                                             fields='largeImage')
    # Ask for the annotation model to make sure it is initialized.
    ModelImporter.model('annotation', plugin='large_image')

    events.bind('data.process', 'large_image', _postUpload)
    events.bind('jobs.job.update.after', 'large_image', _updateJob)
    events.bind('model.job.save', 'large_image', _updateJob)
    events.bind('model.job.remove', 'large_image', _updateJob)
    events.bind('model.folder.save.after', 'large_image',
                invalidateLoadModelCache)
    events.bind('model.group.save.after', 'large_image',
                invalidateLoadModelCache)
    events.bind('model.item.remove', 'large_image', invalidateLoadModelCache)
    events.bind('model.item.save.after', 'large_image',
                invalidateLoadModelCache)
    events.bind('model.file.save.after', 'large_image',
                checkForLargeImageFiles)
    events.bind('model.item.remove', 'large_image', removeThumbnails)
Example #8
    def test_docker_image_model(self):
        from girder.models.model_base import ModelImporter

        raised = False
        try:
            ModelImporter.model('docker_image_model', 'slicer_cli_web')  # noqa
        except Exception as e:
            raised = True
            print(str(e))

        self.assertFalse(raised, 'docker_image_model import failed')
Example #9
def load(info):
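    # Compound index on (oauth.provider, oauth.id) so users can be looked up
    # quickly by their OAuth identity.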
    ModelImporter.model('user').ensureIndex((
        (('oauth.provider', SortDir.ASCENDING),
         ('oauth.id', SortDir.ASCENDING)), {}))
    ModelImporter.model('user').reconnect()

    events.bind('no_password_login_attempt', 'oauth', checkOauthUser)

    info['apiRoot'].oauth = rest.OAuth()

    SettingDefault.defaults[constants.PluginSettings.PROVIDERS_ENABLED] = []
Example #10
def load(info):
    ModelImporter.model('user').ensureIndex(
        ((('oauth.provider', SortDir.ASCENDING), ('oauth.id',
                                                  SortDir.ASCENDING)), {}))
    ModelImporter.model('user').reconnect()

    events.bind('no_password_login_attempt', 'oauth', checkOauthUser)

    info['apiRoot'].oauth = rest.OAuth()

    SettingDefault.defaults[constants.PluginSettings.PROVIDERS_ENABLED] = []
Example #11
def load(info):
    ModelImporter.model('user').ensureIndex((
        (('oauth.provider', SortDir.ASCENDING),
         ('oauth.id', SortDir.ASCENDING)), {}))
    ModelImporter.model('user').reconnect()

    events.bind('no_password_login_attempt', 'oauth', checkOauthUser)

    info['apiRoot'].oauth = rest.OAuth()

    # Make Google on by default for backward compatibility. To turn it off,
    # users will need to hit one of the "Save" buttons on the config page.
    SettingDefault.defaults[constants.PluginSettings.PROVIDERS_ENABLED] = ['google']
Example #12
def load(info):
    ModelImporter.model('user').ensureIndex(
        ((('oauth.provider', SortDir.ASCENDING), ('oauth.id',
                                                  SortDir.ASCENDING)), {}))
    ModelImporter.model('user').reconnect()

    events.bind('model.setting.validate', 'oauth', validateSettings)
    events.bind('no_password_login_attempt', 'oauth', checkOauthUser)

    info['apiRoot'].oauth = rest.OAuth()

    # Make Google on by default for backward compatibility. To turn it off,
    # users will need to hit one of the "Save" buttons on the config page.
    SettingDefault.defaults[constants.PluginSettings.PROVIDERS_ENABLED] = \
        ['google']
Example #13
def afterPostPutCollection(event):
    # This will only trigger if no exceptions (for access, invalid id, etc.) are thrown
    extraParams = event.info['params']
    if 'terms' in extraParams:
        collectionResponse = event.info['returnVal']
        collectionId = collectionResponse['_id']
        terms = extraParams['terms']

        ModelImporter.model('collection').update(
            {'_id': collectionId},
            {'$set': {'terms': terms}}
        )

        collectionResponse['terms'] = terms
        event.addResponse(collectionResponse)
Example #14
    def delete_docker_image_from_repo(self, name, jobType):
        """
        Create an asynchronous job to delete the docker images listed in
        ``name`` from the local machine.

        :param name: A list of docker image names.
        :type name: list of strings
        :param jobType: The value to use for the job's type. This is used by
            event listeners to determine which jobs are related to docker images.
        """

        jobModel = ModelImporter.model('job', 'jobs')

        job = jobModel.createLocalJob(
            module='girder.plugins.slicer_cli_web_ssr.image_job',
            function='deleteImage',
            kwargs={
                'deleteList': name
            },
            title='Deleting Docker Images',
            user=getCurrentUser(),
            type=jobType,
            public=True,
            async=True
        )

        jobModel.scheduleJob(job)
Example #15
def load(info):

    girderRoot = info['serverRoot']
    histomicsRoot = Webroot(_template)
    histomicsRoot.updateHtmlVars(girderRoot.vars)
    histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})

    info['serverRoot'].histomicstk = histomicsRoot
    info['serverRoot'].girder = girderRoot

    # create root resource for all REST end points of HistomicsTK
    resource = DockerResource('HistomicsTK')
    setattr(info['apiRoot'], resource.resourceName, resource)

    # load docker images from cache
    dockerImageModel = ModelImporter.model('docker_image_model',
                                           'slicer_cli_web')
    dockerCache = dockerImageModel.loadAllImages()

    # generate REST end points for slicer CLIs of each docker image
    genRESTEndPointsForSlicerCLIsInDockerCache(resource, dockerCache)

    # auto-ingest annotations into database when a .anot file is uploaded
    events.bind('data.process', 'HistomicsTK', process_annotations)

    events.bind('jobs.job.update.after', resource.resourceName,
                resource.AddRestEndpoints)
Example #17
def _updateJob(event):
    """
    Called when a job is saved, updated, or removed.  If this is a large image
    job and it is ended, clean up after it.
    """
    global JobStatus
    if not JobStatus:
        from girder.plugins.jobs.constants import JobStatus

    job = event.info['job'] if event.name == 'jobs.job.update.after' else event.info
    meta = job.get('meta', {})
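    # Only act on jobs that the large_image plugin created for createImageItem.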
    if (meta.get('creator') != 'large_image' or not meta.get('itemId')
            or meta.get('task') != 'createImageItem'):
        return
    status = job['status']
    if event.name == 'model.job.remove' and status not in (JobStatus.ERROR,
                                                           JobStatus.CANCELED,
                                                           JobStatus.SUCCESS):
        status = JobStatus.CANCELED
    if status not in (JobStatus.ERROR, JobStatus.CANCELED, JobStatus.SUCCESS):
        return
    item = ModelImporter.model('item').load(meta['itemId'], force=True)
    if not item or 'largeImage' not in item:
        return
    if item.get('largeImage', {}).get('expected'):
        # We can get a SUCCESS message before we get the upload message, so
        # don't clear the expected status on success.
        if status != JobStatus.SUCCESS:
            del item['largeImage']['expected']
    notify = item.get('largeImage', {}).get('notify')
    msg = None
    if notify:
        del item['largeImage']['notify']
        if status == JobStatus.SUCCESS:
            msg = 'Large image created'
        elif status == JobStatus.CANCELED:
            msg = 'Large image creation canceled'
        else:  # ERROR
            msg = 'FAILED: Large image creation failed'
        msg += ' for item %s' % item['name']
    if (status in (JobStatus.ERROR, JobStatus.CANCELED)
            and 'largeImage' in item):
        del item['largeImage']
    ModelImporter.model('item').save(item)
    if msg and event.name != 'model.job.remove':
        ModelImporter.model('job', 'jobs').updateJob(job, progressMessage=msg)
Example #18
def load(info):
    # passed in resource name must match the attribute added to info[apiroot]
    resource = DockerResource('slicer_cli_web')
    info['apiRoot'].slicer_cli_web = resource

    dockerImageModel = ModelImporter.model('docker_image_model',
                                           'slicer_cli_web')
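    # Load the cached docker image records and expose their Slicer CLIs as
    # REST endpoints.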
    dockerCache = dockerImageModel.loadAllImages()

    genRESTEndPointsForSlicerCLIsInDockerCache(resource, dockerCache)

    ModelImporter.model('job', 'jobs').exposeFields(level=AccessType.READ, fields={
        'slicerCLIBindings'})

    events.bind('jobs.job.update.after', resource.resourceName,
                resource.AddRestEndpoints)
    events.bind('data.process', info['name'], _onUpload)
Example #19
def _updateJob(event):
    """
    Called when a job is saved, updated, or removed.  If this is a large image
    job and it is ended, clean up after it.
    """
    global JobStatus
    if not JobStatus:
        from girder.plugins.jobs.constants import JobStatus

    job = event.info['job'] if event.name == 'jobs.job.update.after' else event.info
    meta = job.get('meta', {})
    if (meta.get('creator') != 'large_image' or not meta.get('itemId') or
            meta.get('task') != 'createImageItem'):
        return
    status = job['status']
    if event.name == 'model.job.remove' and status not in (
            JobStatus.ERROR, JobStatus.CANCELED, JobStatus.SUCCESS):
        status = JobStatus.CANCELED
    if status not in (JobStatus.ERROR, JobStatus.CANCELED, JobStatus.SUCCESS):
        return
    item = ModelImporter.model('item').load(meta['itemId'], force=True)
    if not item or 'largeImage' not in item:
        return
    if item.get('largeImage', {}).get('expected'):
        # We can get a SUCCESS message before we get the upload message, so
        # don't clear the expected status on success.
        if status != JobStatus.SUCCESS:
            del item['largeImage']['expected']
    notify = item.get('largeImage', {}).get('notify')
    msg = None
    if notify:
        del item['largeImage']['notify']
        if status == JobStatus.SUCCESS:
            msg = 'Large image created'
        elif status == JobStatus.CANCELED:
            msg = 'Large image creation canceled'
        else:  # ERROR
            msg = 'FAILED: Large image creation failed'
        msg += ' for item %s' % item['name']
    if (status in (JobStatus.ERROR, JobStatus.CANCELED) and
            'largeImage' in item):
        del item['largeImage']
    ModelImporter.model('item').save(item)
    if msg and event.name != 'model.job.remove':
        ModelImporter.model('job', 'jobs').updateJob(job, progressMessage=msg)
Example #20
def load(info):
    # Augment the collection creation and edit routes to accept a terms field
    events.bind('rest.post.collection.after', 'terms', afterPostPutCollection)
    events.bind('rest.put.collection/:id.after', 'terms', afterPostPutCollection)
    for handler in [
        Collection.createCollection,
        Collection.updateCollection
    ]:
        handler.description.param('terms', 'The Terms of Use for the collection.', required=False)

    # Expose the terms field on all collections
    ModelImporter.model('collection').exposeFields(level=AccessType.READ, fields={'terms'})

    # Add endpoint for registered users to accept terms
    info['apiRoot'].collection.route('POST', (':id', 'acceptTerms'), acceptCollectionTerms)

    # Expose the terms field on all users
    ModelImporter.model('user').exposeFields(level=AccessType.ADMIN, fields={'terms'})
Example #21
File: base.py Project: opadron/video
def load(info):
    from .rest import addItemRoutes

    addItemRoutes(info['apiRoot'].item)

    ModelImporter.model('item').exposeFields(level=AccessType.READ,
                                             fields='video')

    events.bind('data.process', 'video', _postUpload)
    events.bind('jobs.job.update.after', 'video', updateJob)
    events.bind('model.job.save', 'video', updateJob)
    events.bind('model.job.remove', 'video', updateJob)
    ## events.bind('model.folder.save.after', 'video',
    ##             invalidateLoadModelCache)
    ## events.bind('model.group.save.after', 'video',
    ##             invalidateLoadModelCache)
    ## events.bind('model.item.remove', 'video', invalidateLoadModelCache)
    events.bind('model.file.save.after', 'video', checkForLargeImageFiles)
    events.bind('model.item.remove', 'video', removeThumbnails)
Example #22
def acceptCollectionTerms(self, collection, termsHash):
    if not collection.get('terms'):
        raise RestException('This collection currently has no terms.')

    # termsHash should be encoded to a bytes object, but storing bytes into MongoDB behaves
    # differently in Python 2 vs 3. Additionally, serializing a bytes to JSON behaves differently
    # in Python 2 vs 3. So, just keep it as a unicode (or ordinary Python 2 str).
    realTermsHash = hashlib.sha256(collection['terms'].encode('utf-8')).hexdigest()
    if termsHash != realTermsHash:
        # This "proves" that the client has at least accessed the terms
        raise RestException(
            'The submitted "termsHash" does not correspond to the collection\'s current terms.')

    ModelImporter.model('user').update(
        {'_id': self.getCurrentUser()['_id']},
        {'$set': {
            'terms.collection.%s' % collection['_id']: {
                'hash': termsHash,
                'accepted': datetime.datetime.now()
            }
        }}
    )
Example #23
def load(info):

    # passed in resource name must match the attribute added to info[apiroot]
    resource = DockerResource('slicer_cli_web')
    info['apiRoot'].slicer_cli_web = resource

    dockerImageModel = ModelImporter.model('docker_image_model',
                                           'slicer_cli_web')
    dockerCache = dockerImageModel.loadAllImages()

    genRESTEndPointsForSlicerCLIsInDockerCache(resource, dockerCache)

    events.bind('jobs.job.update.after', resource.resourceName,
                resource.AddRestEndpoints)
Example #24
def _postUpload(event):
    """
    Called when a file is uploaded. We check the parent item to see if it is
    expecting a large image upload, and if so we register this file as the
    result image.
    """
    fileObj = event.info['file']
    # There may not be an itemId (on thumbnails, for instance)
    if not fileObj.get('itemId'):
        return

    Item = ModelImporter.model('item')
    item = Item.load(fileObj['itemId'], force=True, exc=True)

    if item.get('largeImage', {}).get('expected') and (
            fileObj['name'].endswith('.tiff') or
            fileObj.get('mimeType') == 'image/tiff'):
        if fileObj.get('mimeType') != 'image/tiff':
            fileObj['mimeType'] = 'image/tiff'
            ModelImporter.model('file').save(fileObj)
        del item['largeImage']['expected']
        item['largeImage']['fileId'] = fileObj['_id']
        item['largeImage']['sourceName'] = 'tiff'
        Item.save(item)
Example #25
def load(info):
    from .rest import TilesItemResource, LargeImageResource, AnnotationResource

    TilesItemResource(info['apiRoot'])
    info['apiRoot'].large_image = LargeImageResource()
    info['apiRoot'].annotation = AnnotationResource()

    ModelImporter.model('item').exposeFields(
        level=AccessType.READ, fields='largeImage')
    # Ask for the annotation model to make sure it is initialized.
    ModelImporter.model('annotation', plugin='large_image')

    events.bind('data.process', 'large_image', _postUpload)
    events.bind('jobs.job.update.after', 'large_image', _updateJob)
    events.bind('model.job.save', 'large_image', _updateJob)
    events.bind('model.job.remove', 'large_image', _updateJob)
    events.bind('model.folder.save.after', 'large_image',
                invalidateLoadModelCache)
    events.bind('model.group.save.after', 'large_image',
                invalidateLoadModelCache)
    events.bind('model.item.remove', 'large_image', invalidateLoadModelCache)
    events.bind('model.file.save.after', 'large_image',
                checkForLargeImageFiles)
    events.bind('model.item.remove', 'large_image', removeThumbnails)
Example #26
def _onUpload(event):
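    # The upload 'reference' is a free-form string; this plugin encodes JSON
    # in it, so a reference that fails to parse is not ours and is ignored.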
    try:
        ref = json.loads(event.info.get('reference'))
    except (ValueError, TypeError):
        return

    if isinstance(ref, dict) and ref.get('type') == 'slicer_cli.parameteroutput':
        jobModel = ModelImporter.model('job', 'jobs')
        job = jobModel.load(ref['jobId'], force=True, exc=True)

        file = event.info['file']

        # Add link to job model to the output item
        jobModel.updateJob(job, otherFields={
            'slicerCLIBindings.outputs.parameters': file['_id']
        })
Example #27
    def putDockerImage(self, names, jobType, pullIfNotLocal=False):
        """
        Attempts to cache metadata on the docker images listed in the names
        list.
        If the pullIfNotLocal flag is true, the job will attempt to pull
         the image if it does not exist.
        :param names: A list of docker image names(can use with tags or digests)
        :param jobType: defines the jobtype of the job that will be schedueled
         ,used by event listeners to determine if a job succeeded or not
         :param pullIfNotLocal: Boolean to determine whether a non existent
         image
         should be pulled,(attempts to pull from default docker hub registry)
        """
        jobModel = ModelImporter.model('job', 'jobs')
        # list of images to pull and load
        pullList = []
        # list of images that exist locally and just need to be parsed and saved
        loadList = []
        for name in names:
            try:
                self._ImageExistsLocally(name)
                data = self.collection.find_one(DockerImage.getHashKey(name))
                if data is None:
                    loadList.append(name)
            # the image is not available locally; pull it only if requested
            except DockerImageNotFoundError:
                if pullIfNotLocal:
                    pullList.append(name)

        job = jobModel.createLocalJob(
            module='girder.plugins.slicer_cli_web.image_job',
            function='jobPullAndLoad',
            kwargs={
                'pullList': pullList,
                'loadList': loadList
            },
            title='Pulling and caching docker images ',
            type=jobType,
            user=getCurrentUser(),
            public=True,
            async=True)

        jobModel.scheduleJob(job)
Example #28
def updateJob(event):
    """
    Called when a job is saved, updated, or removed.  If this is a video
    job and it is ended, clean up after it.
    """
    global JobStatus
    if not JobStatus:
        from girder.plugins.jobs.constants import JobStatus

    job = (
        event.info['job']
        if event.name == 'jobs.job.update.after'
        else event.info
    )

    jobVideoData = job.get('meta', {}).get('video_plugin')
    if jobVideoData is None:
        return

    videoItemId = jobVideoData.get('itemId')
    videoFileId = jobVideoData.get('fileId')
    if videoItemId is None or videoFileId is None:
        return

    status = job['status']
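    # A job removed before reaching a terminal state is treated as canceled.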
    if event.name == 'model.job.remove' and status not in (
            JobStatus.ERROR, JobStatus.CANCELED, JobStatus.SUCCESS):
        status = JobStatus.CANCELED
    if status not in (JobStatus.ERROR, JobStatus.CANCELED, JobStatus.SUCCESS):
        return

    item = ModelImporter.model('item').load(videoItemId, force=True)
    if not item:
        return

    itemVideoData = item.get('video')
    if itemVideoData is None:
        return

    if itemVideoData['jobId'] != str(job['_id']):
        return

    # TODO(opadron): remove this after this section is finished
    print(
        'Found video item %s from job %s' %
        (videoItemId, str(job['_id'])))
Example #29
def load(info):

    girderRoot = info['serverRoot']
    histomicsRoot = WebrootHistomicsTK(_template)
    histomicsRoot.updateHtmlVars(girderRoot.vars)

    ImageBrowseResource(info['apiRoot'])

    # The interface is always available under histomicstk and also available
    # under the specified path.
    info['serverRoot'].histomicstk = histomicsRoot
    webrootPath = Setting().get(PluginSettings.HISTOMICSTK_WEBROOT_PATH)
    setattr(info['serverRoot'], webrootPath, histomicsRoot)
    info['serverRoot'].girder = girderRoot

    pluginName = 'HistomicsTK'
    # create root resource for all REST end points of HistomicsTK
    resource = HistomicsTKResource(pluginName)
    setattr(info['apiRoot'], resource.resourceName, resource)

    # load docker images from cache
    dockerImageModel = ModelImporter.model('docker_image_model',
                                           'slicer_cli_web')
    dockerCache = dockerImageModel.loadAllImages()

    # generate REST end points for slicer CLIs of each docker image
    genRESTEndPointsForSlicerCLIsInDockerCache(resource, dockerCache)

    # auto-ingest annotations into database when a .anot file is uploaded
    events.bind('data.process', pluginName, process_annotations)

    events.bind('jobs.job.update.after', resource.resourceName,
                resource.AddRestEndpoints)

    events.bind('model.job.save', pluginName, _saveJob)

    def updateWebroot(event):
        """
        If the webroot path setting is changed, bind the new path to the
        histomicstk webroot resource.
        """
        if event.info.get('key') == PluginSettings.HISTOMICSTK_WEBROOT_PATH:
            setattr(info['serverRoot'], event.info['value'], histomicsRoot)

    events.bind('model.setting.save.after', 'histomicstk', updateWebroot)
Example #30
def loadModel(kind):
    """Load a model class from its name."""
    return ModelImporter.model(kind)
Example #31
def loadModel(kind):
    """Load a model class from its name."""
    return ModelImporter().model(kind)
Example #32
File: base.py Project: kotfic/large_image
def removeThumbnails(event):
    ModelImporter.model('image_item',
                        'large_image').removeThumbnailFiles(event.info)
Example #33
def removeThumbnails(event):
    ModelImporter.model('image_item', 'large_image').removeThumbnailFiles(
        event.info)
Example #34
def load(info):

    girderRoot = info['serverRoot']
    histomicsRoot = WebrootHistomicsTK(_template)
    histomicsRoot.updateHtmlVars(girderRoot.vars)

    ImageBrowseResource(info['apiRoot'])

    # The interface is always available under histomicstk and also available
    # under the specified path.
    info['serverRoot'].histomicstk = histomicsRoot
    webrootPath = Setting().get(PluginSettings.HISTOMICSTK_WEBROOT_PATH)
    setattr(info['serverRoot'], webrootPath, histomicsRoot)
    info['serverRoot'].girder = girderRoot

    pluginName = 'HistomicsTK'
    # create root resource for all REST end points of HistomicsTK
    resource = HistomicsTKResource(pluginName)
    setattr(info['apiRoot'], resource.resourceName, resource)

    # load docker images from cache
    dockerImageModel = ModelImporter.model('docker_image_model',
                                           'slicer_cli_web')
    dockerCache = dockerImageModel.loadAllImages()

    # generate REST end points for slicer CLIs of each docker image
    genRESTEndPointsForSlicerCLIsInDockerCache(resource, dockerCache)

    # auto-ingest annotations into database when a .anot file is uploaded
    events.bind('data.process', pluginName, process_annotations)

    events.bind('jobs.job.update.after', resource.resourceName,
                resource.AddRestEndpoints)

    events.bind('model.job.save', pluginName, _saveJob)

    def updateWebroot(event):
        """
        If the webroot path setting is changed, bind the new path to the
        histomicstk webroot resource.
        """
        if event.info.get('key') == PluginSettings.HISTOMICSTK_WEBROOT_PATH:
            setattr(info['serverRoot'], event.info['value'], histomicsRoot)

    events.bind('model.setting.save.after', 'histomicstk', updateWebroot)

    curConfig = config.getConfig().get('histomicstk', {})
    if curConfig.get('restrict_downloads'):
        # Change some endpoints to require token access
        endpoints = [
            ('collection', 'GET', (':id', 'download')),
            ('file', 'GET', (':id', 'download')),
            ('file', 'GET', (':id', 'download', ':name')),
            ('folder', 'GET', (':id', 'download')),
            ('item', 'GET', (':id', 'download')),
            ('resource', 'GET', ('download', )),
            ('resource', 'POST', ('download', )),

            ('item', 'GET', (':itemId', 'tiles', 'images', ':image')),
        ]

        for resource, method, route in endpoints:
            cls = getattr(info['apiRoot'], resource)
            func = cls.getRouteHandler(method, route)
            if func.accessLevel == 'public':
                func = access.token(func)
                cls.removeRoute(method, route)
                cls.route(method, route, func)
Example #35
def deleteImage(job):
    """
    Deletes the docker images specified in the job from the local machine.
    Images are forcefully removed (equivalent to docker rmi -f)
    :param job: The job object specifying the docker images to remove from
    the local machine

    """

    jobModel = ModelImporter.model('job', 'jobs')

    jobModel.updateJob(
        job,
        log='Started to Delete Docker images\n',
        status=JobStatus.RUNNING,
    )
    try:
        deleteList = job['kwargs']['deleteList']
        error = False
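        # deleteList holds the image names to force-remove; error tracks
        # whether any individual removal failed.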

        try:
            docker_client = docker.from_env(version='auto')

        except docker.errors.DockerException as err:
            logger.exception('Could not create the docker client')
            jobModel.updateJob(
                job,
                log='Failed to create the Docker Client\n' + str(err) + '\n',
                status=JobStatus.ERROR,
            )
            raise DockerImageError('Could not create the docker client')

        for name in deleteList:
            try:
                docker_client.images.remove(name, force=True)

            except Exception as err:
                logger.exception('Failed to remove image')
                jobModel.updateJob(
                    job,
                    log='Failed to remove image \n' + str(err) + '\n',
                    status=JobStatus.RUNNING,
                )
                error = True
        if error is True:
            jobModel.updateJob(job,
                               log='Failed to remove some images',
                               status=JobStatus.ERROR,
                               notify=True,
                               progressMessage='Errors deleting some images')
        else:

            jobModel.updateJob(job,
                               log='Removed all images',
                               status=JobStatus.SUCCESS,
                               notify=True,
                               progressMessage='Removed all images')
    except Exception as err:
        logger.exception('Error with job')
        jobModel.updateJob(
            job,
            log='Error with job \n ' + str(err) + '\n',
            status=JobStatus.ERROR,
        )
Example #36
def jobPullAndLoad(job):
    """
    Attempts to cache metadata on images in the pull list and load list.
    Images in the pull list are pulled first, then images in both lists are
    queried for there clis and each cli's xml description. The clis and
    xml data is stored in the girder mongo database
    Event Listeners assume the job is done when the job status
     is ERROR or SUCCESS.
    Event listeners check the jobtype to determine if a job is Dockerimage
    related
    """
    try:
        jobModel = ModelImporter.model('job', 'jobs')
        pullList = job['kwargs']['pullList']
        loadList = job['kwargs']['loadList']
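        # pullList: images to fetch from a registry; loadList: images already
        # present locally that only need their CLI metadata parsed and saved.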

        errorState = False

        notExistSet = set()
        jobModel.updateJob(
            job,
            log='Started to Load Docker images\n',
            status=JobStatus.RUNNING,
        )
        try:
            docker_client = docker.from_env(version='auto')

        except docker.errors.DockerException as err:
            logger.exception('Could not create the docker client')
            jobModel.updateJob(
                job,
                log='Failed to create the Docker Client\n' + str(err) + '\n',
            )
            raise DockerImageError('Could not create the docker client')

        try:
            pullDockerImage(docker_client, pullList)
        except DockerImageNotFoundError as err:
            errorState = True
            notExistSet = set(err.imageName)
            jobModel.updateJob(
                job,
                log='could not find the following '
                'images\n' + '\n'.join(notExistSet) + '\n',
                status=JobStatus.ERROR,
            )
        cache, loadingError = LoadMetadata(jobModel, job, docker_client,
                                           pullList, loadList, notExistSet)
        imageModel = ModelImporter.model('docker_image_model',
                                         'slicer_cli_web')

        imageModel.saveAllImgs(cache)
        if errorState is False and loadingError is False:
            newStatus = JobStatus.SUCCESS
        else:
            newStatus = JobStatus.ERROR
        jobModel.updateJob(job,
                           log='Finished caching Docker image data\n',
                           status=newStatus,
                           notify=True,
                           progressMessage='Completed caching docker images')
    except Exception as err:
        logger.exception('Error with job')
        jobModel.updateJob(
            job,
            log='Error with job \n ' + str(err) + '\n',
            status=JobStatus.ERROR,
        )