Example #1
    def deleteNotebook(self, notebook, token):
        payload = {
            'serviceInfo': notebook.get('serviceInfo', {}),
            'girder_token': str(token['_id']),
            'apiUrl': getWorkerApiUrl()
        }

        try:
            instanceTask = getCeleryApp().send_task(
                'gwvolman.tasks.shutdown_container', args=[payload],
                queue='manager',
            )
            instanceTask.get()
        except Exception:
            pass

        try:
            volumeTask = getCeleryApp().send_task(
                'gwvolman.tasks.remove_volume', args=[payload],
                queue=notebook['serviceInfo']['nodeId']
            )
            volumeTask.get()
        except Exception:
            pass

        self.remove(notebook)
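
All of the examples on this page assume that getCeleryApp() returns a Celery application configured against the Girder worker's broker and result backend. A minimal sketch of such a helper, with placeholder broker and backend URLs:

# Minimal sketch of a getCeleryApp() helper; the broker and backend URLs are
# placeholders, not the real Girder worker settings.
from celery import Celery

_celeryApp = None


def getCeleryApp():
    global _celeryApp
    if _celeryApp is None:
        _celeryApp = Celery(
            'girder_worker',
            broker='amqp://guest:guest@localhost/',  # placeholder broker URL
            backend='rpc://')                        # placeholder result backend
    return _celeryApp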
Example #2
    def createInstance(self, tale, user, token, name=None, save=True):
        existing = self.findOne({
            'taleId': tale['_id'],
            'creatorId': user['_id'],
        })
        if existing:
            return existing

        if not name:
            name = tale.get('title', '')

        workspaceFolder = self.model('tale', 'wholetale').createWorkspace(tale)

        now = datetime.datetime.utcnow()
        payload = {
            'girder_token': str(token['_id']),
            'apiUrl': getWorkerApiUrl(),
            'taleId': str(tale['_id']),
            'workspaceId': str(workspaceFolder['_id']),
            'api_version': API_VERSION
        }

        volumeTask = getCeleryApp().send_task('gwvolman.tasks.create_volume',
                                              args=[payload])
        volume = volumeTask.get(timeout=TASK_TIMEOUT)
        payload.update(volume)

        serviceTask = getCeleryApp().send_task(
            'gwvolman.tasks.launch_container', args=[payload], queue='manager')
        service = serviceTask.get(timeout=TASK_TIMEOUT)
        service.update(volume)

        netloc = urllib.parse.urlsplit(getApiUrl()).netloc
        domain = '{}.{}'.format(service['name'],
                                netloc.split(':')[0].split('.', 1)[1])
        url = 'https://{}/{}'.format(domain, service.get('urlPath', ''))

        _wait_for_server(url)

        instance = {
            'taleId': tale['_id'],
            'created': now,
            'creatorId': user['_id'],
            'lastActivity': now,
            'containerInfo': service,
            'name': name,
            'status': InstanceStatus.RUNNING,  # be optimistic for now
            'url': url
        }

        self.setUserAccess(instance, user=user, level=AccessType.ADMIN)
        if save:
            instance = self.save(instance)

        return instance
Example #3
def getJobResult(self, job):
    user = self.getCurrentUser()
    if not job.get('public', False):
        if user:
            JobModel().requireAccess(job, user, level=AccessType.READ)
        else:
            self.ensureTokenScopes('jobs.job_' + str(job['_id']))

    if 'result' in job:
        return job['result']

    celeryTaskId = job.get('celeryTaskId')
    if celeryTaskId is None:
        logger.warn("Job '{}' doesn't have a Celery task id.".format(
            job['_id']))
        return
    if job['status'] != JobStatus.SUCCESS:
        logger.warn("Job '{}' hasn't completed sucessfully.".format(
            job['_id']))
    asyncResult = getCeleryApp().AsyncResult(celeryTaskId)
    try:
        result = asyncResult.get()
    except Exception as ex:
        result = str(ex)
    return result
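
asyncResult.get() above re-raises any exception stored for the task, which is why the call is wrapped in try/except. A hedged sketch of an alternative helper (the name fetchCeleryResult is illustrative) that uses Celery's propagate=False to return the stored exception instead of raising it:

# Sketch only: fetch a task result without letting a stored exception propagate.
from celery.result import AsyncResult


def fetchCeleryResult(app, celeryTaskId, timeout=10):
    asyncResult = AsyncResult(celeryTaskId, app=app)
    if not asyncResult.ready():
        return None  # still pending or running
    # propagate=False returns the exception object instead of re-raising it
    return asyncResult.get(timeout=timeout, propagate=False)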
Example #4
    def deleteInstance(self, instance, token):
        payload = {
            'instanceId': str(instance['_id']),
            'girder_token': str(token['_id']),
            'apiUrl': getWorkerApiUrl()
        }

        app = getCeleryApp()
        active_queues = list(app.control.inspect().active_queues().keys())

        instanceTask = app.send_task(
            'gwvolman.tasks.shutdown_container',
            args=[payload],
            queue='manager',
        )
        instanceTask.get(timeout=TASK_TIMEOUT)

        queue = 'celery@{}'.format(instance['containerInfo']['nodeId'])
        if queue in active_queues:
            volumeTask = app.send_task(
                'gwvolman.tasks.remove_volume',
                args=[payload],
                queue=instance['containerInfo']['nodeId'])
            volumeTask.get(timeout=TASK_TIMEOUT)

        # TODO: handle error
        self.remove(instance)
Example #5
    def deleteInstance(self, instance, user):
        instance["status"] = InstanceStatus.DELETING
        instance = self.updateInstance(instance)
        token = Token().createToken(user=user, days=0.5)
        app = getCeleryApp()
        active_queues = list(app.control.inspect().active_queues().keys())

        instanceTask = shutdown_container.signature(
            args=[str(instance['_id'])],
            queue='manager',
            girder_client_token=str(token['_id']),
        ).apply_async()
        instanceTask.get(timeout=TASK_TIMEOUT)

        try:
            queue = 'celery@{}'.format(instance['containerInfo']['nodeId'])
            if queue in active_queues:
                volumeTask = remove_volume.signature(
                    args=[str(instance['_id'])],
                    girder_client_token=str(token['_id']),
                    queue=instance['containerInfo']['nodeId']).apply_async()
                volumeTask.get(timeout=TASK_TIMEOUT)
        except KeyError:
            pass

        # TODO: handle error
        self.remove(instance)
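
Examples #4 and #5 dispatch the same gwvolman tasks in two different ways: by task name with send_task, or by importing the task and calling signature(...).apply_async(). A short sketch contrasting the two styles; payload, instance_id and token_id are placeholders for the values built in those examples, and getCeleryApp is the helper used throughout this page.

# Sketch: the two dispatch styles used in Examples #4 and #5.
from gwvolman.tasks import shutdown_container

app = getCeleryApp()
payload = {'instanceId': '<instance id>', 'girder_token': '<token id>'}  # placeholder
instance_id = '<instance id>'   # placeholder
token_id = '<girder token id>'  # placeholder

# 1) By task name -- the task is referenced only through its string name.
nameResult = app.send_task(
    'gwvolman.tasks.shutdown_container', args=[payload], queue='manager')

# 2) Via the task object -- lets the caller attach extra options
#    (such as girder_client_token) directly to the signature.
sigResult = shutdown_container.signature(
    args=[instance_id], queue='manager',
    girder_client_token=token_id).apply_async()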
Example #6
File: __init__.py  Project: Kitware/flow
def flowConvertData(inputType, inputFormat, outputFormat, params):
    content = cherrypy.request.body.read()

    asyncResult = getCeleryApp().send_task('girder_worker.convert', [
        inputType,
        {"data": content, "format": inputFormat},
        {"format": outputFormat}
    ])

    return asyncResult.get()
Example #7
File: __init__.py  Project: Kitware/flow
    def flowConvert(itemId, inputType, inputFormat, outputFormat, params):
        itemApi = info['apiRoot'].item

        content = getItemContent(itemId, itemApi)

        asyncResult = getCeleryApp().send_task('girder_worker.convert', [
            inputType,
            {"data": content, "format": inputFormat},
            {"format": outputFormat}
        ])

        return asyncResult.get()
Example #8
def flowConvertData(inputType, inputFormat, outputFormat, params):
    content = cherrypy.request.body.read()

    asyncResult = getCeleryApp().send_task('girder_worker.convert', [
        inputType, {
            "data": content,
            "format": inputFormat
        }, {
            "format": outputFormat
        }
    ])

    return asyncResult.get()
Example #9
    def flowConvert(itemId, inputType, inputFormat, outputFormat, params):
        itemApi = info['apiRoot'].item

        content = getItemContent(itemId, itemApi)

        asyncResult = getCeleryApp().send_task('girder_worker.convert', [
            inputType, {
                "data": content,
                "format": inputFormat
            }, {
                "format": outputFormat
            }
        ])

        return asyncResult.get()
Example #10
    def updateImageStatus(self, event):
        job = event.info['job']
        if job['type'] == 'build_image' and job.get('status') is not None:
            status = int(job['status'])
            # FIXME: Who should be able to build images?
            image = self.model('image', 'wholetale').load(job['args'][0],
                                                          force=True)
            if status == JobStatus.SUCCESS:
                result = getCeleryApp().AsyncResult(job['celeryTaskId']).get()
                image['digest'] = result['Id']
                image['status'] = ImageStatus.AVAILABLE
            elif status == JobStatus.ERROR:
                image['status'] = ImageStatus.INVALID
            elif status in (JobStatus.QUEUED, JobStatus.RUNNING):
                image['status'] = ImageStatus.BUILDING
            self.model('image', 'wholetale').updateImage(image)
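
updateImageStatus above (like handleUpdateJob in Example #13) is an event handler that reacts to job updates from the Girder jobs plugin; the binding itself is not shown on this page. A hedged sketch of the usual wiring, where imageModel is a placeholder for whatever object owns the handler:

# Sketch: wiring a job-update handler; 'imageModel' is a placeholder.
from girder import events

events.bind('jobs.job.update.after', 'wholetale', imageModel.updateImageStatus)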
Example #11
    def flowRunStatus(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)

        # Get the celery result for the corresponding task ID.
        result = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        try:
            response = {'status': result.state}
            if result.state == celery.states.FAILURE:
                response['message'] = str(result.result)
            elif result.state == 'PROGRESS':
                response['meta'] = str(result.result)
            return response
        except Exception:
            return {
                'status': 'FAILURE',
                'message': sys.exc_info(),
                'trace': sys.exc_info()[2]
            }
Example #12
File: __init__.py  Project: Kitware/flow
    def flowRunStatus(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)

        # Get the celery result for the corresponding task ID.
        result = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        try:
            response = {'status': result.state}
            if result.state == celery.states.FAILURE:
                response['message'] = str(result.result)
            elif result.state == 'PROGRESS':
                response['meta'] = str(result.result)
            return response
        except Exception:
            return {
                'status': 'FAILURE',
                'message': sys.exc_info(),
                'trace': sys.exc_info()[2]
            }
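
flowRunStatus reduces the Celery task state to a small JSON payload, exposed later as GET item/{itemId}/flow/{jobId}/status (see the routes in Example #18). A hedged sketch of a client polling that route until the task reaches a terminal state; apiUrl and token are placeholders:

# Sketch: client-side polling of the status route; apiUrl and token are placeholders.
import time

import requests


def waitForFlowJob(apiUrl, itemId, jobId, token=None, interval=2.0):
    headers = {'Girder-Token': token} if token else {}
    while True:
        resp = requests.get(
            '{}/item/{}/flow/{}/status'.format(apiUrl, itemId, jobId),
            headers=headers)
        resp.raise_for_status()
        status = resp.json()['status']
        if status in ('SUCCESS', 'FAILURE', 'REVOKED'):
            return status
        time.sleep(interval)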
Example #13
    def handleUpdateJob(self, event):
        job = event.info['job']
        if not (job['title'] == 'Update Instance'
                and job.get('status') is not None):
            return

        status = int(job['status'])
        instance = self._model.load(job['args'][0], force=True)

        if status == JobStatus.SUCCESS:
            result = getCeleryApp().AsyncResult(job['celeryTaskId']).get()
            instance['containerInfo'].update(result)
            instance['status'] = InstanceStatus.RUNNING
        elif status == JobStatus.ERROR:
            instance['status'] = InstanceStatus.ERROR
        elif status in (JobStatus.QUEUED, JobStatus.RUNNING):
            instance['status'] = InstanceStatus.LAUNCHING
        self._model.updateInstance(instance)
Example #14
File: __init__.py  Project: Kitware/flow
        def streamGen():
            start = time.time()
            endtime = None
            oldLog = ''
            while (time.time() - start < timeout and
                   cherrypy.engine.state == cherrypy.engine.states.STARTED and
                   (endtime is None or time.time() < endtime)):
                # Display new log info from this job since the
                # last execution of this loop.
                job = jobApi.model('job', 'jobs').load(
                    jobId,
                    user=jobApi.getCurrentUser(),
                    level=AccessType.READ)
                newLog = job['log']
                if newLog != oldLog:
                    start = time.time()
                    logDiff = newLog[newLog.find(oldLog) + len(oldLog):]
                    oldLog = newLog
                    # We send a separate message for each line,
                    # as I discovered that any information after the
                    # first newline was being lost...
                    for line in logDiff.rstrip().split('\n'):
                        yield sseMessage(line)
                if endtime is None:
                    result = AsyncResult(celeryTaskId,
                                         backend=getCeleryApp().backend)
                    if (result.state == celery.states.FAILURE or
                            result.state == celery.states.SUCCESS or
                            result.state == celery.states.REVOKED):
                        # Stop checking for messages in 5 seconds
                        endtime = time.time() + 5
                time.sleep(0.5)

            # Signal the end of the stream
            yield 'event: eof\ndata: null\n\n'

            # One more for good measure - client should not get this
            yield 'event: past-end\ndata: null\n\n'
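
streamGen above emits server-sent events: one 'event: log' message per new log line, followed by an 'event: eof' terminator. A hedged sketch of consuming that stream with plain requests; the URL layout is assumed from the 'output' route registered in Example #18:

# Sketch: reading the SSE stream produced by streamGen; URL layout assumed
# from the 'output' route in Example #18.
import requests


def readFlowOutput(apiUrl, itemId, jobId):
    url = '{}/item/{}/flow/{}/output'.format(apiUrl, itemId, jobId)
    with requests.get(url, stream=True) as resp:
        event = None
        for raw in resp.iter_lines():
            line = raw.decode('utf-8')
            if line.startswith('event:'):
                event = line.split(':', 1)[1].strip()
            elif line.startswith('data:') and event == 'log':
                yield line.split(':', 1)[1].strip()
            elif event == 'eof':
                break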
Example #15
    def updateBuildStatus(self, event):
        """
        Event handler that updates the Tale object based on the build_tale_image task.
        """
        job = event.info['job']
        if job['title'] == 'Build Tale Image' and job.get(
                'status') is not None:
            status = int(job['status'])
            tale = self.model('tale', 'wholetale').load(job['args'][0],
                                                        force=True)

            if 'imageInfo' not in tale:
                tale['imageInfo'] = {}

            # Store the previous status, if present.
            previousStatus = -1
            try:
                previousStatus = tale['imageInfo']['status']
            except KeyError:
                pass

            if status == JobStatus.SUCCESS:
                result = getCeleryApp().AsyncResult(job['celeryTaskId']).get()
                tale['imageInfo']['digest'] = result['image_digest']
                tale['imageInfo']['repo2docker_version'] = result[
                    'repo2docker_version']
                tale['imageInfo']['last_build'] = result['last_build']
                tale['imageInfo']['status'] = ImageStatus.AVAILABLE
            elif status == JobStatus.ERROR:
                tale['imageInfo']['status'] = ImageStatus.INVALID
            elif status in (JobStatus.QUEUED, JobStatus.RUNNING):
                tale['imageInfo']['jobId'] = job['_id']
                tale['imageInfo']['status'] = ImageStatus.BUILDING

            # If the status changed, save the object
            if 'status' in tale['imageInfo'] and tale['imageInfo'][
                    'status'] != previousStatus:
                self.model('tale', 'wholetale').updateTale(tale)
Example #16
        def streamGen():
            start = time.time()
            endtime = None
            oldLog = ''
            while (time.time() - start < timeout
                   and cherrypy.engine.state == cherrypy.engine.states.STARTED
                   and (endtime is None or time.time() < endtime)):
                # Display new log info from this job since the
                # last execution of this loop.
                job = jobApi.model('job',
                                   'jobs').load(jobId,
                                                user=jobApi.getCurrentUser(),
                                                level=AccessType.READ)
                newLog = job['log']
                if newLog != oldLog:
                    start = time.time()
                    logDiff = newLog[newLog.find(oldLog) + len(oldLog):]
                    oldLog = newLog
                    # We send a separate message for each line,
                    # as I discovered that any information after the
                    # first newline was being lost...
                    for line in logDiff.rstrip().split('\n'):
                        yield sseMessage(line)
                if endtime is None:
                    result = AsyncResult(celeryTaskId,
                                         backend=getCeleryApp().backend)
                    if (result.state == celery.states.FAILURE
                            or result.state == celery.states.SUCCESS
                            or result.state == celery.states.REVOKED):
                        # Stop checking for messages in 5 seconds
                        endtime = time.time() + 5
                time.sleep(0.5)

            # Signal the end of the stream
            yield 'event: eof\ndata: null\n\n'

            # One more for good measure - client should not get this
            yield 'event: past-end\ndata: null\n\n'
Example #17
def finalizeInstance(event):
    job = event.info['job']

    if job.get("instance_id"):
        instance = Instance().load(job["instance_id"], force=True)

        if (instance["status"] == InstanceStatus.LAUNCHING
                and job["status"] == JobStatus.ERROR  # noqa
            ):
            instance["status"] = InstanceStatus.ERROR
            Instance().updateInstance(instance)

    if job['title'] == 'Spawn Instance' and job.get('status') is not None:
        status = int(job['status'])
        instance_id = job['args'][0]['instanceId']
        instance = Instance().load(instance_id, force=True, exc=True)
        update = True
        if (status == JobStatus.SUCCESS
                and instance["status"] == InstanceStatus.LAUNCHING  # noqa
            ):
            service = getCeleryApp().AsyncResult(job['celeryTaskId']).get()
            valid_keys = set(containerInfoSchema['properties'].keys())
            containerInfo = {key: service.get(key, '') for key in valid_keys}
            url = service.get('url', 'https://google.com')
            _wait_for_server(url)

            # Since _wait_for_server can potentially take some time,
            # we need to refresh the state of the instance
            instance = Instance().load(instance_id, force=True, exc=True)
            if instance["status"] != InstanceStatus.LAUNCHING:
                return  # bail

            # Preserve the imageId / current digest in containerInfo
            tale = Tale().load(instance['taleId'], force=True)
            containerInfo['imageId'] = tale['imageId']
            containerInfo['digest'] = tale['imageInfo']['digest']

            instance.update({
                'url': url,
                'status': InstanceStatus.RUNNING,
                'containerInfo': containerInfo,
            })
            if "sessionId" in service:
                instance["sessionId"] = ObjectId(service["sessionId"])
        elif (status == JobStatus.ERROR
              and instance["status"] != InstanceStatus.ERROR  # noqa
              ):
            instance['status'] = InstanceStatus.ERROR
        elif (status in (JobStatus.QUEUED, JobStatus.RUNNING)
              and instance["status"] != InstanceStatus.LAUNCHING  # noqa
              ):
            instance['status'] = InstanceStatus.LAUNCHING
        else:
            update = False

        if update:
            msg = "Updating instance ({_id}) in finalizeInstance".format(
                **instance)
            msg += " for job(id={_id}, status={status})".format(**job)
            logger.debug(msg)
            Instance().updateInstance(instance)
Example #18
File: __init__.py  Project: Kitware/flow
def load(info):
    flow_mako = os.path.join(os.path.dirname(__file__), "flow.mako")
    flow_webroot = Webroot(flow_mako)
    flow_webroot.updateHtmlVars({
        'brand': 'Arbor'
    })

    # @todo somehow the API lives at /api/v1 and /girder/api/v1
    info['serverRoot'], info['serverRoot'].girder = (flow_webroot,
                                                     info['serverRoot'])

    info['serverRoot'].api = info['serverRoot'].girder.api

    staticDir = os.path.join(info['pluginRootDir'], 'static')
    if os.path.isdir(staticDir):
        for path in os.listdir(staticDir):
            if os.path.isdir(os.path.join(staticDir, path)):
                info['config'][str('/' + path)] = {
                    'tools.staticdir.on': True,
                    'tools.staticdir.dir': os.path.join(staticDir, path),
                    'tools.staticdir.index': 'index.html'
                }

    @access.public
    def flowConvert(itemId, inputType, inputFormat, outputFormat, params):
        itemApi = info['apiRoot'].item

        content = getItemContent(itemId, itemApi)

        asyncResult = getCeleryApp().send_task('girder_worker.convert', [
            inputType,
            {"data": content, "format": inputFormat},
            {"format": outputFormat}
        ])

        return asyncResult.get()
    flowConvert.description = (
        Description('Convert an item from one format to another')
        .param('itemId', 'ID of the item to be converted')
        .param('inputType', 'The type of the input data')
        .param('inputFormat', 'The format of the input data')
        .param('outputFormat', 'The desired output format'))

    @access.public
    def getTaskId(jobId):
        # Get the celery task ID for this job.
        jobApi = info['apiRoot'].job
        job = jobApi.model('job', 'jobs').load(
            jobId, user=jobApi.getCurrentUser(), level=AccessType.READ)
        return job["celeryTaskId"]

    @access.public
    def flowRunStatus(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)

        # Get the celery result for the corresponding task ID.
        result = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        try:
            response = {'status': result.state}
            if result.state == celery.states.FAILURE:
                response['message'] = str(result.result)
            elif result.state == 'PROGRESS':
                response['meta'] = str(result.result)
            return response
        except Exception:
            return {
                'status': 'FAILURE',
                'message': sys.exc_info(),
                'trace': sys.exc_info()[2]
            }
    flowRunStatus.description = (
        Description('Show the status of a flow task')
        .param('jobId', 'The job ID for this task.', paramType='path')
        .param('itemId', 'Not used.', paramType='path'))

    @access.public
    def flowRunResult(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)
        job = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        return {'result': job.result}
    flowRunResult.description = (
        Description('Show the final output of a flow task.')
        .param('jobId', 'The job ID for this task.', paramType='path')
        .param('itemId', 'Not used.', paramType='path'))

    @access.public
    @rest.boundHandler(info['apiRoot'].item)
    @rest.loadmodel(map={'itemId': 'item'}, model='item',
                    level=AccessType.READ)
    def flowRun(self, item, params):
        # Make sure that we have permission to perform this analysis.
        # import pudb
        # pu.db
        user = self.getCurrentUser()

        settings = ModelImporter.model('setting')
        requireAuth = settings.get(FlowPluginSettings.REQUIRE_AUTH, True)

        if requireAuth:
            safeFolders = settings.get(FlowPluginSettings.SAFE_FOLDERS, ())
            fullAccessUsers = settings.get(FlowPluginSettings.FULL_ACCESS_USERS, ())
            fullAccessGrps = settings.get(FlowPluginSettings.FULL_ACCESS_GROUPS, ())
            userGrps = {str(id) for id in user.get('groups', ())}

            if (str(item['folderId']) not in safeFolders and (
                    not user or user['login'] not in fullAccessUsers) and
                    not userGrps & set(fullAccessGrps)):
                raise AccessException('Unauthorized user.')

        analysis = item.get('meta', {}).get('analysis')

        if type(analysis) is not dict:
            raise rest.RestException(
                'Must specify a valid JSON object as the "analysis" metadata '
                'field on the input item.')
        # Get the analysis parameters (includes inputs & outputs).
        try:
            kwargs = json.load(cherrypy.request.body)
        except ValueError:
            raise rest.RestException(
                'You must pass a valid JSON object in the request body.')

        return runAnalysis(user, analysis, kwargs, item)
    flowRun.description = (
        Description('Run a task specified by item metadata.')
        .param('itemId', 'The item containing the analysis as metadata.',
               paramType='path')
        .param('kwargs', 'Additional kwargs for the worker task.',
               paramType='body'))

    @access.public
    def flowRunOutput(itemId, jobId, params):
        jobApi = info['apiRoot'].job
        celeryTaskId = getTaskId(jobId)
        timeout = 300
        cherrypy.response.headers['Content-Type'] = 'text/event-stream'
        cherrypy.response.headers['Cache-Control'] = 'no-cache'

        def sseMessage(output):
            if type(output) == unicode:
                output = output.encode('utf8')
            return 'event: log\ndata: {}\n\n'.format(output)

        def streamGen():
            start = time.time()
            endtime = None
            oldLog = ''
            while (time.time() - start < timeout and
                   cherrypy.engine.state == cherrypy.engine.states.STARTED and
                   (endtime is None or time.time() < endtime)):
                # Display new log info from this job since the
                # last execution of this loop.
                job = jobApi.model('job', 'jobs').load(
                    jobId,
                    user=jobApi.getCurrentUser(),
                    level=AccessType.READ)
                newLog = job['log']
                if newLog != oldLog:
                    start = time.time()
                    logDiff = newLog[newLog.find(oldLog) + len(oldLog):]
                    oldLog = newLog
                    # We send a separate message for each line,
                    # as I discovered that any information after the
                    # first newline was being lost...
                    for line in logDiff.rstrip().split('\n'):
                        yield sseMessage(line)
                if endtime is None:
                    result = AsyncResult(celeryTaskId,
                                         backend=getCeleryApp().backend)
                    if (result.state == celery.states.FAILURE or
                            result.state == celery.states.SUCCESS or
                            result.state == celery.states.REVOKED):
                        # Stop checking for messages in 5 seconds
                        endtime = time.time() + 5
                time.sleep(0.5)

            # Signal the end of the stream
            yield 'event: eof\ndata: null\n\n'

            # One more for good measure - client should not get this
            yield 'event: past-end\ndata: null\n\n'

        return streamGen

    @access.public
    def flowStopRun(jobId, params):
        task = AsyncResult(jobId, backend=getCeleryApp().backend)
        task.revoke(getCeleryApp().broker_connection(), terminate=True)
        return {'status': task.state}
    flowStopRun.description = (
        Description('Stop execution of the specified job')
        .param('jobId', 'The Job ID for this task'))

    info['apiRoot'].flow_validator = Validator(getCeleryApp())
    info['apiRoot'].item.route(
        'POST',
        ('flow', ':inputType', ':inputFormat', ':outputFormat'),
        flowConvertData)

    info['apiRoot'].item.route(
        'GET',
        (':itemId', 'flow', ':jobId', 'status'),
        flowRunStatus)

    info['apiRoot'].item.route(
        'GET',
        (':itemId', 'flow', ':jobId', 'result'),
        flowRunResult)

    info['apiRoot'].item.route(
        'POST',
        (':itemId', 'flow'),
        flowRun)

    info['apiRoot'].item.route(
        'GET',
        (':itemId', 'flow', ':jobId', 'output'),
        flowRunOutput)

    info['apiRoot'].item.route(
        'GET',
        (':itemId', 'flow', ':inputType', ':inputFormat',
         ':outputFormat'),
        flowConvert)

    info['apiRoot'].item.route(
        'DELETE',
        (':itemId', 'flow', ':jobId'),
        flowStopRun)

    events.bind('model.setting.validate', 'flow', validateSettings)
Example #19
    def flowStopRun(jobId, params):
        task = AsyncResult(jobId, backend=getCeleryApp().backend)
        task.revoke(getCeleryApp().broker_connection(), terminate=True)
        return {'status': task.state}
Example #20
    def flowRunResult(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)
        job = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        return {'result': job.result}
Example #21
File: __init__.py  Project: Kitware/flow
    def flowRunResult(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)
        job = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        return {'result': job.result}
Example #22
    def createNotebook(self, folder, user, token, frontend, scripts=None,
                       when=None, save=True):
        existing = self.findOne({
            'folderId': folder['_id'],
            'creatorId': user['_id'],
            'frontendId': frontend['_id']
        })
        if existing:
            return existing

        now = datetime.datetime.utcnow()
        notebook = {
            'folderId': folder['_id'],
            'creatorId': user['_id'],
            'frontendId': frontend['_id'],
            'status': NotebookStatus.STARTING,
            'created': now
        }
        if save:
            notebook = self.save(notebook)

        total = 3.0
        notification = Notification().initProgress(
            user, 'Starting Notebook', total, state=ProgressState.QUEUED,
            current=0.0, message='Initialization',
            estimateTime=False, resourceName=self.name,
            resource=notebook)

        payload = {
            'girder_token': token['_id'],
            'folder': {k: str(v) for k, v in folder.items()},
            'frontend': {k: str(v) for k, v in frontend.items()},
            'scripts': scripts,
            'api_version': API_VERSION
        }

        # do the job
        Notification().updateProgress(
            notification, total=total, current=1.0,
            state=ProgressState.ACTIVE, message='Creating and mounting Filesystem',
            expires=datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
        )
        volumeTask = getCeleryApp().send_task(
            'gwvolman.tasks.create_volume', args=[payload], kwargs={},
        )
        volumeInfo = volumeTask.get()
        payload.update(volumeInfo)

        Notification().updateProgress(
            notification, total=total, current=2.0,
            state=ProgressState.ACTIVE, message='Launching Container',
            expires=datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
        )
        serviceTask = getCeleryApp().send_task(
            'gwvolman.tasks.launch_container', args=[payload], kwargs={},
            queue='manager'
        )
        serviceInfo = serviceTask.get()
        serviceInfo.update(volumeInfo)

        tmpnb_url = urlsplit(
            Setting().get(PluginSettings.TMPNB_URL)
        )
        domain = '{}.{}'.format(serviceInfo['serviceId'], tmpnb_url.netloc)
        url = '{}://{}/{}'.format(
            tmpnb_url.scheme, domain, serviceInfo.get('urlPath', ''))

        Notification().updateProgress(
            notification, total=total, current=2.5,
            state=ProgressState.ACTIVE, message='Waiting for Notebook to start',
            expires=datetime.datetime.utcnow() + datetime.timedelta(seconds=30)
        )
        _wait_for_server(url)

        notebook.update({
            'status': NotebookStatus.RUNNING,   # be optimistic for now
            'serviceInfo': serviceInfo,
            'url': url
        })

        Notification().updateProgress(
            notification, total=total, current=3.0,
            state=ProgressState.SUCCESS, message='Redirecting to notebook',
            expires=datetime.datetime.utcnow() + datetime.timedelta(seconds=5)
        )

        self.setPublic(notebook, public=False)
        self.setUserAccess(notebook, user=user, level=AccessType.ADMIN)
        if save:
            notebook = self.save(notebook)

        return notebook
Example #23
def load(info):
    flow_mako = os.path.join(os.path.dirname(__file__), "flow.mako")
    flow_webroot = Webroot(flow_mako)
    flow_webroot.updateHtmlVars({'brand': 'Arbor'})

    # @todo somehow the API lives at /api/v1 and /girder/api/v1
    info['serverRoot'], info['serverRoot'].girder = (flow_webroot,
                                                     info['serverRoot'])

    info['serverRoot'].api = info['serverRoot'].girder.api

    staticDir = os.path.join(info['pluginRootDir'], 'static')
    if os.path.isdir(staticDir):
        for path in os.listdir(staticDir):
            if os.path.isdir(os.path.join(staticDir, path)):
                info['config'][str('/' + path)] = {
                    'tools.staticdir.on': True,
                    'tools.staticdir.dir': os.path.join(staticDir, path),
                    'tools.staticdir.index': 'index.html'
                }

    @access.public
    def flowConvert(itemId, inputType, inputFormat, outputFormat, params):
        itemApi = info['apiRoot'].item

        content = getItemContent(itemId, itemApi)

        asyncResult = getCeleryApp().send_task('girder_worker.convert', [
            inputType, {
                "data": content,
                "format": inputFormat
            }, {
                "format": outputFormat
            }
        ])

        return asyncResult.get()

    flowConvert.description = (
        Description('Convert an item from one format to another').param(
            'itemId', 'ID of the item to be converted').param(
                'inputType', 'The type of the input data').param(
                    'inputFormat', 'The format of the input data').param(
                        'outputFormat', 'The desired output format'))

    @access.public
    def getTaskId(jobId):
        # Get the celery task ID for this job.
        jobApi = info['apiRoot'].job
        job = jobApi.model('job', 'jobs').load(jobId,
                                               user=jobApi.getCurrentUser(),
                                               level=AccessType.READ)
        return job["celeryTaskId"]

    @access.public
    def flowRunStatus(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)

        # Get the celery result for the corresponding task ID.
        result = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        try:
            response = {'status': result.state}
            if result.state == celery.states.FAILURE:
                response['message'] = str(result.result)
            elif result.state == 'PROGRESS':
                response['meta'] = str(result.result)
            return response
        except Exception:
            return {
                'status': 'FAILURE',
                'message': sys.exc_info(),
                'trace': sys.exc_info()[2]
            }

    flowRunStatus.description = (
        Description('Show the status of a flow task').param(
            'jobId', 'The job ID for this task.',
            paramType='path').param('itemId', 'Not used.', paramType='path'))

    @access.public
    def flowRunResult(itemId, jobId, params):
        celeryTaskId = getTaskId(jobId)
        job = AsyncResult(celeryTaskId, backend=getCeleryApp().backend)
        return {'result': job.result}

    flowRunResult.description = (
        Description('Show the final output of a flow task.').param(
            'jobId', 'The job ID for this task.',
            paramType='path').param('itemId', 'Not used.', paramType='path'))

    @access.public
    @rest.boundHandler(info['apiRoot'].item)
    @rest.loadmodel(map={'itemId': 'item'},
                    model='item',
                    level=AccessType.READ)
    def flowRun(self, item, params):
        # Make sure that we have permission to perform this analysis.
        # import pudb
        # pu.db
        user = self.getCurrentUser()

        settings = ModelImporter.model('setting')
        requireAuth = settings.get(FlowPluginSettings.REQUIRE_AUTH, True)

        if requireAuth:
            safeFolders = settings.get(FlowPluginSettings.SAFE_FOLDERS, ())
            fullAccessUsers = settings.get(
                FlowPluginSettings.FULL_ACCESS_USERS, ())
            fullAccessGrps = settings.get(
                FlowPluginSettings.FULL_ACCESS_GROUPS, ())
            userGrps = {str(id) for id in user.get('groups', ())}

            if (str(item['folderId']) not in safeFolders
                    and (not user or user['login'] not in fullAccessUsers)
                    and not userGrps & set(fullAccessGrps)):
                raise AccessException('Unauthorized user.')

        analysis = item.get('meta', {}).get('analysis')

        if type(analysis) is not dict:
            raise rest.RestException(
                'Must specify a valid JSON object as the "analysis" metadata '
                'field on the input item.')
        # Get the analysis parameters (includes inputs & outputs).
        try:
            kwargs = json.load(cherrypy.request.body)
        except ValueError:
            raise rest.RestException(
                'You must pass a valid JSON object in the request body.')

        return runAnalysis(user, analysis, kwargs, item)

    flowRun.description = (
        Description('Run a task specified by item metadata.').param(
            'itemId',
            'The item containing the analysis as metadata.',
            paramType='path').param('kwargs',
                                    'Additional kwargs for the worker task.',
                                    paramType='body'))

    @access.public
    def flowRunOutput(itemId, jobId, params):
        jobApi = info['apiRoot'].job
        celeryTaskId = getTaskId(jobId)
        timeout = 300
        cherrypy.response.headers['Content-Type'] = 'text/event-stream'
        cherrypy.response.headers['Cache-Control'] = 'no-cache'

        def sseMessage(output):
            if type(output) == unicode:
                output = output.encode('utf8')
            return 'event: log\ndata: {}\n\n'.format(output)

        def streamGen():
            start = time.time()
            endtime = None
            oldLog = ''
            while (time.time() - start < timeout
                   and cherrypy.engine.state == cherrypy.engine.states.STARTED
                   and (endtime is None or time.time() < endtime)):
                # Display new log info from this job since the
                # last execution of this loop.
                job = jobApi.model('job',
                                   'jobs').load(jobId,
                                                user=jobApi.getCurrentUser(),
                                                level=AccessType.READ)
                newLog = job['log']
                if newLog != oldLog:
                    start = time.time()
                    logDiff = newLog[newLog.find(oldLog) + len(oldLog):]
                    oldLog = newLog
                    # We send a separate message for each line,
                    # as I discovered that any information after the
                    # first newline was being lost...
                    for line in logDiff.rstrip().split('\n'):
                        yield sseMessage(line)
                if endtime is None:
                    result = AsyncResult(celeryTaskId,
                                         backend=getCeleryApp().backend)
                    if (result.state == celery.states.FAILURE
                            or result.state == celery.states.SUCCESS
                            or result.state == celery.states.REVOKED):
                        # Stop checking for messages in 5 seconds
                        endtime = time.time() + 5
                time.sleep(0.5)

            # Signal the end of the stream
            yield 'event: eof\ndata: null\n\n'

            # One more for good measure - client should not get this
            yield 'event: past-end\ndata: null\n\n'

        return streamGen

    @access.public
    def flowStopRun(jobId, params):
        task = AsyncResult(jobId, backend=getCeleryApp().backend)
        task.revoke(getCeleryApp().broker_connection(), terminate=True)
        return {'status': task.state}

    flowStopRun.description = (
        Description('Stop execution of the specified job').param(
            'jobId', 'The Job ID for this task'))

    info['apiRoot'].flow_validator = Validator(getCeleryApp())
    info['apiRoot'].item.route(
        'POST', ('flow', ':inputType', ':inputFormat', ':outputFormat'),
        flowConvertData)

    info['apiRoot'].item.route('GET', (':itemId', 'flow', ':jobId', 'status'),
                               flowRunStatus)

    info['apiRoot'].item.route('GET', (':itemId', 'flow', ':jobId', 'result'),
                               flowRunResult)

    info['apiRoot'].item.route('POST', (':itemId', 'flow'), flowRun)

    info['apiRoot'].item.route('GET', (':itemId', 'flow', ':jobId', 'output'),
                               flowRunOutput)

    info['apiRoot'].item.route(
        'GET',
        (':itemId', 'flow', ':inputType', ':inputFormat', ':outputFormat'),
        flowConvert)

    info['apiRoot'].item.route('DELETE', (':itemId', 'flow', ':jobId'),
                               flowStopRun)

    events.bind('model.setting.validate', 'flow', validateSettings)
Example #24
File: __init__.py  Project: Kitware/flow
    def flowStopRun(jobId, params):
        task = AsyncResult(jobId, backend=getCeleryApp().backend)
        task.revoke(getCeleryApp().broker_connection(), terminate=True)
        return {'status': task.state}