Example #1
    def post(self):
        
        params = self.request.POST
        
        if 'delete' in params:

            # The jobs to delete are specified in the checkboxes
            jobs_to_delete = params.getall('select_job')
        
            service = backendservices(self.user_data)

            # Select the jobs to delete from the datastore
            result = {}
            for job_name in jobs_to_delete:
                try:
                    job = db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2", self.user.user_id(),job_name).get()
                except Exception as e:
                    result = {'status': False, 'msg': "Could not retrieve the job " + job_name + " from the datastore."}
                    continue

                job.delete()
    
            # Render the status page 
            # AH: This is a hack to prevent the page from reloading before the datastore transactions
            # have taken place. I think it is only necessary for the SQLite backend stub.
            # TODO: We need a better way to check if the entities are gone from the datastore...
            time.sleep(0.5)
            context = self.getContext()
            self.render_response('status.html', **dict(result,**context))
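A minimal sketch of one way to address the TODO above: poll the datastore until the deleted entities stop showing up (or a timeout expires) instead of sleeping for a fixed interval. The helper below is hypothetical and only assumes the StochKitJobWrapper query already used in this example.

import time
from google.appengine.ext import db

def wait_until_jobs_deleted(user_id, job_names, timeout=5.0, interval=0.1):
    """Return True once no matching StochKitJobWrapper entities remain, False on timeout."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        remaining = [name for name in job_names
                     if db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                                    user_id, name).get() is not None]
        if not remaining:
            return True
        time.sleep(interval)
    return False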
Example #2
    def delete(self, handler):
        logging.debug("StochOptimJobWrapper(cloudDatabaseID={0})".format(
            self.cloudDatabaseID))
        if self.outData is not None and os.path.exists(self.outData):
            shutil.rmtree(self.outData)

        if self.zipFileName is not None and os.path.exists(self.zipFileName):
            os.remove(self.zipFileName)

        try:
            self.stop(handler)
        except Exception as e:
            logging.exception(e)

        # delete on cloud
        if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
            try:
                service = backendservices(handler.user_data)
                service.deleteTasks(self)
            except Exception as e:
                logging.error(
                    "Failed to delete cloud resources of job {0}".format(
                        self.key().id()))
                logging.error(e)

        super(StochOptimJobWrapper, self).delete()
Example #3
 def addSensitivityJob(self, job, globalOp = False):
     if job.status == "Finished":
         # Shared fields
         jsonJob = { "version" : self.version,
                     "userId" : job.userId,
                     "jobName" : job.jobName,
                     "startTime" : job.startTime,
                     "indata" : json.loads(job.indata),
                     "status" : job.status }
         if job.resource == "local":
             outputLocation = self.addFolder('sensitivityJobs/data/{0}'.format(job.jobName), job.outData)
             jsonJob["outData"] = outputLocation
         elif job.resource == "cloud":
             jsonJob["outputURL"] = job.outputURL
             # Only grab S3 data if user wants us to
             if (job.jobName in self.sensitivityJobsToDownload) or globalOp:
                 if job.outData is None or (job.outData is not None and not os.path.exists(job.outData)):
                     # Grab the output from S3 if we need to
                     service = backendservices()
                     service.fetchOutput(job.cloudDatabaseID, job.outputURL)
                     # Unpack it to its local output location
                     os.system('tar -xf ' + job.cloudDatabaseID + '.tar')
                     job.outData = os.path.dirname(os.path.abspath(__file__))+'/../output/'+job.cloudDatabaseID
                     job.outData = os.path.abspath(job.outData)
                     # Update the DB entry
                     job.put()
                     # Clean up
                     os.remove(job.cloudDatabaseID+'.tar')
                 outputLocation = self.addFolder('sensitivityJobs/data/{0}'.format(job.jobName), job.outData)
                 jsonJob["outData"] = outputLocation
         self.addBytes('sensitivityJobs/{0}.json'.format(job.jobName), json.dumps(jsonJob, sort_keys=True, indent=4, separators=(', ', ': ')))
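The os.system('tar -xf ...') call above shells out to unpack the fetched archive. A minimal alternative sketch (not from the original code) using the standard-library tarfile module, assuming the archive should land in the ../output directory that job.outData points at:

import os
import tarfile

def unpack_cloud_output(cloud_database_id):
    """Extract <cloudDatabaseID>.tar into ../output (relative to this module) and return the path."""
    archive = '{0}.tar'.format(cloud_database_id)
    out_dir = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', 'output'))
    with tarfile.open(archive) as tar:
        tar.extractall(path=out_dir)  # assumes the archive contents are trusted
    os.remove(archive)
    return os.path.join(out_dir, cloud_database_id)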
Example #4
    def delete(self, handler):
        job = self
        stochkit_job = job.stochkit_job

        # TODO: Call the backend to kill and delete the job and all associated files.
        service = backendservices()

        if job.stochkit_job.zipFileName:
            if os.path.exists(job.stochkit_job.zipFileName):
                os.remove(job.stochkit_job.zipFileName)

        if stochkit_job.resource == "Local":
            service.deleteTaskLocal([stochkit_job.pid])

            time.sleep(0.25)

            status = service.checkTaskStatusLocal([stochkit_job.pid]).values()[0]
        else:
            db_credentials = handler.user_data.getCredentials()
            os.environ["AWS_ACCESS_KEY_ID"] = db_credentials["EC2_ACCESS_KEY"]
            os.environ["AWS_SECRET_ACCESS_KEY"] = db_credentials["EC2_SECRET_KEY"]
            service.deleteTasks([(stochkit_job.celery_pid, stochkit_job.pid)])

        if os.path.exists(stochkit_job.output_location):
            shutil.rmtree(stochkit_job.output_location)

        super(StochKitJobWrapper, self).delete()
Example #5
    def delete(self, handle):
        self.stop(handle.user_data)

        if self.resource.lower() == 'molns':
            # TODO: call 'molns exec cleanup'
            pass
        else:
            # Call the backend to kill and delete the job and all associated files.
            service = backendservices(handle.user_data)

            if self.zipFileName is not None and os.path.exists(
                    self.zipFileName):
                os.remove(self.zipFileName)

            # Delete the output results of the execution locally, if they exist.
            if self.outData is not None and os.path.exists(str(self.outData)):
                shutil.rmtree(self.outData)

            if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
                try:
                    service.deleteTasks(self)
                except Exception as e:
                    logging.error(
                        "Failed to delete cloud resources of job {0}".format(
                            self.key().id()))
                    logging.error(e)

        super(StochKitJobWrapper, self).delete()
Example #6
 def fetchCloudOutput(self, stochkit_job_wrapper):
     '''
     '''
     try:
         result = {}
         stochkit_job = stochkit_job_wrapper.stochkit_job
         # Grab the remote files
         service = backendservices()
         service.fetchOutput(stochkit_job.pid, stochkit_job.output_url)
         
         # Unpack it to its local output location
         os.system('tar -xf ' + stochkit_job.uuid + '.tar')
         stochkit_job.output_location = os.path.abspath(os.path.dirname(__file__))+'/../output/'+stochkit_job.uuid
         stochkit_job.output_location = os.path.abspath(stochkit_job.output_location)
         
         # Clean up
         os.remove(stochkit_job.uuid+'.tar')
         
         # Save the updated status
         stochkit_job_wrapper.put()
         
         result['status'] = True
         result['msg'] = "Successfully fetched the remote output files."

     except Exception as e:
         logging.info('************************************* {0}'.format(e))
         result['status'] = False
         result['msg'] = "Failed to fetch the remote files."

     return result
Example #7
    def delete(self, handle):
        self.stop(handle.user_data)

        if self.resource.lower() == 'molns':
            # TODO: call 'molns exec cleanup'
            pass
        else:
            # Call the backend to kill and delete the job and all associated files.
            service = backendservices(handle.user_data)

            if self.zipFileName is not None and os.path.exists(self.zipFileName):
                os.remove(self.zipFileName)
            
            # Delete the output results of the execution locally, if they exist.
            if self.outData is not None and os.path.exists(str(self.outData)):
                shutil.rmtree(self.outData)

            if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
                try:
                    service.deleteTasks(self)
                except Exception as e:
                    logging.error("Failed to delete cloud resources of job {0}".format(self.key().id()))
                    logging.error(e)

        super(StochKitJobWrapper, self).delete()
Example #8
    def delete(self, handler):
        self.stop(handler)
        service = backendservices(handler.user_data)

        #delete the local output
        if self.zipFileName is not None and os.path.exists(self.zipFileName):
            os.remove(self.zipFileName)

        if self.preprocessedDir is not None and os.path.exists(
                str(self.preprocessedDir)):
            shutil.rmtree(str(self.preprocessedDir))

        if self.vtkFileName is not None and os.path.exists(self.vtkFileName):
            os.remove(self.vtkFileName)

        if self.outData is not None and os.path.exists(self.outData):
            shutil.rmtree(self.outData)

        # delete on cloud
        if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
            try:
                service.deleteTasks(self)
            except Exception as e:
                logging.error(
                    "Failed to delete cloud resources of job {0}".format(
                        self.key().id()))
                logging.error(e)

        super(SpatialJobWrapper, self).delete()
Example #9
    def delete(self, handler):
        self.stop(handler)
        service = backendservices(handler.user_data)
        
        #delete the local output
        if self.zipFileName is not None and os.path.exists(self.zipFileName):
            os.remove(self.zipFileName)

        if self.preprocessedDir is not None and os.path.exists(str(self.preprocessedDir)):
            shutil.rmtree(str(self.preprocessedDir))
                
        if self.vtkFileName is not None and os.path.exists(self.vtkFileName):
            os.remove(self.vtkFileName)

        if self.outData is not None and os.path.exists(self.outData):
            shutil.rmtree(self.outData)

        # delete on cloud
        if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
            try:
                service.deleteTasks(self)
            except Exception as e:
                logging.error("Failed to delete cloud resources of job {0}".format(self.key().id()))
                logging.error(e)
        
        super(SpatialJobWrapper, self).delete()
Example #10
    def addStochKitJob(self, job, globalOp = False):
        stochkit_job = job.stochkit_job

        # Only export finished jobs
        if stochkit_job.status == "Finished":
            # These are fields shared among all jobs
            jsonJob = { "version" : self.version,
                        "name" : job.name,
                        "user_id" : job.user_id,
                        "stdout" : job.stdout,
                        "stderr" : job.stderr,
                        # These are things contained in the stochkit_job object
                        "type" : stochkit_job.type,
                        "status" : stochkit_job.status,
                        "modelName" : job.modelName,
                        "final_time" : stochkit_job.final_time,
                        "increment" : stochkit_job.increment,
                        "units" : job.stochkit_job.units,
                        "realizations" : stochkit_job.realizations,
                        "exec_type" : stochkit_job.exec_type,
                        "store_only_mean" : stochkit_job.store_only_mean,
                        "label_column_names" : stochkit_job.label_column_names,
                        "create_histogram_data" : stochkit_job.create_histogram_data,
                        "epsilon" : stochkit_job.epsilon,
                        "threshold" : stochkit_job.threshold,
                        "pid" : stochkit_job.pid,
                        "result" : stochkit_job.result }
            # For cloud jobs, we need to include the output_url and possibly grab the results from S3
            if stochkit_job.resource == 'Cloud':
                jsonJob["output_url"] = job.stochkit_job.output_url
                # Only grab S3 data if user wants us to
                #print 'globalOP', globalOp
                if (job.name in self.stochKitJobsToDownload) or globalOp:
                    if stochkit_job.output_location is None or (stochkit_job.output_location is not None and not os.path.exists(stochkit_job.output_location)):
                        # Grab the output from S3 if we need to
                        service = backendservices()
                        service.fetchOutput(stochkit_job.pid, stochkit_job.output_url)
                        # Unpack it to its local output location
                        os.system('tar -xf ' + stochkit_job.uuid + '.tar')
                        stochkit_job.output_location = os.path.dirname(os.path.abspath(__file__))+'/../output/'+stochkit_job.uuid
                        stochkit_job.output_location = os.path.abspath(stochkit_job.output_location)
                        # Update the DB entry
                        job.put()
                        # Clean up
                        os.remove(stochkit_job.uuid+'.tar')
                    # Add its data to the zip archive
                    outputLocation = self.addFolder('stochkitJobs/data/{0}'.format(job.name), stochkit_job.output_location)
                    jsonJob["output_location"] = outputLocation
            # For local jobs, we need to include the output location in the zip archive
            elif stochkit_job.resource == 'Local':
                outputLocation = self.addFolder('stochkitJobs/data/{0}'.format(job.name), stochkit_job.output_location)
                jsonJob["stdout"] = "{0}/stdout".format(outputLocation)
                jsonJob["stderr"] = "{0}/stderr".format(outputLocation)
                jsonJob["output_location"] = outputLocation
            # Also be sure to include any extra attributes of job
            if job.attributes:
                jsonJob.update(job.attributes)
            # Add the JSON to the zip archive
            self.addBytes('stochkitJobs/{0}.json'.format(job.name), json.dumps(jsonJob, sort_keys=True, indent=4, separators=(', ', ': ')))
Example #11
 def get(self):
     context = {
         'isAdminUser': self.user.is_admin_user()
     }
     # We can only pull results from S3 if we have valid AWS credentials
     if self.user_data.valid_credentials:
         credentials = self.user_data.getCredentials()
         # Get all the cloud jobs
         stochkit_jobs = db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1", self.user.user_id()).fetch(100000)
         stochkit_jobs = [job for job in stochkit_jobs if job.stochkit_job.resource == "Cloud" and job.stochkit_job.status == "Finished"]
         # Create the dictionary to pass to backend to check for sizes
         output_results_to_check = {}
         for cloud_job in stochkit_jobs:
             s3_url_segments = cloud_job.stochkit_job.output_url.split('/')
             # S3 URLs are in the form https://s3.amazonaws.com/bucket_name/key/name
             bucket_name = s3_url_segments[3]
             # key_name is the concatenation of all segments after the bucket_name
             key_name = '/'.join(s3_url_segments[4:])
             if bucket_name in output_results_to_check.keys():
                 output_results_to_check[bucket_name] += [(key_name, cloud_job.name)]
             else:
                 output_results_to_check[bucket_name] = [(key_name, cloud_job.name)]
         # Sensitivity Jobs
         sensi_jobs = db.GqlQuery("SELECT * FROM SensitivityJobWrapper WHERE userId = :1", self.user.user_id())
         sensi_jobs = [job for job in sensi_jobs if job.resource == "cloud" and job.status == "Finished"]
         for cloud_job in sensi_jobs:
             s3_url_segments = cloud_job.outputURL.split('/')
             # S3 URLs are in the form https://s3.amazonaws.com/bucket_name/key/name
             bucket_name = s3_url_segments[3]
             # key_name is the concatenation of all segments after the bucket_name
             key_name = '/'.join(s3_url_segments[4:])
             if bucket_name in output_results_to_check.keys():
                 output_results_to_check[bucket_name] += [(key_name, cloud_job.jobName)]
             else:
                 output_results_to_check[bucket_name] = [(key_name, cloud_job.jobName)]
         # Get all the job sizes from the backend
         service = backendservices()
         job_sizes = service.getSizeOfOutputResults(credentials['EC2_ACCESS_KEY'], credentials['EC2_SECRET_KEY'], output_results_to_check)
         # Add all of the relevant jobs to the context so they will be rendered on the page
         context["stochkit_jobs"] = []
         context["sensitivity_jobs"] = []
         for cloud_job in stochkit_jobs:
             job_name = cloud_job.name
             if job_name in job_sizes.keys():
                 # These are the relevant jobs
                 context["stochkit_jobs"].append({
                     'name': job_name,
                     'exec_type': cloud_job.stochkit_job.exec_type,
                     'size': '{0} KB'.format(round(float(job_sizes[job_name])/1024, 1))
                 })
         for cloud_job in sensi_jobs:
             job_name = cloud_job.jobName
             if job_name in job_sizes.keys():
                 context["sensitivity_jobs"].append({
                     'name': job_name,
                     'exec_type': 'sensitivity_jobs',
                     'size': '{0} KB'.format(round(float(job_sizes[job_name])/1024, 1))
                 })
     return self.render_response('exportimport.html', **context)
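The bucket and key extraction above indexes directly into split('/'). A small hypothetical helper (not in the original) that does the same with urlparse, for path-style S3 URLs of the form https://s3.amazonaws.com/bucket_name/key/name:

from urlparse import urlparse  # urllib.parse on Python 3

def split_s3_url(output_url):
    """Return (bucket_name, key_name) for a path-style S3 URL."""
    path = urlparse(output_url).path.lstrip('/')
    bucket_name, _, key_name = path.partition('/')
    return bucket_name, key_name

Either loop above could then call split_s3_url(cloud_job.stochkit_job.output_url) or split_s3_url(cloud_job.outputURL) and get the same pair.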
Example #12
    def stop(self, user_data):
        # TODO: Call the backend to kill and delete the job and all associated files.
        service = backendservices(user_data)

        if self.resource.lower() == 'local':
            service.stopTaskLocal([self.pid])
        else:
            service.stopTasks(self)
Example #13
 def post(self):
     self.response.headers['Content-Type'] = 'application/json'
       
     req_type = self.request.get('req_type')
     job_type = self.request.get('job_type')
     id = self.request.get('id')
     instance_type = self.request.get('instance_type')
     
     if req_type == 'analyze':
         logging.info('Analyzing the cost...')
         #job_type = params['job_type']
         
         logging.info('rerun cost analysis in '+instance_type)
         credentials =  self.user_data.getCredentials()
         access_key = credentials['EC2_ACCESS_KEY']
         secret_key = credentials['EC2_SECRET_KEY']
         backend_services = backendservice.backendservices(self.user_data)
         
         if not self.user_data.valid_credentials or not backend_services.isOneOrMoreComputeNodesRunning(instance_type):
             logging.info('You must have at least one active '+instance_type+' compute node to run in the cloud.')
             self.response.write(json.dumps({
                 'status': False,
                 'msg': 'You must have at least one active '+instance_type+' compute node to run in the cloud.'
             }))
             return
         
         result = {}
         try:        
                 uuid, _ = self.get_uuid_name(id, job_type)
                 logging.info('start to rerun the job {0} for cost analysis'.format(str(uuid)))
                 # Set up CloudTracker with user credentials and specified UUID to rerun the job
                 ct = CloudTracker(access_key, secret_key, str(uuid), self.user_data.getBucketName())
                 has_prov = not ct.if_tracking() 
                 # If there is no provenance data for this job, report an error to the user
                 if not has_prov:
                     result = {'status':"fail",'msg':"The job with this ID does not exist or cannot be reproduced."}    
                     self.response.write(json.dumps(result))
                     return
             
                 params = ct.get_input()
                 
                 params['cost_analysis_uuid'] = uuid
                 
                 cloud_result = backend_services.submit_cloud_task(params, agent_type = AgentTypes.EC2, instance_type = instance_type, cost_replay = True)
                 
                 if not cloud_result["success"]:
                     e = cloud_result["exception"]
                     result = {
                               'status': False,
                               'msg': 'Cloud execution failed: '+str(e)
                              }
                     return result 
                 
                 result = {'status':True,'msg':'Cost analysis submitted successfully.'}
         
         
          except Exception as e:
                  result = {'status': False, 'msg': 'Cloud execution failed: ' + str(e)}
Example #14
    def __init__(self, request, response):

        self.auth = auth.get_auth()

        # If not logged in, the dispatch() call will redirect to /login if needed
        if self.logged_in():
            # Make sure a handler has a reference to the current user
            user_dict = self.auth.get_user_by_session()
            self.user = self.auth.store.user_model.get_by_id(
                user_dict['user_id'])

            # Most pages will need the UserData, so for convenience we add it here.
            self.user_data = db.GqlQuery(
                "SELECT * FROM UserData WHERE user_id = :1",
                self.user.user_id()).get()

            # If the user_data does not exist in the datastore, we instantiate it here
            if self.user_data == None:

                user_data = UserData()
                user_data.user_id = self.user.user_id()

                # Get optional app-instance configurations and add those to user_data
                credentials = {'EC2_SECRET_KEY': "", 'EC2_ACCESS_KEY': ""}
                try:
                    env_variables = app.config.get('env_variables')
                    user_data.env_variables = json.dumps(env_variables)
                    if 'AWS_ACCESS_KEY' in env_variables:
                        credentials['EC2_ACCESS_KEY'] = env_variables[
                            'AWS_ACCESS_KEY']
                    if 'AWS_SECRET_KEY' in env_variables:
                        credentials['EC2_SECRET_KEY'] = env_variables[
                            'AWS_SECRET_KEY']
                except:
                    raise

                user_data.setCredentials(credentials)

                # Check if the credentials are valid
                service = backendservices(user_data)
                params = {}
                params['credentials'] = credentials
                params["infrastructure"] = "ec2"
                if service.validateCredentials(params):
                    user_data.valid_credentials = True
                else:
                    user_data.valid_credentials = False

                # Create a unique bucket name for the user
                import uuid

                user_data.setBucketName('stochss-output-' + str(uuid.uuid4()))

                user_data.put()
                self.user_data = user_data

        webapp2.RequestHandler.__init__(self, request, response)
Example #15
    def runCloud(self, data):
        self.user_data.set_selected(1)
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception(
                'No cloud computing resources found. (Have they been started?)'
            )

        # If the seed is negative, this means choose a seed >= 0 randomly
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        pymodel = self.construct_pyurdme_model(data)
        #logging.info('DATA: {0}'.format(data))
        #####
        cloud_params = {
            "job_type": "spatial",
            "simulation_algorithm": data['algorithm'],
            "simulation_realizations": data['realizations'],
            "simulation_seed": data['seed'],
            #            "bucketname" : self.user_data.getBucketName(),  #implys EC2, should be in backendservices
            "paramstring": '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(cloud_params)))

        cloud_params['document'] = pickle.dumps(pymodel)
        #logging.debug('PYURDME: {0}'.format(cloud_params['document']))

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params=cloud_params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception("Cloud execution failed: {0}".format(e))

        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = None  # Local placeholder; data fetched from the cloud is put here later
        job.modelName = pymodel.name
        job.resource = cloud_result['resource']
        job.cloudDatabaseID = taskid
        job.celeryPID = celery_task_id
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
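A hypothetical call into the handler above. The field names are taken from this snippet; construct_pyurdme_model() will usually need additional model fields that are not shown here, and the algorithm name is only an assumption.

data = {
    'jobName': 'spatial-demo',
    'algorithm': 'nsm',        # assumed algorithm name
    'realizations': 10,
    'seed': -1,                # negative seed => runCloud() draws a random seed >= 0
}
job = self.runCloud(data)      # assumes we are inside the same handler instance
logging.info('Submitted job %s as cloud task %s', job.name, job.cloudDatabaseID)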
Example #16
    def runCloud(self, data):
        self.user_data.set_selected(1)
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception('No cloud computing resources found. (Have they been started?)')

        # If the seed is negative, this means choose a seed >= 0 randomly
        if int(data['seed']) < 0:
            random.seed()
            data['seed'] = random.randint(0, 2147483647)

        pymodel = self.construct_pyurdme_model(data)
        #logging.info('DATA: {0}'.format(data))
        #####
        cloud_params = {
            "job_type": "spatial",
            "simulation_algorithm" : data['algorithm'],
            "simulation_realizations" : data['realizations'],
            "simulation_seed" : data['seed'],
#            "bucketname" : self.user_data.getBucketName(),  #implys EC2, should be in backendservices
            "paramstring" : '',
        }

        logging.debug('cloud_params = {}'.format(pprint.pformat(cloud_params)))

        cloud_params['document'] = pickle.dumps(pymodel)
        #logging.debug('PYURDME: {0}'.format(cloud_params['document']))

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params=cloud_params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception("Cloud execution failed: {0}".format(e))
        
        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        job = SpatialJobWrapper()
        job.type = 'PyURDME Ensemble'
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.outData = None  # Local placeholder; data fetched from the cloud is put here later
        job.modelName = pymodel.name
        job.resource = cloud_result['resource']
        job.cloudDatabaseID = taskid
        job.celeryPID = celery_task_id
        job.status = "Running"
        job.output_stored = "True"
        job.put()

        return job
Example #17
    def __init__(self, request, response):

        self.auth = auth.get_auth()

        # If not logged in, the dispatch() call will redirect to /login if needed
        if self.logged_in():
            # Make sure a handler has a reference to the current user
            user_dict = self.auth.get_user_by_session()
            self.user = self.auth.store.user_model.get_by_id(user_dict['user_id'])

            # Most pages will need the UserData, so for convenience we add it here.
            self.user_data = db.GqlQuery("SELECT * FROM UserData WHERE user_id = :1", self.user.user_id()).get()

            # If the user_data does not exist in the datastore, we instantiate it here
            if self.user_data == None:

                user_data = UserData()
                user_data.user_id = self.user.user_id()

                # Get optional app-instance configurations and add those to user_data
                credentials = {'EC2_SECRET_KEY': "",
                               'EC2_ACCESS_KEY': ""}
                try:
                    env_variables = app.config.get('env_variables')
                    user_data.env_variables = json.dumps(env_variables)
                    if 'AWS_ACCESS_KEY' in env_variables:
                        credentials['EC2_ACCESS_KEY'] = env_variables['AWS_ACCESS_KEY']
                    if 'AWS_SECRET_KEY' in env_variables:
                        credentials['EC2_SECRET_KEY'] = env_variables['AWS_SECRET_KEY']
                except:
                    raise

                user_data.setCredentials(credentials)

                # Check if the credentials are valid
                service = backendservices(user_data)
                params = {}
                params['credentials'] = credentials
                params["infrastructure"] = "ec2"
                if service.validateCredentials(params):
                    user_data.valid_credentials = True
                else:
                    user_data.valid_credentials = False

                # Create a unique bucket name for the user
                import uuid

                user_data.setBucketName('stochss-output-' + str(uuid.uuid4()))

                user_data.put()
                self.user_data = user_data

        webapp2.RequestHandler.__init__(self, request, response)
Example #18
    def get(self):
        """ """
        env_variables = self.user_data.env_variables
        if env_variables == None:
            context = {}
        else:
            context = json.loads(env_variables)

        logging.info(context)

        # Check if there is an internet connection available
        if internet_on():
            # Check whether updates are available. Assume a divergent branch can be updated (not strictly true, but good enough until local changes are supported).
            h = subprocess.Popen('git remote update'.split(),
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            stdout, stderr = h.communicate()

            h = subprocess.Popen('git status -uno'.split(),
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            stdout, stderr = h.communicate()

            update_available = re.search('behind', stdout)

            if update_available:
                service = backendservices()

                all_stochkit_jobs = db.GqlQuery(
                    "SELECT * FROM StochKitJobWrapper WHERE user_id = :1",
                    self.user.user_id())

                # Check to make sure no jobs are running
                context['nojobs'] = True
                if all_stochkit_jobs != None:
                    for job in all_stochkit_jobs.run():
                        stochkit_job = job.stochkit_job
                        if not stochkit_job.status == "Finished":
                            res = service.checkTaskStatusLocal(
                                [stochkit_job.pid])
                            if res[stochkit_job.pid]:
                                context['nojobs'] = False

                context['update'] = True
            else:
                context['update'] = False

            context['nointernet'] = False
        else:
            context['nointernet'] = True

        self.render_response("updates.html", **context)
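internet_on() is referenced but not defined in this example. A minimal assumed implementation (not from the original source) with the behaviour the snippet relies on:

import urllib2  # urllib.request on Python 3

def internet_on(url='http://www.google.com', timeout=2):
    """Best-effort connectivity check: True if the URL answers within the timeout."""
    try:
        urllib2.urlopen(url, timeout=timeout)
        return True
    except Exception:
        return False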
Example #19
    def post(self):
        
        params = self.request.POST
        
        if 'delete' in params:

            # The jobs to delete are specified in the checkboxes
            jobs_to_delete = params.getall('select_job')
        
            service = backendservices()

            # Select the jobs to delete from the datastore
            result = {}
            for job_name in jobs_to_delete:
                try:
                    job = db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2", self.user.user_id(),job_name).get()
                    stochkit_job = job.stochkit_job
                except Exception as e:
                    result = {'status': False, 'msg': "Could not retrieve the job " + job_name + " from the datastore."}
                    continue
        
                # TODO: Call the backend to kill and delete the job and all associated files.
                try:
                    if stochkit_job.resource == 'Local':
                        service.deleteTaskLocal([stochkit_job.pid])

                        time.sleep(0.25)

                        status = service.checkTaskStatusLocal([stochkit_job.pid]).values()[0]

                        if status:
                            raise Exception("")
                    else:
                        db_credentials = self.user_data.getCredentials()
                        os.environ["AWS_ACCESS_KEY_ID"] = db_credentials['EC2_ACCESS_KEY']
                        os.environ["AWS_SECRET_ACCESS_KEY"] = db_credentials['EC2_SECRET_KEY']
                        service.deleteTasks([(stochkit_job.celery_pid,stochkit_job.pid)])
                    isdeleted_backend = True
                except Exception as e:
                    isdeleted_backend = False
                    result['status'] = False
                    result['msg'] = "Failed to delete task with PID " + str(stochkit_job.celery_pid) + ": " + str(e)
                #        
                if isdeleted_backend:
                    # Delete all the local files and delete the job from the datastore
                    try:
                        # We remove the local entry of the job output directory
                        if os.path.exists(stochkit_job.output_location):
                            shutil.rmtree(stochkit_job.output_location)
                        db.delete(job)
                    except Exception as e:
                        result = {'status': False, 'msg': "Failed to delete job " + job_name + ": " + str(e)}
Example #20
 def stop(self, user_data):
     # TODO: Call the backend to kill and delete the job and all associated files.
     if self.resource.lower() == 'molns':
         return
     else:
         service = backendservices(user_data)
         if self.resource is None:
             return # avoid error on "NoneType.lower()"
         elif self.resource.lower() == 'local':
             service.stopTaskLocal([self.pid])
         elif self.resource.lower() == 'qsub' or self.resource.lower() == 'molns':
             return # can't stop batching processing tasks (at least not easily)
         else:
             service.stopTasks(self)
Example #21
 def stop(self, user_data):
     # TODO: Call the backend to kill and delete the job and all associated files.
     if self.resource.lower() == 'molns':
         return
     else:
         service = backendservices(user_data)
         if self.resource is None:
             return  # avoid error on "NoneType.lower()"
         elif self.resource.lower() == 'local':
             service.stopTaskLocal([self.pid])
         elif self.resource.lower() == 'qsub' or self.resource.lower(
         ) == 'molns':
             return  # can't stop batching processing tasks (at least not easily)
         else:
             service.stopTasks(self)
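Both stop() variants above call self.resource.lower() before the is None guard, so a None resource can still raise AttributeError. A reordered sketch (an assumption, not the original code):

def stop(self, user_data):
    if self.resource is None:
        return  # nothing was ever started
    resource = self.resource.lower()
    if resource in ('molns', 'qsub'):
        return  # batch/MOLNS tasks cannot (easily) be stopped here
    service = backendservices(user_data)
    if resource == 'local':
        service.stopTaskLocal([self.pid])
    else:
        service.stopTasks(self)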
Example #22
 def stop(self, handler):
     if self.status == "Running" or self.status == "Pending":
         service = backendservices(handler.user_data)
         if self.resource is not None and self.resource.lower() == "local":
             service.stopTaskLocal([int(self.pid)])
         elif self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
             # Write the finalized file
             if self.outData is None or not os.path.exists(self.outData):
                 self.outData = os.path.abspath(
                     os.path.dirname(os.path.abspath(__file__)) +
                     '/../output/' + self.cloudDatabaseID)
                 try:
                     logging.debug(
                         'stochoptim_job.stop() outData is None, making directory = {0}'
                         .format(self.outData))
                     os.mkdir(self.outData)
                 except Exception as e:
                     logging.exception(e)
                     #TODO, comment out above
                     #pass
             else:
                 logging.debug(
                     'stochoptim_job.stop() outData is not None, = {0}'.
                     format(self.outData))
             try:
                 file_to_check = "{0}/return_code".format(self.outData)
                 if not os.path.exists(file_to_check):
                     with open(file_to_check, 'w+') as fd:
                         fd.write(str(1))
             except Exception as e:
                 logging.exception(e)
             result = service.stopTasks(self)
             if result and result[self.cloudDatabaseID]:
                 final_cloud_result = result[self.cloudDatabaseID]
                 try:
                     self.outputURL = final_cloud_result['output']
                 except KeyError:
                     pass
                 self.status = "Finished"
                 self.put()
                 return True
             else:
                 # Something went wrong
                 logging.error(result)
                 return False
         else:
             raise Exception("Unknown job resource '{0}'".format(
                 self.resource))
Example #23
    def get(self):
        """ """
        env_variables = self.user_data.env_variables
        if env_variables == None:
            context = {}
        else:
            context = json.loads(env_variables)

        logging.info(context)

        # Check if there is an internet connection available
        if internet_on():
            # Check whether updates are available. Assume a divergent branch can be updated (not strictly true, but good enough until local changes are supported).
            h = subprocess.Popen("git remote update".split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = h.communicate()

            h = subprocess.Popen("git status -uno".split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, stderr = h.communicate()

            update_available = re.search("behind", stdout)

            if update_available:
                service = backendservices()

                all_stochkit_jobs = db.GqlQuery(
                    "SELECT * FROM StochKitJobWrapper WHERE user_id = :1", self.user.email_address
                )

                # Check to make sure no jobs are running
                context["nojobs"] = True
                if all_stochkit_jobs != None:
                    for job in all_stochkit_jobs.run():
                        stochkit_job = job.stochkit_job
                        if not stochkit_job.status == "Finished":
                            res = service.checkTaskStatusLocal([stochkit_job.pid])
                            if res[stochkit_job.pid]:
                                context["nojobs"] = False

                context["update"] = True
            else:
                context["update"] = False

            context["nointernet"] = False
        else:
            context["nointernet"] = True

        self.render_response("updates.html", **context)
Example #24
    def stop(self, handler):
        # TODO: Call the backend to kill and delete the job and all associated files.
        service = backendservices(handler.user_data)

        if self.resource == "molns":
            molnsConfigDb = db.GqlQuery("SELECT * FROM MolnsConfigWrapper WHERE user_id = :1", handler.user.user_id()).get()

            if not molnsConfigDb:
                return

            config = molns.MOLNSConfig(config_dir = molnsConfigDb.folder)

            # Stopping is deleting cloud data for this job type
            try:
                molns.MOLNSExec.cleanup_job([self.molnsPID], config)
            except Exception as e:
                logging.info("Error while deleting cloud data: {0}".format(e))
Example #25
    def delete(self, handler):
        self.stop(handler)
        
        if self.outData is not None and os.path.exists(self.outData):
            shutil.rmtree(self.outData)

        if self.zipFileName is not None and os.path.exists(self.zipFileName):
            os.remove(self.zipFileName)

        if self.resource is not None and self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
            try:
                service = backendservices(handler.user_data)
                service.deleteTasks(self)
            except Exception as e:
                logging.error("Failed to delete cloud resources of job {0}".format(self.key().id()))
                logging.error(e)

        super(SensitivityJobWrapper, self).delete()
Example #26
 def isOneOrMoreComputeNodesRunning(self, credentials):
     """
     Checks for the existence of running compute nodes. Only need one running compute node
     to be able to run a job in the cloud.
     """
     try:
         service = backendservices()
         params = {"infrastructure": "ec2", "credentials": credentials}
         all_vms = service.describeMachines(params)
         if all_vms == None:
             return False
         # Just need one running vm
         for vm in all_vms:
             if vm != None and vm["state"] == "running":
                 return True
         return False
     except:
         return False
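A hypothetical caller for the check above, mirroring how credentials are fetched in the other examples on this page:

credentials = self.user_data.getCredentials()  # {'EC2_ACCESS_KEY': ..., 'EC2_SECRET_KEY': ...}
if not self.isOneOrMoreComputeNodesRunning(credentials):
    raise Exception('No cloud computing resources found. (Have they been started?)')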
Example #27
 def stop(self, handler):
     if self.status == "Running" or self.status == "Pending":
         service = backendservices(handler.user_data)
         if self.resource is not None and self.resource.lower() == "local":
             service.stopTaskLocal([int(self.pid)])
         elif self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
             # Write the finalized file
             if self.outData is None or not os.path.exists(self.outData):
                 self.outData = os.path.abspath(
                     os.path.dirname(os.path.abspath(__file__))+'/../output/'+self.cloudDatabaseID
                 )
                 try:
                     logging.debug('stochoptim_job.stop() outData is None, making directory = {0}'.format(self.outData))
                     os.mkdir(self.outData)
                 except Exception as e:
                     logging.exception(e)
                     #TODO, comment out above
                     #pass
             else:
                 logging.debug('stochoptim_job.stop() outData is not None, = {0}'.format(self.outData))
             try:
                 file_to_check = "{0}/return_code".format(self.outData)
                 if not os.path.exists(file_to_check):
                     with open(file_to_check,'w+') as fd:
                         fd.write(str(1))
             except Exception as e:
                 logging.exception(e)
             result = service.stopTasks(self)
             if result and result[self.cloudDatabaseID]:
                 final_cloud_result = result[self.cloudDatabaseID]
                 try:
                     self.outputURL = final_cloud_result['output']
                 except KeyError:
                     pass
                 self.status = "Finished"
                 self.put()
                 return True
             else:
                 # Something went wrong
                 logging.error(result)
                 return False
         else:
             raise Exception("Unknown job resource '{0}'".format(self.resource))
Example #28
    def stop(self, handler):
        # TODO: Call the backend to kill and delete the job and all associated files.
        service = backendservices(handler.user_data)

        if self.resource == "molns":
            molnsConfigDb = db.GqlQuery(
                "SELECT * FROM MolnsConfigWrapper WHERE user_id = :1",
                handler.user.user_id()).get()

            if not molnsConfigDb:
                return

            config = molns.MOLNSConfig(config_dir=molnsConfigDb.folder)

            # Stopping is deleting cloud data for this job type
            try:
                molns.MOLNSExec.cleanup_job([self.molnsPID], config)
            except Exception as e:
                logging.info("Error while deleting cloud data: {0}".format(e))
Example #29
    def runCloud(self, data):
        '''
        '''
        job = SensitivityJobWrapper()
        job.resource = "cloud"
        job.userId = self.user.user_id()
        model = modeleditor.StochKitModelWrapper.get_by_id(data["id"])
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.jobName = data["jobName"]
        job.status = "Pending"
        job.modelName = model.model_name

        runtime = float(data["time"])
        dt = float(data["increment"])

        job.indata = json.dumps(data)

        parameters = []
        for parameter in data['selections']["pc"]:
            if data['selections']["pc"][parameter]:
                parameters.append(parameter)
        
        params = {
            "job_type": "sensitivity",
            "document": str( model.model.serialize() ),
            "paramstring": "stochkit_ode.py --sensi --parameters {0} -t {1} -i {2}".format( " ".join(parameters), runtime, int(runtime / dt)),
            "bucketname": self.user_data.getBucketName()
        }
        service = backendservices()
        db_credentials = self.user_data.getCredentials()
        # Set the environmental variables 
        os.environ["AWS_ACCESS_KEY_ID"] = db_credentials['EC2_ACCESS_KEY']
        os.environ["AWS_SECRET_ACCESS_KEY"] = db_credentials['EC2_SECRET_KEY']
        # Send the task to the backend
        cloud_result = service.executeTask(params)
        # if not cloud_result["success"]:
        job.cloudDatabaseID = cloud_result["db_id"]
        job.celeryPID = cloud_result["celery_pid"]
        job.outData = None
        job.zipFileName = None
        job.put()
        return job
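The commented-out line above hints at an error check that never made it in. A sketch of what it could look like, following the cloud_result pattern used by the runCloud() examples elsewhere on this page (an assumption, not part of the original snippet):

cloud_result = service.executeTask(params)
if not cloud_result["success"]:
    raise Exception("Cloud execution failed: {0}".format(cloud_result["exception"]))
job.cloudDatabaseID = cloud_result["db_id"]
job.celeryPID = cloud_result["celery_pid"]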
Example #30
    def addStochOptimJob(self, job, globalOp = False):
        jsonJob = { "version" : self.version,
                    "userId" : job.userId,
                    "pid" : job.pid,
                    "startTime" : job.startTime,
                    "jobName" : job.jobName,
                    "modelName" : job.modelName,
                    "indata" : json.loads(job.indata),
                    "nameToIndex" : json.loads(job.nameToIndex),
                    "outData" : job.outData,
                    "status" : job.status }
        
        # For cloud jobs, we need to include the output_url and possibly grab the results from S3
        if job.resource == 'cloud':
            jsonJob["output_url"] = job.outputURL
            # Only grab S3 data if user wants us to
            if (job.jobName in self.stochKitJobsToDownload) or globalOp:
                # Do we need to download it?
                if job.outData is None or (job.outData is not None and not os.path.exists(job.outData)):
                    # Grab the output from S3
                    service = backendservices()
                    service.fetchOutput(job.pid, job.outputURL)
                    # Unpack it to its local output location
                    os.system("tar -xf {0}.tar".format(job.pid))
                    # And update the db entry
                    job.outData = os.path.abspath(os.path.join(
                        os.path.dirname(os.path.abspath(__file__)),
                        "../output/{0}".format(job.pid)
                    ))
                    job.put()
                    os.remove("{0}.tar".format(job.pid))
        # Only add the folder (and its stdout/stderr paths) if it actually exists;
        # otherwise outputLocation below would be unbound.
        if job.outData is not None and os.path.exists(job.outData):
            outputLocation = self.addFolder('stochOptimJobs/data/{0}'.format(job.jobName), job.outData)
            jsonJob["outData"] = outputLocation
            jsonJob["stdout"] = "{0}/stdout".format(outputLocation)
            jsonJob["stderr"] = "{0}/stderr".format(outputLocation)

        self.addBytes('stochOptimJobs/{0}.json'.format(job.jobName), json.dumps(jsonJob, sort_keys=True, indent=4, separators=(', ', ': ')))
Example #31
 def stop(self, handler):
     if self.status == "Running":
         service = backendservices(handler.user_data)
         if self.resource == "local":
             service.stopTaskLocal([int(self.pid)])
         elif self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
             result = service.stopTasks(self)
             if result and result[self.cloudDatabaseID]:
                 final_cloud_result = result[self.cloudDatabaseID]
                 try:
                     self.outputURL = final_cloud_result['output']
                 except KeyError:
                     pass
                 self.status = "Finished"
                 self.put()
                 return True
             else:
                 # Something went wrong
                 logging.error(result)
                 return False
         else:
             raise Exception('Job Resource {0} not supported!'.format(self.resource))
Example #32
 def stop(self, handler):
     if self.status == "Running":
         service = backendservices(handler.user_data)
         if self.resource == "local":
             service.stopTaskLocal([int(self.pid)])
         elif self.resource in backendservices.SUPPORTED_CLOUD_RESOURCES:
             result = service.stopTasks(self)
             if result and result[self.cloudDatabaseID]:
                 final_cloud_result = result[self.cloudDatabaseID]
                 try:
                     self.outputURL = final_cloud_result['output']
                 except KeyError:
                     pass
                 self.status = "Finished"
                 self.put()
                 return True
             else:
                 # Something went wrong
                 logging.error(result)
                 return False
         else:
             raise Exception('Job Resource {0} not supported!'.format(
                 self.resource))
Example #33
    def post(self):

        params = self.request.POST

        if 'delete' in params:

            # The jobs to delete are specified in the checkboxes
            jobs_to_delete = params.getall('select_job')

            service = backendservices(self.user_data)

            # Select the jobs to delete from the datastore
            result = {}
            for job_name in jobs_to_delete:
                try:
                    job = db.GqlQuery(
                        "SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                        self.user.user_id(), job_name).get()
                except Exception as e:
                    result = {
                        'status': False,
                        'msg': "Could not retrieve the job " + job_name + " from the datastore."
                    }
                    continue

                job.delete()

            # Render the status page
            # AH: This is a hack to prevent the page from reloading before the datastore transactions
            # have taken place. I think it is only necessary for the SQLite backend stub.
            # TODO: We need a better way to check if the entities are gone from the datastore...
            time.sleep(0.5)
            context = self.getContext()
            self.render_response('status.html', **dict(result, **context))
Example #34
    def getContext(self):
        """ 
            Get the status of all the jobs that exist in the system and assemble a dict
            with info to display on the page. 
        """
        context = {}
        service = backendservices(self.user_data)
        # StochKit jobs
        all_stochkit_jobs = db.GqlQuery(
            "SELECT * FROM StochKitJobWrapper WHERE user_id = :1",
            self.user.user_id())
        all_jobs = []
        if all_stochkit_jobs != None:
            jobs = list(all_stochkit_jobs.run())
            jobs = sorted(
                jobs,
                key=lambda x:
                (datetime.datetime.strptime(x.startTime, '%Y-%m-%d-%H-%M-%S')
                 if hasattr(x, 'startTime') and x.startTime != None else
                 datetime.datetime.now()),
                reverse=True)
            for number, job in enumerate(jobs):
                number = len(jobs) - number
                all_jobs.append(
                    self.__process_getJobStatus(service, job, number))
        context['all_jobs'] = all_jobs

        # Sensitivity
        allSensJobs = []
        allSensQuery = db.GqlQuery(
            "SELECT * FROM SensitivityJobWrapper WHERE user_id = :1",
            self.user.user_id())
        if allSensQuery != None:
            jobs = list(allSensQuery.run())
            jobs = sorted(
                jobs,
                key=lambda x:
                (datetime.datetime.strptime(x.startTime, '%Y-%m-%d-%H-%M-%S')
                 if hasattr(x, 'startTime') and x.startTime != None else ''),
                reverse=True)
            for number, job in enumerate(jobs):
                number = len(jobs) - number
                allSensJobs.append(
                    self.__process_getJobStatus(service, job, number))
        context['allSensJobs'] = allSensJobs

        # Export
        allExportJobs = []
        exportJobsQuery = db.GqlQuery(
            "SELECT * FROM ExportJobWrapper WHERE user_id = :1",
            self.user.user_id())
        if exportJobsQuery != None:
            jobs = list(exportJobsQuery.run())
            jobs = sorted(
                jobs,
                key=lambda x:
                (datetime.datetime.strptime(x.startTime, '%Y-%m-%d-%H-%M-%S')
                 if hasattr(x, 'startTime') and x.startTime != None else ''),
                reverse=True)
            for number, job in enumerate(jobs):
                number = len(jobs) - number
                allExportJobs.append({
                    "startTime": job.startTime,
                    "status": job.status,
                    "number": number,
                    "outData": os.path.basename(job.outData if job.outData else ""),
                    "id": job.key().id()
                })
        context['allExportJobs'] = allExportJobs

        # Parameter Estimation
        allParameterJobs = []
        allParameterJobsQuery = db.GqlQuery(
            "SELECT * FROM StochOptimJobWrapper WHERE user_id = :1",
            self.user.user_id())
        if allParameterJobsQuery != None:
            jobs = list(allParameterJobsQuery.run())
            jobs = sorted(
                jobs,
                key=lambda x:
                (datetime.datetime.strptime(x.startTime, '%Y-%m-%d-%H-%M-%S')
                 if hasattr(x, 'startTime') and x.startTime != None else ''),
                reverse=True)
            for number, job in enumerate(jobs):
                number = len(jobs) - number
                allParameterJobs.append(
                    self.__process_getJobStatus(service, job, number))
        context['allParameterJobs'] = allParameterJobs

        #Spatial Jobs
        allSpatialJobs = []
        allSpatialJobsQuery = db.GqlQuery(
            "SELECT * FROM SpatialJobWrapper WHERE user_id = :1",
            self.user.user_id())
        if allSpatialJobsQuery != None:
            jobs = list(allSpatialJobsQuery.run())
            jobs = sorted(
                jobs,
                key=lambda x:
                (datetime.datetime.strptime(x.startTime, '%Y-%m-%d-%H-%M-%S')
                 if hasattr(x, 'startTime') and x.startTime != None else ''),
                reverse=True)
            for number, job in enumerate(jobs):
                number = len(jobs) - number
                allSpatialJobs.append(
                    self.__process_getJobStatus(service, job, number))
        context['allSpatialJobs'] = allSpatialJobs

        return context
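
Every job list above repeats the same sort-by-startTime idiom. A minimal, standalone sketch of that idiom (helper name hypothetical; it assumes the wrappers expose an optional startTime string in the '%Y-%m-%d-%H-%M-%S' format used throughout, and falls back to datetime.min rather than the '' / now() fallbacks seen above):

import datetime

TIME_FORMAT = '%Y-%m-%d-%H-%M-%S'

def sort_jobs_newest_first(jobs):
    """Sort job wrappers by their startTime string, newest first.

    Jobs without a parseable startTime sort last (datetime.min fallback).
    """
    def key(job):
        start = getattr(job, 'startTime', None)
        if start:
            try:
                return datetime.datetime.strptime(start, TIME_FORMAT)
            except ValueError:
                pass
        return datetime.datetime.min
    return sorted(jobs, key=key, reverse=True)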
Beispiel #35
0
    def getContext(self):
        """ 
            Get the status of all the jobs that exist in the system and assemble a dict
            with info to display on the page. 
        """
        ##
        context = {}
        ##
        all_job_types = OrderedDict([('stochkit', 'Non-spatial Simulation'),
                                     ('spatial', 'Spatial Simulation'),
                                     ('sensitivity', 'Sensitivity'),
                                     ('parameter_estimation',
                                      'Parameter Estimation'),
                                     ('parameter_sweep', 'Parameter Sweep'),
                                     ('export', 'Data Export')])
        show_jobs = {}
        context['job_type_option_list'] = ""
        for k, v in all_job_types.iteritems():
            show_jobs[k] = True
            if 'job_type' in self.request.GET and self.request.GET[
                    'job_type'] == k:
                context[
                    'job_type_option_list'] += "<option value=\"{0}\" SELECTED>{1}</option>".format(
                        k, v)
            else:
                context[
                    'job_type_option_list'] += "<option value=\"{0}\">{1}</option>".format(
                        k, v)
        context['filter_value_div'] = 'inline'
        context['job_type_div'] = 'none'
        service = backendservices(self.user_data)
        ## setup filters
        filter_types = ['type', 'name', 'model', 'resource', 'status']
        filter_value = ''
        if 'filter_value' in self.request.GET:
            filter_value = self.request.GET['filter_value']
            context['seleted_filter_value'] = filter_value
        if 'filter_type' in self.request.GET:
            for f in filter_types:
                context['seleted_filter_type_' + f] = ''
            filter_type = self.request.GET['filter_type']
            context['seleted_filter_type_' + filter_type] = 'SELECTED'
        SQL_where_clause = "WHERE user_id = :1"
        SQL_where_data = [self.user.user_id()]
        ## process filters
        for k, v in self.request.GET.iteritems():
            #if k == 'job_type' and v != "":
            #    for k,v in show_jobs.iteritems(): show_jobs[k]=False
            #    if v in show_jobs: show_jobs[v] = True
            if k == "filter_type" and v == "type":  # and 'job_type' in self.request.GET:
                job_filter = self.request.GET['job_type']
                if job_filter != '':
                    if job_filter in show_jobs:
                        for k, v in show_jobs.iteritems():
                            show_jobs[k] = False
                        show_jobs[job_filter] = True
                context['filter_value_div'] = 'none'
                context['job_type_div'] = 'inline'
            elif k == "filter_type" and v == "name" and filter_value != '':
                SQL_where_clause = "WHERE user_id = :1 AND name = :2"
                SQL_where_data = [self.user.user_id(), filter_value]
            elif k == "filter_type" and v == "model" and filter_value != '':
                SQL_where_clause = "WHERE user_id = :1 AND modelName = :2"
                SQL_where_data = [self.user.user_id(), filter_value]
            elif k == "filter_type" and v == "resource" and filter_value != '':
                SQL_where_clause = "WHERE user_id = :1 AND resource = :2"
                SQL_where_data = [self.user.user_id(), filter_value]
            elif k == "filter_type" and v == "status" and filter_value != '':
                SQL_where_clause = "WHERE user_id = :1 AND resource = :2"
                SQL_where_data = [self.user.user_id(), filter_value]

        all_jobs_together = []
        # StochKit jobs
        all_jobs = []
        if show_jobs['stochkit']:
            all_stochkit_jobs = db.GqlQuery(
                "SELECT * FROM StochKitJobWrapper {0}".format(
                    SQL_where_clause), *SQL_where_data)
            if all_stochkit_jobs != None:
                jobs = list(all_stochkit_jobs.run())
                jobs = sorted(jobs,
                              key=lambda x:
                              (datetime.datetime.strptime(
                                  x.startTime, '%Y-%m-%d-%H-%M-%S')
                               if hasattr(x, 'startTime') and x.startTime !=
                               None else datetime.datetime.now()),
                              reverse=True)
                for number, job in enumerate(jobs):
                    number = len(jobs) - number
                    all_jobs.append(
                        self.__process_getJobStatus(service, job, number))
        context['all_jobs'] = all_jobs
        all_jobs_together.extend(all_jobs)

        # Sensitivity
        allSensJobs = []
        if show_jobs['sensitivity']:
            allSensQuery = db.GqlQuery(
                "SELECT * FROM SensitivityJobWrapper {0}".format(
                    SQL_where_clause), *SQL_where_data)
            if allSensQuery != None:
                jobs = list(allSensQuery.run())
                jobs = sorted(jobs,
                              key=lambda x: (datetime.datetime.strptime(
                                  x.startTime, '%Y-%m-%d-%H-%M-%S')
                                             if hasattr(x, 'startTime') and x.
                                             startTime != None else ''),
                              reverse=True)
                for number, job in enumerate(jobs):
                    number = len(jobs) - number
                    allSensJobs.append(
                        self.__process_getJobStatus(service, job, number))
        context['allSensJobs'] = allSensJobs
        all_jobs_together.extend(allSensJobs)

        # Export
        allExportJobs = []
        if show_jobs['export']:
            exportJobsQuery = db.GqlQuery(
                "SELECT * FROM ExportJobWrapper {0}".format(SQL_where_clause),
                *SQL_where_data)
            if exportJobsQuery != None:
                jobs = list(exportJobsQuery.run())
                jobs = sorted(jobs,
                              key=lambda x: (datetime.datetime.strptime(
                                  x.startTime, '%Y-%m-%d-%H-%M-%S')
                                             if hasattr(x, 'startTime') and x.
                                             startTime != None else ''),
                              reverse=True)

                for number, job in enumerate(jobs):
                    number = len(jobs) - number
                    allExportJobs.append(
                        self.__process_getJobStatus(service, job, number))
        context['allExportJobs'] = allExportJobs
        all_jobs_together.extend(allExportJobs)

        # Parameter Estimation
        allParameterJobs = []
        if show_jobs['parameter_estimation']:
            allParameterJobsQuery = db.GqlQuery(
                "SELECT * FROM StochOptimJobWrapper {0}".format(
                    SQL_where_clause), *SQL_where_data)
            if allParameterJobsQuery != None:
                jobs = list(allParameterJobsQuery.run())
                jobs = sorted(jobs,
                              key=lambda x: (datetime.datetime.strptime(
                                  x.startTime, '%Y-%m-%d-%H-%M-%S')
                                             if hasattr(x, 'startTime') and x.
                                             startTime != None else ''),
                              reverse=True)
                for number, job in enumerate(jobs):
                    number = len(jobs) - number
                    allParameterJobs.append(
                        self.__process_getJobStatus(service, job, number))
        context['allParameterJobs'] = allParameterJobs
        all_jobs_together.extend(allParameterJobs)

        #Spatial Jobs
        allSpatialJobs = []
        if show_jobs['spatial']:
            allSpatialJobsQuery = db.GqlQuery(
                "SELECT * FROM SpatialJobWrapper {0}".format(SQL_where_clause),
                *SQL_where_data)
            if allSpatialJobsQuery != None:
                jobs = list(allSpatialJobsQuery.run())
                jobs = sorted(jobs,
                              key=lambda x: (datetime.datetime.strptime(
                                  x.startTime, '%Y-%m-%d-%H-%M-%S')
                                             if hasattr(x, 'startTime') and x.
                                             startTime != None else ''),
                              reverse=True)
                for number, job in enumerate(jobs):
                    number = len(jobs) - number
                    allSpatialJobs.append(
                        self.__process_getJobStatus(service, job, number))
        context['allSpatialJobs'] = allSpatialJobs
        all_jobs_together.extend(allSpatialJobs)

        #Parameter Sweep Jobs
        allParameterSweepJobs = []
        if show_jobs['parameter_sweep']:
            allParameterSweepJobsQuery = db.GqlQuery(
                "SELECT * FROM ParameterSweepJobWrapper {0}".format(
                    SQL_where_clause), *SQL_where_data)
            if allParameterSweepJobsQuery != None:
                jobs = list(allParameterSweepJobsQuery.run())
                jobs = sorted(jobs,
                              key=lambda x: (datetime.datetime.strptime(
                                  x.startTime, '%Y-%m-%d-%H-%M-%S')
                                             if hasattr(x, 'startTime') and x.
                                             startTime != None else ''),
                              reverse=True)
                for number, job in enumerate(jobs):
                    number = len(jobs) - number
                    allParameterSweepJobs.append(
                        self.__process_getJobStatus(service, job, number))
        context['allParameterSweepJobs'] = allParameterSweepJobs
        all_jobs_together.extend(allParameterSweepJobs)
        # Sort the jobs
        all_jobs_together.sort(
            key=lambda x:
            (datetime.datetime.strptime(x['startTime'], '%Y-%m-%d-%H-%M-%S')
             if 'startTime' in x and x['startTime'] != None else datetime.
             datetime.now()),
            reverse=True)
        context['all_jobs_together'] = all_jobs_together

        if 'time_zone_name' in self.request.cookies:
            #logging.info("time_zone_name = {0}".format(self.request.cookies.get('time_zone_name')))
            context['time_zone_name'] = "({0})".format(
                self.request.cookies.get('time_zone_name'))
        else:
            context['time_zone_name'] = '(UTC)'
        logging.info("STATUS: CONTEXT \n {0}".format(context))
        return context
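
The filter handling above maps the selected filter type onto a parameterized GQL WHERE clause. A compact sketch of that mapping (helper name hypothetical; field names taken from the queries above):

def build_where_clause(user_id, filter_type=None, filter_value=None):
    """Return (clause, bind_values) for a GQL query, always scoped to the user.

    Only known filter types add an equality condition; anything else falls
    back to filtering by user alone.
    """
    field_by_filter = {
        'name': 'name',
        'model': 'modelName',
        'resource': 'resource',
        'status': 'status',
    }
    field = field_by_filter.get(filter_type)
    if field and filter_value:
        return "WHERE user_id = :1 AND {0} = :2".format(field), [user_id, filter_value]
    return "WHERE user_id = :1", [user_id]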
Beispiel #36
0
    def runCloud(self, params):
        model = StochKitModelWrapper.get_by_id(
            params["id"]).createStochKitModel()

        if not model:
            raise Exception(
                'Failed to retrieve the model \'{0}\' to simulate'.format(
                    params["id"]))

        #the parameter dictionary to be passed to the backend
        param = {}

        # Execute as concentration or population?
        exec_type = params['execType'].lower()

        if exec_type not in ["deterministic", "stochastic"]:
            raise Exception(
                'exec_type must be concentration or population. Found \'{0}\''.
                format(exec_type))

        if model.units.lower() == 'concentration' and exec_type.lower(
        ) == 'stochastic':
            raise Exception(
                'Concentration models cannot be executed stochastically')

        executable = exec_type.lower()
        document = model.serialize()

        # Wow, what a hack

        if executable == 'deterministic' and model.units.lower(
        ) == 'population':
            model = StochMLDocument.fromString(document).toModel(model.name)

            for reactionN in model.getAllReactions():
                reaction = model.getAllReactions()[reactionN]
                if reaction.massaction:
                    if len(reaction.reactants
                           ) == 1 and reaction.reactants.values()[0] == 2:
                        reaction.marate.setExpression(
                            reaction.marate.expression + ' / 2')

        document = model.serialize()

        params['document'] = str(document)
        filepath = ""
        params['file'] = filepath
        ensemblename = params['jobName']
        stime = params['time']
        realizations = params['realizations']
        increment = params['increment']

        if int(params['seed']) < 0:
            random.seed()
            params['seed'] = random.randint(0, 2147483647)

        seed = params['seed']

        # Assemble the argument list
        args = ''
        args += ' -t '
        args += str(stime)
        num_output_points = str(int(float(stime) / float(increment)))
        args += ' -i ' + str(num_output_points)
        path = os.path.dirname(__file__)

        # Algorithm, SSA or Tau-leaping?
        if executable != 'deterministic':
            params['job_type'] = 'stochkit'
            executable = params['algorithm']

            args += ' --realizations '
            args += str(realizations)

            # We keep all the trajectories by default. The user can select to only store means and variance
            # through the advanced options.
            if not "only-moments" in params:
                args += ' --keep-trajectories'

            if "keep-histograms" in params:
                args += ' --keep-histograms'

            args += ' --seed '
            args += str(seed)
        else:
            params['job_type'] = 'stochkit_ode'
            executable = "stochkit_ode.py"

        # Columns need to be labeled for the visualization page to work.
        args += ' --label'

        cmd = executable + ' ' + args

        params['paramstring'] = cmd

        bucketname = self.user_data.getBucketName()
        params['bucketname'] = bucketname

        params['user_id'] = self.user.user_id()

        # Call backendservices and execute StochKit
        service = backendservices(self.user_data)

        cloud_result = service.submit_cloud_task(params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception('Cloud execution failed: {0}'.format(e))

        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        # Create a StochKitJob instance
        job = StochKitJobWrapper()
        job.resource = cloud_result['resource']

        # stochkit_job.uuid = res['uuid']

        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = params['jobName']
        job.modelName = model.name
        #job.pid = taskid
        job.celeryPID = celery_task_id
        job.cloudDatabaseID = taskid

        # Create a StochKitJob instance
        job.indata = json.dumps({
            "type": 'StochKit2 Ensemble',
            "final_time": params['time'],
            "realizations": params['realizations'],
            "increment": params['increment'],
            "seed": params['seed'],
            "exec_type": params['execType'],
            "units": model.units.lower(),
            "epsilon": params['epsilon'],
            "rTol": params['rTol'],
            "aTol": params['aTol'],
            "mxSteps": params['mxSteps'],
            "threshold": params['threshold']
        })

        job.output_stored = 'True'
        job.outData = None
        #job.stdout = '{0}/stdout'.format(dataDir)
        #job.stderr = '{0}/stderr'.format(dataDir)
        job.status = 'Running'
        job.put()

        return job
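
runCloud assembles the StochKit command line by string concatenation. Roughly the same assembly, written as a small helper (name hypothetical; flag names taken from the code above):

import random

def build_stochkit_args(stime, increment, realizations, seed,
                        deterministic=False, only_moments=False,
                        keep_histograms=False):
    """Assemble the argument string handed to StochKit (or stochkit_ode.py)."""
    if int(seed) < 0:
        seed = random.randint(0, 2147483647)
    args = ['-t', str(stime),
            '-i', str(int(float(stime) / float(increment)))]
    if not deterministic:
        args += ['--realizations', str(realizations)]
        if not only_moments:
            args.append('--keep-trajectories')
        if keep_histograms:
            args.append('--keep-histograms')
        args += ['--seed', str(seed)]
    args.append('--label')  # columns must be labeled for the visualization page
    return ' '.join(args)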
Beispiel #37
0
    def runCloud(self, data):
        self.user_data.set_selected(1)
        job = SensitivityJobWrapper()

        service = backendservices(self.user_data)

        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception(
                'No cloud computing resources found. (Have they been started?)'
            )

        job.user_id = self.user.user_id()
        model = modeleditor.StochKitModelWrapper.get_by_id(data["id"])
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.status = "Pending"
        job.modelName = model.name

        runtime = float(data["time"])
        dt = float(data["increment"])

        job.indata = json.dumps(data)

        parameters = []
        for parameter in data['selections']["pc"]:
            if data['selections']["pc"][parameter]:
                parameters.append(parameter)
        stochkitmodel = model.createStochKitModel()

        # Wow, what a hack
        if stochkitmodel.units.lower() == 'population':
            document = stochkitmodel.serialize()

            stochkitmodel = StochMLDocument.fromString(document).toModel(
                model.name)

            for reactionN in stochkitmodel.getAllReactions():
                reaction = stochkitmodel.getAllReactions()[reactionN]
                if reaction.massaction:
                    if len(reaction.reactants
                           ) == 1 and reaction.reactants.values()[0] == 2:
                        reaction.marate.setExpression(
                            reaction.marate.expression + ' / 2')

        params = {
            "job_type":
            "sensitivity",
            "document":
            str(stochkitmodel.serialize()),
            "paramstring":
            "stochkit_ode.py --sensi --parameters {0} -t {1} -i {2}".format(
                " ".join(parameters), runtime, int(runtime / dt)),
            "bucketname":
            self.user_data.getBucketName()
        }

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params)

        # if not cloud_result["success"]:
        if not cloud_result["success"]:
            return None, cloud_result

        job.cloudDatabaseID = cloud_result["db_id"]
        job.celeryPID = cloud_result["celery_pid"]
        job.resource = cloud_result['resource']
        job.outData = None
        job.zipFileName = None
        job.output_stored = 'True'
        job.put()
        return job
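
Both runCloud variants apply the same "hack": when a population model is run deterministically, mass-action dimerization reactions (a single reactant with stoichiometry 2) have their rate halved to match the concentration-based ODE propensity. Isolated as a hedged sketch (helper name hypothetical; method names as used in the code above):

def halve_dimerization_rates(model):
    """Divide the mass-action rate of X + X -> ... reactions by two,
    matching the deterministic (concentration) propensity convention."""
    for name in model.getAllReactions():
        reaction = model.getAllReactions()[name]
        if reaction.massaction:
            reactants = reaction.reactants
            if len(reactants) == 1 and list(reactants.values())[0] == 2:
                reaction.marate.setExpression(reaction.marate.expression + ' / 2')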
Beispiel #38
0
    def runCloud(self, data):
        self.user_data.set_selected(1)
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception(
                'No cloud computing resources found. (Have they been started?)'
            )

        modelDb = StochKitModelWrapper.get_by_id(data["modelID"])

        berniemodel = StochOptimModel()

        success, msgs = berniemodel.fromStochKitModel(
            modelDb.createStochKitModel())

        if not success:
            raise Exception(msgs)

        path = os.path.abspath(os.path.dirname(__file__))

        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir=basedir + 'output')

        job = StochOptimJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"

        data["exec"] = "'bash'"

        data["steps"] = ("C" if data["crossEntropyStep"] else
                         "") + ("E" if data["emStep"] else
                                "") + ("U" if data["uncertaintyStep"] else "")

        # data["cores"] = 4
        data["options"] = ""

        cmd = "exec/mcem2.r --steps {steps} --seed {seed} --K.ce {Kce} --K.em {Kem} --K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} --alpha {alpha} --beta {beta} --gamma {gamma} --k {k} --pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} --numConverge {numConverge} --command {exec}".format(
            **data)
        # cmd = "exec/mcem2.r --K.ce 1000 --K.em 100 --rho .01 --pcutoff .05"
        stringModel, nameToIndex = berniemodel.serialize(
            data["activate"], True)
        job.nameToIndex = json.dumps(nameToIndex)

        jFileData = fileserver.FileManager.getFile(self,
                                                   data["trajectoriesID"],
                                                   noFile=False)
        iFileData = fileserver.FileManager.getFile(self,
                                                   data["initialDataID"],
                                                   noFile=False)

        job.put()

        cloud_params = {
            "job_id": job.key().id(),
            "job_type": "mcem2",
            # "cores": data["cores"],
            "paramstring": cmd,
            "model_file": stringModel,
            "model_data": {
                "content": self.addWeightColumnIfNecessary(iFileData["data"]),
                "extension": "txt"
            },
            "final_data": {
                "content": self.addWeightColumnIfNecessary(jFileData["data"]),
                "extension": "txt"
            },
            "key_prefix": self.user.user_id(),
            "credentials": self.user_data.getCredentials(),
            "bucketname": self.user_data.getBucketName()
        }

        # # execute cloud task
        try:
            service = backend.backendservice.backendservices(self.user_data)
            cloud_result = service.submit_cloud_task(params=cloud_params)

            if not cloud_result["success"]:
                raise Exception(cloud_result["reason"])

            job.cloudDatabaseID = cloud_result["db_id"]
            job.resource = cloud_result['resource']
            job.celeryPID = cloud_result["celery_pid"]
            #            job.pollProcessPID = int(cloud_result["poll_process_pid"])
            # job.pid = handle.pid
            job.put()
        except Exception as e:
            job.status = 'Failed'
            job.delete(self)
            raise

        return job
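
The mcem2.r command string is filled in with str.format(**data), so every placeholder must exist as a key in data (including the reserved-word-looking key 'exec'). A trimmed illustration of the pattern, with purely illustrative values:

MCEM2_CMD = ("exec/mcem2.r --steps {steps} --seed {seed} "
             "--K.ce {Kce} --K.em {Kem} --command {exec}")

data = {
    'steps': 'CEU',   # cross-entropy, EM and uncertainty steps enabled
    'seed': 1234,
    'Kce': 1000,
    'Kem': 100,
    'exec': "'bash'",
}
cmd = MCEM2_CMD.format(**data)  # KeyError if any placeholder is missing from data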
Beispiel #39
0
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery(
                "SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job != None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                elif data["resource"] == "qsub":
                    result = self.runQsubWrapper(data)
                else:
                    raise Exception("Unknown resource {0}".format(
                        data["resource"]))
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": result.key().id()
                    }))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status': False, 'msg': 'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return

        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(
                    os.path.dirname(__file__)) + '/../output/' + job.uuid
                # Clean up
                os.remove(job.uuid + '.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': 'Job downloaded'
                    }))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(
                    __file__ +
                    '/../../../jupyter_notebook_templates') + "/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                if self.request.get('hostname'):
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': True,
                            'msg': 'Notebook ready',
                            'url': notebook_url
                        }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            'status': False,
                            'msg': 'error:{0}'.format(e)
                        }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'getVtkLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(
                                specie,
                                os.path.join(
                                    tmpDir,
                                    "trajectory_{0}".format(trajectory),
                                    "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.vtkFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'getCsvLocal':

            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(
                            os.path.join(root, file),
                            os.path.join(
                                prefix,
                                os.path.relpath(os.path.join(root, file),
                                                path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(
                            str(job.outData +
                                '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(
                            os.path.join(
                                tmpDir,
                                "trajectory_{0}".format(trajectory)).encode(
                                    'ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir=os.path.abspath(
                        os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix=job.name +
                                                          "_",
                                                          suffix='.zip',
                                                          delete=False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name

                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)

            relpath = '/' + os.path.relpath(
                job.csvFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return

        self.response.write(
            json.dumps({
                'status':
                False,
                'msg':
                'Unknown Error processing request: no handler called'
            }))
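
The zipdir helper is defined twice in the handler above. As a standalone utility it can be exercised like this (the output archive name and the existing ./results directory are illustrative):

import os
import zipfile

def zipdir(path, ziph, prefix):
    """Add every file under `path` to the open ZipFile `ziph`, rooted at `prefix`."""
    for root, dirs, files in os.walk(path):
        for name in files:
            full = os.path.join(root, name)
            ziph.write(full, os.path.join(prefix, os.path.relpath(full, path)))

# Usage sketch: bundle an existing ./results directory into results_bundle.zip
with zipfile.ZipFile('results_bundle.zip', 'w') as zf:
    zipdir('results', zf, 'results_bundle')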
Beispiel #40
0
    def post(self):
        """ Assemble the input to StochKit2 and submit the job (locally or via cloud). """

        reqType = self.request.get('reqType')

        if reqType == 'getFromCloud':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)
            
            # Unpack it to its local output location
            os.system('tar -xf {0}.tar'.format(job.cloudDatabaseID))
            job.outData = os.path.abspath('{0}/../output/{1}'.format(os.path.abspath(os.path.dirname(__file__)), job.cloudDatabaseID))

            job.stdout = os.path.join(job.outData, 'stdout.log')
            job.stderr = os.path.join(job.outData, 'stderr.log')

            # Clean up
            os.remove('{0}.tar'.format(job.cloudDatabaseID))

            # Save the updated status
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded'}))
            return
        elif reqType == 'getDataLocal':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")
                
                job.zipFileName = szip.getFileName()

                szip.addStochKitJob(job, globalOp = True, ignoreStatus = True)
                
                szip.close()

                # Save the updated status
                job.put()
            
            
            relpath = os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'delJob':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

                if job.user_id == self.user.user_id():
                    job.delete(self)
                    
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : True,
                                                 'msg' : "Job deleted from the datastore."}))
            except Exception as e:
                logging.exception(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : False,
                                                 'msg' : "Error: {0}".format(e) }))

            return
        elif reqType == 'jobInfo':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
            indata = json.loads(job.indata)

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))

            if job.status == "Finished":
                try:
                    if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES and (job.output_stored == 'False' or job.outData is None):
                        self.response.headers['Content-Type'] = 'application/json'
                        self.response.write(json.dumps({ "status" : "Finished",
                                                         "values" : [],
                                                         "job" : JobManager.getJob(self, job.key().id())}))
                        return
                    else:
                        outputdir = job.outData
                        # Load all data from file in JSON format
                        if indata['exec_type'] == 'stochastic':
                            tid = self.request.get('tid')

                            if tid != '' and tid != 'mean':
                                outfile = '/result/trajectories/trajectory{0}.txt'.format(tid)

                                vhandle = open(outputdir + outfile, 'r')
                        
                                values = { 'time' : [], 'trajectories' : {} }
                                columnToList = []
                                for i, line in enumerate(vhandle):
                                    if i == 0:
                                        names = line.split()
                                        for name in names:
                                            if name == 'time':
                                                columnToList.append(values['time'])
                                            else:
                                                values['trajectories'][name] = [] # start a new timeseries for this name
                                                columnToList.append(values['trajectories'][name]) # Store a reference here for future use
                                    else:
                                        for storage, value in zip(columnToList, map(float, line.split())):
                                            storage.append(value)
                                vhandle.close()
                            else:
                                outfile = '/result/stats/means.txt'

                                vhandle = open(outputdir + outfile, 'r')

                                values = { 'time' : [], 'trajectories' : {} }
                                columnToList = []
                                for i, line in enumerate(vhandle):
                                    if i == 0:
                                        names = line.split()
                                        for name in names:
                                            if name == 'time':
                                                columnToList.append(values['time'])
                                            else:
                                                values['trajectories'][name] = [] # start a new timeseries for this name
                                                columnToList.append(values['trajectories'][name]) # Store a reference here for future use
                                    else:
                                        for storage, value in zip(columnToList, map(float, line.split())):
                                            storage.append(value)
                                vhandle.close()
                        else:
                            outfile = '/result/output.txt'
                            values = { 'time' : [], 'trajectories' : {} }

                            #if not os.path.isfile(outputdir + outfile):

                            vhandle = open(outputdir + outfile, 'r')

                            columnToList = []
                            for i, line in enumerate(vhandle):
                                if i == 0:
                                    continue
                                elif i == 1:
                                    names = line.split()
                                    for name in names:
                                        if name == 'time':
                                            columnToList.append(values['time'])
                                        else:
                                            values['trajectories'][name] = [] # start a new timeseries for this name
                                            columnToList.append(values['trajectories'][name]) # Store a reference here for future use
                                elif i == 2:
                                    continue
                                elif i == 3:
                                    for storage, value in zip(columnToList, map(float, line.split())):
                                        storage.append(value)
                                elif i == 4:
                                    continue
                                else:
                                    for storage, value in zip(columnToList, map(float, line.split())):
                                        storage.append(value)
                            vhandle.close()

                    self.response.headers['Content-Type'] = 'application/json'
                    result = {"status" : "Finished",
                              "values" : values,
                              "job" : JobManager.getJob(self, job.key().id())}
                    logging.debug("result = \n\n{}".format(result))
                    self.response.write(json.dumps(result))
                    return

                except Exception as e:
                    traceback.print_exc()
                    job.status = "Failed"
                    job.put()
                    logging.error("Failed to parse output data. Assuming job failed and continuing")
            
            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ""
                stderr = ""
                
                if job.outData is not None:
                    if os.path.isfile(job.outData + '/stdout'):
                        fstdoutHandle = open(job.outData + '/stdout', 'r')
                    else:
                        fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()

                    if os.path.isfile(job.outData + '/stderr'):
                        fstderrHandle = open(job.outData + '/stderr', 'r')
                    else:
                        fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()

                self.response.write(json.dumps({ "status" : "Failed",
                                                 "job" : JobManager.getJob(self, job.key().id()),
                                                 "stdout" : stdout,
                                                 "stderr" : stderr}))
            else:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ "status" : "asdfasfdfdsa" }))
        else:
            # Params is a dict that contains all the response elements of the form
            params = json.loads(self.request.get('data'))

            self.response.headers['Content-Type'] = 'application/json'
            job = db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                              self.user.user_id(), params["jobName"].strip()).get()

            if job != None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return
            
            backend_services = backendservices(self.user_data)

            # Create a stochhkit_job instance
            try:
                if params['resource'] == "local":
                    job = self.runStochKitLocal(params)
                elif params['resource'] == 'cloud':
                    job = self.runCloud(params)
                else:
                    raise Exception("Unknown resource {0}".format(params["resource"]))
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps( { "status" : True,
                                                  "msg" : "Job launched",
                                                  "id" : job.key().id() } ))
            except Exception as e:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps( { "status" : False,
                                                  "msg" : str(e) } ))
Beispiel #41
0
    def runCloud(self, params):
        model = StochKitModelWrapper.get_by_id(params["id"]).createStochKitModel()

        if not model:
            raise Exception('Failed to retrieve the model \'{0}\' to simulate'.format(params["id"]))

        #the parameter dictionary to be passed to the backend
        param = {}

        # Execute as concentration or population?
        exec_type = params['execType'].lower()

        if exec_type not in ["deterministic", "stochastic"]:
            raise Exception('exec_type must be concentration or population. Found \'{0}\''.format(exec_type))

        if model.units.lower() == 'concentration' and exec_type.lower() == 'stochastic':
            raise Exception('Concentration models cannot be executed stochastically')

        executable = exec_type.lower()
        document = model.serialize()

        # Wow, what a hack

        if executable == 'deterministic' and model.units.lower() == 'population':
            model = StochMLDocument.fromString(document).toModel(model.name)

            for reactionN in model.getAllReactions():
                reaction = model.getAllReactions()[reactionN]
                if reaction.massaction:
                    if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2:
                        reaction.marate.setExpression(reaction.marate.expression + ' / 2')
            
        document = model.serialize()

        params['document']=str(document)
        filepath = ""
        params['file'] = filepath
        ensemblename = params['jobName']
        stime = params['time']
        realizations = params['realizations']
        increment = params['increment']

        if int(params['seed']) < 0:
            random.seed()
            params['seed'] = random.randint(0, 2147483647)

        seed = params['seed']

        # Assemble the argument list
        args = ''
        args+=' -t '
        args+=str(stime)
        num_output_points = str(int(float(stime)/float(increment)))
        args+=' -i ' + str(num_output_points)
        path = os.path.dirname(__file__)

        # Algorithm, SSA or Tau-leaping?
        if executable != 'deterministic':
            params['job_type'] = 'stochkit'
            executable = params['algorithm']
            
            args+=' --realizations '
            args+=str(realizations)
            
            # We keep all the trajectories by default. The user can select to only store means and variance
            # through the advanced options.
            if not "only-moments" in params:
                args+=' --keep-trajectories'
                
            if "keep-histograms" in params:
                args+=' --keep-histograms'
                
            args+=' --seed '
            args+=str(seed)
        else:
            params['job_type'] = 'stochkit_ode'
            executable = "stochkit_ode.py"

        # Columns need to be labeled for the visualization page to work.
        args += ' --label'
        
        cmd = executable+' '+args
        
        params['paramstring'] = cmd
        
        bucketname = self.user_data.getBucketName()
        params['bucketname'] = bucketname  
        
        params['user_id'] = self.user.user_id()       
        
        # Call backendservices and execute StochKit
        service = backendservices(self.user_data)

        cloud_result = service.submit_cloud_task(params)

        if not cloud_result["success"]:
            e = cloud_result["exception"]
            raise Exception('Cloud execution failed: {0}'.format(e))
            
        celery_task_id = cloud_result["celery_pid"]
        taskid = cloud_result["db_id"]

        # Create a StochKitJob instance
        job = StochKitJobWrapper()
        job.resource = cloud_result['resource']
        
        # stochkit_job.uuid = res['uuid']
            
        
        
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = params['jobName']
        job.modelName = model.name
        #job.pid = taskid
        job.celeryPID = celery_task_id
        job.cloudDatabaseID = taskid

        # Create a StochKitJob instance
        job.indata = json.dumps({ "type" : 'StochKit2 Ensemble',
                                  "final_time" : params['time'],
                                  "realizations" : params['realizations'],
                                  "increment" : params['increment'],
                                  "seed" : params['seed'],
                                  "exec_type" : params['execType'],
                                  "units" : model.units.lower(),
                                  "epsilon" : params['epsilon'],
                                  "threshold" : params['threshold'] })

        job.output_stored = 'True'
        job.outData = None
        #job.stdout = '{0}/stdout'.format(dataDir)
        #job.stderr = '{0}/stderr'.format(dataDir)
        job.status = 'Running'
        job.put()

        return job
Beispiel #42
0
    def getContext(self):
        """ 
            Get the status of all the jobs that exist in the system and assemble a dict
            with info to display on the page. 
        """
        context = {}
        result = {}
        service = backendservices()
        # Grab references to all the user's StochKitJobs in the system
        all_stochkit_jobs = db.GqlQuery("SELECT * FROM StochKitJobWrapper WHERE user_id = :1", self.user.user_id())
        all_jobs = []
        if all_stochkit_jobs != None:
            # We want to display the name of the job and the status of the Job.
            status = {}

            jobs = list(all_stochkit_jobs.run())

            jobs = sorted(jobs, key = lambda x : (datetime.datetime.strptime(x.startDate, '%Y-%m-%d-%H-%M-%S') if hasattr(x, 'startDate') and x.startDate != None else datetime.datetime.now()), reverse = True)

            for number, job in enumerate(jobs):
                number = len(jobs) - number

                # Get the StochKitJob instance attached to this job wrapper
                stochkit_job = job.stochkit_job
                
                # Query the backend for the status of the job, but only if the current status is not Finished
                if not stochkit_job.status == "Finished":
                    try:
                        if stochkit_job.resource == 'Local':
                            # First, check if the job is still running
                            res = service.checkTaskStatusLocal([stochkit_job.pid])
                            if res[stochkit_job.pid]:
                                stochkit_job.status = "Running"
                            else:
                                # Check if the signature file is present; it will always exist for a successful job.
                                # For SSA and tau-leaping, this is means.txt;
                                # for ODE, this is output.txt.

                                if stochkit_job.exec_type == 'stochastic':
                                    file_to_check = stochkit_job.output_location+"/result/stats/means.txt"
                                else:
                                    file_to_check = stochkit_job.output_location+"/result/output.txt"
                                
                                if os.path.exists(file_to_check):
                                    stochkit_job.status = "Finished"
                                else:
                                    stochkit_job.status = "Failed"
                
                        elif stochkit_job.resource == 'Cloud':
                            # Retrieve credentials from the datastore
                            if not self.user_data.valid_credentials:
                                return {'status':False,'msg':'Could not retrieve the status of job '+stochkit_job.name +'. Invalid credentials.'}
                            credentials = self.user_data.getCredentials()

                            # Check the status on the remote end
                            taskparams = {'AWS_ACCESS_KEY_ID':credentials['EC2_ACCESS_KEY'],'AWS_SECRET_ACCESS_KEY':credentials['EC2_SECRET_KEY'],'taskids':[stochkit_job.pid]}
                            task_status = service.describeTask(taskparams)
                            job_status = task_status[stochkit_job.pid]
                            # It frequently happens that describeTask returns None before the job is finished.
                            if job_status == None:
                                stochkit_job.status = "Unknown"
                            else:

                                if job_status['status'] == 'finished':
                                    # Update the stochkit job 
                                    stochkit_job.status = 'Finished'
                                    stochkit_job.output_url = job_status['output']
                                    stochkit_job.uuid = job_status['uuid']
                                
                                elif job_status['status'] == 'failed':
                                    stochkit_job.status = 'Failed'
                                    stochkit_job.exception_message = job_status['message']
                                    # Might not have a uuid or output if an exception was raised early on or if there is just no output available
                                    try:
                                        stochkit_job.uuid = job_status['uuid']
                                        stochkit_job.output_url = job_status['output']
                                    except KeyError:
                                        pass
                                    
                                elif job_status['status'] == 'pending':
                                    stochkit_job.status = 'Pending'
                                else:
                                    # The state gives more fine-grained results, like if the job is being re-run, but
                                    #  we don't bother the users with this info, we just tell them that it is still running.  
                                    stochkit_job.status = 'Running'
                    
                    except Exception as e:
                        result = {'status':False,'msg':'Could not determine the status of the jobs. ' + str(e)}

                # Save changes to the status
                job.put()

                all_jobs.append({ "name" : stochkit_job.name,
                                  "status" : stochkit_job.status,
                                  "resource" : stochkit_job.resource,
                                  "execType" : stochkit_job.exec_type,
                                  "id" : job.key().id(),
                                  "number" : number})
Example #43
0
    def runCloud(self, data):
        self.user_data.set_selected(1)
        service = backendservices(self.user_data)
        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception('No cloud computing resources found. (Have they been started?)')

        modelDb = StochKitModelWrapper.get_by_id(data["modelID"])

        berniemodel = StochOptimModel()

        success, msgs = berniemodel.fromStochKitModel(modelDb.createStochKitModel())

        if not success:
            raise Exception(msgs)

        path = os.path.abspath(os.path.dirname(__file__))

        basedir = path + '/../'
        dataDir = tempfile.mkdtemp(dir = basedir + 'output')

        job = StochOptimJobWrapper()
        job.user_id = self.user.user_id()
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.indata = json.dumps(data)
        job.modelName = modelDb.name
        job.outData = dataDir
        job.status = "Pending"

        data["exec"] = "'bash'"

        data["steps"] = ("C" if data["crossEntropyStep"] else "") + ("E" if data["emStep"] else "") + ("U" if data["uncertaintyStep"] else "")

        # data["cores"] = 4
        data["options"] = ""

        cmd = "exec/mcem2.r --steps {steps} --seed {seed} --K.ce {Kce} --K.em {Kem} --K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} --alpha {alpha} --beta {beta} --gamma {gamma} --k {k} --pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} --numConverge {numConverge} --command {exec}".format(**data)
        # cmd = "exec/mcem2.r --K.ce 1000 --K.em 100 --rho .01 --pcutoff .05"
        stringModel, nameToIndex = berniemodel.serialize(data["activate"], True)
        job.nameToIndex = json.dumps(nameToIndex)

        jFileData = fileserver.FileManager.getFile(self, data["trajectoriesID"], noFile = False)
        iFileData = fileserver.FileManager.getFile(self, data["initialDataID"], noFile = False)

        job.put()

        cloud_params = {
            "job_id" : job.key().id(),
            "job_type": "mcem2",
            # "cores": data["cores"],
            "paramstring": cmd,
            "model_file": stringModel,
            "model_data": {
                "content": self.addWeightColumnIfNecessary(iFileData["data"]),
                "extension": "txt"
            },
            "final_data": {
                "content": self.addWeightColumnIfNecessary(jFileData["data"]),
                "extension": "txt"
            },
            "key_prefix": self.user.user_id(),
            "credentials": self.user_data.getCredentials(),
            "bucketname": self.user_data.getBucketName()
        }

        # Execute the cloud task
        try:
            service = backend.backendservice.backendservices(self.user_data)
            cloud_result = service.submit_cloud_task(params=cloud_params)
            
            if not cloud_result["success"]:
                raise Exception(cloud_result["reason"])
                
            job.cloudDatabaseID = cloud_result["db_id"]
            job.resource = cloud_result['resource']
            job.celeryPID = cloud_result["celery_pid"]
#            job.pollProcessPID = int(cloud_result["poll_process_pid"])
            # job.pid = handle.pid
            job.put()
        except Exception as e:
            job.status = 'Failed'
            job.delete(self)
            raise

        return job
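
To see the shape of the paramstring that runCloud hands to the backend, the command template can be filled with stand-in values; the numbers below are illustrative only, not defaults from the application:

    # Illustrative values only; the real ones come from the submitted job data.
    example = {"steps": "CEU", "seed": 1, "Kce": 1000, "Kem": 100,
               "Klik": 100, "Kcov": 100, "rho": 0.01, "perturb": 0.25,
               "alpha": 0.5, "beta": 0.5, "gamma": 0.5, "k": 5,
               "pcutoff": 0.05, "qcutoff": 0.05, "numIter": 10,
               "numConverge": 3, "exec": "'bash'"}
    cmd = ("exec/mcem2.r --steps {steps} --seed {seed} --K.ce {Kce} --K.em {Kem} "
           "--K.lik {Klik} --K.cov {Kcov} --rho {rho} --perturb {perturb} "
           "--alpha {alpha} --beta {beta} --gamma {gamma} --k {k} "
           "--pcutoff {pcutoff} --qcutoff {qcutoff} --numIter {numIter} "
           "--numConverge {numConverge} --command {exec}").format(**example)
    print(cmd)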
Example #44
0
    def post(self):       
        reqType = self.request.get('reqType')

        if reqType == "jobInfo":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))
            
            jsonJob = { "userId" : job.userId,
                        "jobName" : job.jobName,
                        "startTime" : job.startTime,
                        "indata" : json.loads(job.indata),
                        "outData" : job.outData,
                        "status" : job.status,
                        "resource" : job.resource }

            if self.user.user_id() != job.userId:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            if job.status == "Finished":
                if job.resource == "cloud" and job.outData is None:
                    # Let the user decide if they want to download it
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ "status" : "Finished",
                                                     "values" : [],
                                                     "job" : jsonJob}))
                    return
                outputdir = job.outData
                # Load all data from file in JSON format
                vhandle = open(outputdir + '/result/output.txt', 'r')
                values = { 'time' : [], 'trajectories' : {}, 'sensitivities' : {}, 'parameters' : {}}
                parameters = []
                columnToList = []
                for i, line in enumerate(vhandle):
                    if i == 0:
                        continue
                    elif i == 1:
                        names = line.split()

                        parameterNames = []

                        for name in names:
                            if ':' in name:
                                specie, parameter = name.split(':')
                                if parameter not in parameterNames:
                                    parameterNames.append(parameter)
                        
                        for name in names:
                            if name == 'time':
                                columnToList.append(values['time'])
                            elif ':' in name:
                                specie, parameter = name.split(':')

                                if specie not in values['sensitivities']:
                                    values['sensitivities'][specie] = {}

                                values['sensitivities'][specie][parameter] = [] # Make a new timeseries for sensitivity
                                columnToList.append(values['sensitivities'][specie][parameter]) # Store a reference here for future use
                            else:
                                values['trajectories'][name] = [] # start a new timeseries for this name
                                columnToList.append(values['trajectories'][name]) # Store a reference here for future use
                    elif i == 2:
                        parameters = map(float, line.split())
                    elif i == 3:
                        for storage, value in zip(columnToList, map(float, line.split())):
                            storage.append(value)
                    elif i == 4:
                        continue
                    else:
                        for storage, value in zip(columnToList, map(float, line.split())):
                            storage.append(value)
                vhandle.close()

                values['parameters'] = dict(zip(parameterNames, parameters))

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ "status" : "Finished",
                                                 "values" : values,
                                                 "job" : jsonJob }))
            elif job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                fstdoutHandle = open(job.outData + '/stdout', 'r')
                stdout = fstdoutHandle.read()
                fstdoutHandle.close()

                fstderrHandle = open(job.outData + '/stderr', 'r')
                stderr = fstderrHandle.read()
                fstderrHandle.close()

                self.response.write(json.dumps({ "status" : "Failed",
                                                 "stdout" : stdout,
                                                 "stderr" : stderr,
                                                 "job" : jsonJob}))
            else:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ "status" : "asdfasdf" }))
        elif reqType == "getFromCloud":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices()
            service.fetchOutput(job.cloudDatabaseID, job.outputURL)
            # Unpack it to its local output location
            os.system('tar -xf ' + job.cloudDatabaseID + '.tar')
            job.outData = os.path.dirname(os.path.abspath(__file__))+'/../output/'+job.cloudDatabaseID
            job.outData = os.path.abspath(job.outData)
            # jsonJob["outData"] = job.outData
            # Clean up
            os.remove(job.cloudDatabaseID+'.tar')
            # Update the db entry
            job.put()
            
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded'}))
            return
        elif reqType == "getLocalData":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))
            
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.jobName + "_")
                
                job.zipFileName = szip.getFileName()

                szip.addSensitivityJob(job, True)
                
                szip.close()

                # Save the updated status
                job.put()
            
            
            relpath = os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return

        elif reqType == "delJob":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            if self.user.user_id() != job.userId:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            job.delete()
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ "status" : True,
                                             "msg" : "Job deleted"}));

        elif reqType == "newJob":
            data = json.loads(self.request.get('data'))

            job = db.GqlQuery("SELECT * FROM SensitivityJobWrapper WHERE userId = :1 AND jobName = :2", self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return
            # Either local or cloud
            if data["resource"] == "local":
                job = self.runLocal(data)
            elif data["resource"] == "cloud":
                job = self.runCloud(data)
            else:
                return self.response.write(json.dumps({"status" : False,
                                            "msg" : "Unrecognized resource requested: {0}".format(data.resource)}))
            
            self.response.write(json.dumps({"status" : True,
                                            "msg" : "Job launched",
                                            "kind" : job.kind(),
                                            "id" : job.key().id()}))
        else:
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "No data submitted"}))
Example #45
0
    def post(self):
        reqType = self.request.get('reqType')
        self.response.content_type = 'application/json'
        logging.error('spatial post reqType={0}'.format(reqType))

        if reqType == 'newJob':
            data = json.loads(self.request.get('data'))
            logging.debug('data =\n{}'.format(pprint.pformat(data)))
            job = db.GqlQuery("SELECT * FROM SpatialJobWrapper WHERE user_id = :1 AND name = :2",
                              self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return

            try:
                if data["resource"] == "local":
                    result = self.runLocal(data)
                elif data["resource"] == "cloud":
                    result = self.runCloud(data)
                else:
                    raise Exception("Unknown resource {0}".format(data["resource"]))
                self.response.write(json.dumps({"status" : True,
                                                "msg" : "Job launched",
                                                "id" : result.key().id()}))
                return
            except Exception as e:
                logging.exception(e)
                result = {'status':False,
                          'msg':'Error: {0}'.format(e)}
                self.response.write(json.dumps(result))
                return


        elif reqType == 'stopJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.stop(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'delJob':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            try:
                job.delete(self)
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({"status" : False,
                                                    "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'getDataCloud':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                service = backendservices(self.user_data)
                # Fetch
                service.fetchOutput(job)
                # Unpack
                os.system('tar -xf ' + job.uuid + '.tar')
                # Record location
                job.outData = os.path.abspath(os.path.dirname(__file__))+'/../output/'+job.uuid
                # Clean up
                os.remove(job.uuid+'.tar')
                # Save the updated status
                job.put()
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps({ 'status' : True,
                                                 'msg' : 'Job downloaded'}))
                return
            except Exception as e:
                traceback.print_exc()
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Error: {0}".format(e)}))
                return

        elif reqType == 'getDataLocal':
            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")
                job.zipFileName = szip.getFileName()
                szip.addSpatialJob(job, True)
                szip.close()
                # Save the updated status
                job.put()
            relpath = '/' + os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'openJupyterNotebook' or reqType == 'redirectJupyterNotebook':
            try:
                jobID = json.loads(self.request.get('id'))
                job = SpatialJobWrapper.get_by_id(int(jobID))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(os.path.abspath(job.outData), os.path.abspath(__file__+'/../../../'))
                notebook_file_path =  os.path.abspath(job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(__file__+'/../../../jupyter_notebook_templates')+"/Spatial.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(notebook_file_path,notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)


                if self.request.get('hostname'):
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(proto,host,port,local_path,notebook_filename)
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ 'status' : True,
                                                     'msg' : 'Notebook ready',
                                                     'url' : notebook_url }))
                else:
                    self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                if reqType == 'openJupyterNotebook':
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ 'status' : False,
                                                     'msg' : 'error:{0}'.format(e) }))
                else:
                    self.response.write('Error: {0}'.format(e))
            return    
        elif reqType == 'getVtkLocal':
            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(os.path.join(root, file), os.path.join(prefix, os.path.relpath(os.path.join(root, file), path)))

            jobID = json.loads(self.request.get('id'))
            jobID = int(jobID)
            job = SpatialJobWrapper.get_by_id(jobID)
            if not job.vtkFileName:
                try:
                    tmpDir = None
                    indata = json.loads(job.indata)
                    tmpDir = tempfile.mkdtemp(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'))
                    for trajectory in range(indata["realizations"]):
                        resultFile = open(str(job.outData + '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()
                        for specie in result.model.listOfSpecies:
                            result.export_to_vtk(specie, os.path.join(tmpDir, "trajectory_{0}".format(trajectory), "species_{0}".format(specie)))

                    tmpFile = tempfile.NamedTemporaryFile(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix = job.name + "_",
                                                          suffix = '.zip', delete = False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()
                    job.vtkFileName = tmpFile.name
                    tmpFile.close()
                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)
            
            relpath = '/' + os.path.relpath(job.vtkFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return
        elif reqType == 'getCsvLocal':
            def zipdir(path, ziph, prefix):
                # ziph is zipfile handle
                for root, dirs, files in os.walk(path):
                    for file in files:
                        ziph.write(os.path.join(root, file), os.path.join(prefix, os.path.relpath(os.path.join(root, file), path)))

            jobID = json.loads(self.request.get('id'))

            jobID = int(jobID)

            job = SpatialJobWrapper.get_by_id(jobID)

            if not job.csvFileName:
                try:
                    tmpDir = None

                    indata = json.loads(job.indata)

                    tmpDir = tempfile.mkdtemp(dir=os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'))

                    for trajectory in range(indata["realizations"]):
                        resultFile = open(str(job.outData + '/results/result{0}'.format(trajectory)))
                        result = pickle.load(resultFile)
                        resultFile.close()

                        result.export_to_csv(os.path.join(tmpDir, "trajectory_{0}".format(trajectory)).encode('ascii', 'ignore'))

                    tmpFile = tempfile.NamedTemporaryFile(dir = os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'),
                                                          prefix = job.name + "_",
                                                          suffix = '.zip', delete = False)

                    zipf = zipfile.ZipFile(tmpFile, "w")
                    zipdir(tmpDir, zipf, os.path.basename(tmpFile.name))
                    zipf.close()

                    job.csvFileName = tmpFile.name
                    
                    tmpFile.close()

                    # Save the updated status
                    job.put()
                finally:
                    if tmpDir and os.path.exists(tmpDir):
                        logging.info("Getting cleaned up")
                        shutil.rmtree(tmpDir)
            
            relpath = '/' + os.path.relpath(job.csvFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return


        self.response.write(json.dumps({ 'status' : False,
                                         'msg' : 'Unknown error processing request: no handler matched the request type'}))
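
The getVtkLocal and getCsvLocal branches rely on the local zipdir helper to pack a results directory under a prefix inside the archive. A stand-alone sketch of that helper with a throwaway directory (file names are made up):

    import os, shutil, tempfile, zipfile

    def zipdir(path, ziph, prefix):
        # ziph is an open zipfile.ZipFile handle
        for root, dirs, files in os.walk(path):
            for f in files:
                full = os.path.join(root, f)
                ziph.write(full, os.path.join(prefix, os.path.relpath(full, path)))

    src = tempfile.mkdtemp()
    with open(os.path.join(src, 'result0.txt'), 'w') as fh:
        fh.write('demo')
    zipf = zipfile.ZipFile('demo.zip', 'w')
    zipdir(src, zipf, 'demo')
    zipf.close()
    shutil.rmtree(src)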
Example #46
0
    def post(self):
        self.response.content_type = 'application/json'
        req_type = self.request.get('req_type')

        credentials = self.user_data.getCredentials()
        os.environ["AWS_ACCESS_KEY_ID"] = credentials['EC2_ACCESS_KEY']
        os.environ["AWS_SECRET_ACCESS_KEY"] = credentials['EC2_SECRET_KEY']
        access_key = credentials['EC2_ACCESS_KEY']
        secret_key = credentials['EC2_SECRET_KEY']

        if req_type == 'delOutput':
            uuid = self.request.get('uuid')
            logging.debug('delOutput: uuid={0}'.format(uuid))

            try:
                job_type = self.request.get('job_type')

                if job_type == 'stochkit':
                    job = db.GqlQuery(
                        "SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND cloudDatabaseID = :2",
                        self.user.user_id(), uuid).get()
                    job.output_stored = 'False'
                    job.put()
                elif job_type == 'sensitivity':
                    job = sensitivity.SensitivityJobWrapper.all().filter(
                        'user_id =',
                        self.user.user_id()).filter('cloudDatabaseID =',
                                                    uuid).get()
                    job.output_stored = 'False'
                    job.outData = None
                    job.put()
                elif job_type == 'spatial':
                    job = spatial.SpatialJobWrapper.all().filter(
                        'user_id =',
                        self.user.user_id()).filter('cloudDatabaseID =',
                                                    uuid).get()
                    job.output_stored = 'False'
                    job.outData = None
                    job.put()

                service = backendservices(self.user_data)
                service.deleteTaskOutput(job)

                # delete the local output if any
                output_path = os.path.join(os.path.dirname(__file__),
                                           '../output/')
                if os.path.exists(str(output_path) + uuid):
                    shutil.rmtree(str(output_path) + uuid)

                result = {
                    'status': True,
                    'msg': 'Output deleted successfully.'
                }
            except Exception as e:
                logging.error(e)
                result = {
                    'status': False,
                    'msg': 'Fail to delete output in the cloud: ' + str(e)
                }

            self.response.write(json.dumps(result))
            return

        elif req_type == 'rerun':

            service = backendservices(self.user_data)

            job_type = self.request.get('job_type')
            uuid = self.request.get('uuid')
            logging.debug('rerun: uuid={0}'.format(uuid))

            logging.info('job uuid: {0}'.format(uuid))

            if not self.user_data.valid_credentials or not service.isOneOrMoreComputeNodesRunning(
            ):
                self.response.write(
                    json.dumps({
                        'status':
                        False,
                        'msg':
                        'There is no ' + instance_type +
                        ' node running. Launch one first?'
                    }))
                return

            if job_type == 'stochkit':

                job = db.GqlQuery(
                    "SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND cloudDatabaseID = :2",
                    self.user.user_id(), uuid).get()

                try:
                    logging.info('start to rerun the job {0}'.format(
                        str(uuid)))
                    # Set up CloudTracker with user credentials and specified UUID to rerun the job
                    ct = CloudTracker(access_key, secret_key, str(uuid),
                                      self.user_data.getBucketName())
                    has_prov = not ct.if_tracking()
                    # If there is no provenance data for this job, report an error to the user
                    if not has_prov:
                        result = {
                            'status':
                            False,
                            'msg':
                            "The job with this ID does not exist or cannot be reproduced."
                        }
                        self.response.content_type = 'application/json'
                        self.response.write(json.dumps(result))
                        return

                    params = ct.get_input()
                    logging.info("OUT_PUT SIZE: {0}".format(
                        params['output_size']))

                    now = datetime.datetime.now()  # avoid shadowing the time module
                    params['rerun_uuid'] = uuid
                    cloud_result = service.submit_cloud_task(params=params)

                    if not cloud_result["success"]:
                        e = cloud_result["exception"]
                        result = {
                            'status': False,
                            'msg': 'Cloud execution failed: ' + str(e)
                        }
                        self.response.write(json.dumps(result))
                        return
                    # The celery_pid is the Celery Task ID.
                    job.celeryPID = cloud_result["celery_pid"]
                    job.status = 'Running'
                    job.outData = None
                    job.output_stored = 'True'

                    job.startDate = now.strftime("%Y-%m-%d-%H-%M-%S")

                    job.put()
                    result = {
                        'status': True,
                        'msg': 'Job rerun submitted successfully.'
                    }

                except Exception as e:
                    result = {
                        'status': False,
                        'msg': 'Cloud execution failed: ' + str(e)
                    }

                self.response.write(json.dumps(result))
                return

            elif job_type == 'sensitivity':
                job = sensitivity.SensitivityJobWrapper.all().filter(
                    'user_id =',
                    self.user.user_id()).filter('cloudDatabaseID =',
                                                uuid).get()

                try:
                    ct = CloudTracker(access_key, secret_key, str(uuid),
                                      self.user_data.getBucketName())
                    has_prov = not ct.if_tracking()
                    # If there is no provenance data for this job, report an error to the user
                    if not has_prov:
                        result = {
                            'status':
                            False,
                            'msg':
                            "The job with this ID does not exist or cannot be reproduced."
                        }
                        self.response.content_type = 'application/json'
                        self.response.write(json.dumps(result))
                        return

                    params = ct.get_input()

                    now = datetime.datetime.now()  # avoid shadowing the time module

                    # execute task in cloud
                    params['rerun_uuid'] = uuid
                    cloud_result = service.submit_cloud_task(params=params)

                    if not cloud_result["success"]:
                        e = cloud_result["exception"]
                        result = {
                            'status': False,
                            'msg': 'Cloud execution failed: ' + str(e)
                        }
                        self.response.write(json.dumps(result))
                        return

                    # The celery_pid is the Celery Task ID.
                    job.status = "Running"
                    job.celeryPID = cloud_result["celery_pid"]
                    job.startTime = now.strftime("%Y-%m-%d-%H-%M-%S")
                    job.output_stored = 'True'
                    job.put()
                    result = {
                        'status': True,
                        'msg': 'Job rerun submitted successfully.'
                    }

                except Exception as e:
                    result = {
                        'status': False,
                        'msg': 'Cloud execution failed: ' + str(e)
                    }

                self.response.write(json.dumps(result))
                return
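
Several branches of this handler build a status dict, JSON-encode it, and return. A small hypothetical helper (not part of the original code) that factors out that pattern:

    import json

    def write_json(handler, status, msg):
        # Write a JSON status reply on the given webapp2-style handler.
        handler.response.content_type = 'application/json'
        handler.response.write(json.dumps({'status': status, 'msg': msg}))

    # Hypothetical usage inside the rerun branches:
    #     if not cloud_result["success"]:
    #         write_json(self, False, 'Cloud execution failed: ' + str(cloud_result["exception"]))
    #         return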
Example #47
0
    def post(self):
        """ Assemble the input to StochKit2 and submit the job (locally or via cloud). """

        reqType = self.request.get('reqType')

        if reqType == 'getFromCloud':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)

            # Unpack it to its local output location
            os.system('tar -xf {0}.tar'.format(job.cloudDatabaseID))
            job.outData = os.path.abspath('{0}/../output/{1}'.format(
                os.path.abspath(os.path.dirname(__file__)),
                job.cloudDatabaseID))

            job.stdout = os.path.join(job.outData, 'stdout.log')
            job.stderr = os.path.join(job.outData, 'stderr.log')

            # Clean up
            os.remove('{0}.tar'.format(job.cloudDatabaseID))

            # Save the updated status
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded'
                }))
            return
        elif reqType == 'getDataLocal':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addStochKitJob(job, globalOp=True, ignoreStatus=True)

                szip.close()

                # Save the updated status
                job.put()

            relpath = os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return
        elif reqType == 'delJob':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))

                if job.user_id == self.user.user_id():
                    job.delete(self)

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': True,
                        'msg': "Job deleted from the datastore."
                    }))
            except Exception as e:
                logging.exception(e)
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        'status': False,
                        'msg': "Error: {0}".format(e)
                    }))

            return
        elif reqType == 'redirectJupyterNotebook':
            try:
                job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
                #Check if notebook already exists, if not create one
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                # TODO Deterministic or Stochastic template
                indata = json.loads(job.indata)
                if indata['exec_type'] == 'deterministic':
                    notebook_template_path = os.path.abspath(
                        __file__ + '/../../../jupyter_notebook_templates'
                    ) + "/Deterministic.ipynb"
                else:
                    notebook_template_path = os.path.abspath(
                        __file__ + '/../../../jupyter_notebook_templates'
                    ) + "/Stochastic.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                if self.request.get('hostname'):
                    logging.info('hostname={0}'.format(
                        self.request.get('hostname')))
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                logging.info('redirect: {0}'.format(notebook_url))
                self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                self.response.write('Error: {0}'.format(e))
            return
        elif reqType == 'jobInfo':
            job = StochKitJobWrapper.get_by_id(int(self.request.get('id')))
            indata = json.loads(job.indata)

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            if job.status == "Finished":
                try:
                    if (job.resource
                            in backendservices.SUPPORTED_CLOUD_RESOURCES
                            and job.output_stored == 'False') or (
                                job.resource
                                in backendservices.SUPPORTED_CLOUD_RESOURCES
                                and job.outData is None):
                        self.response.headers[
                            'Content-Type'] = 'application/json'
                        self.response.write(
                            json.dumps({
                                "status":
                                "Finished",
                                "values": [],
                                "job":
                                JobManager.getJob(self,
                                                  job.key().id())
                            }))
                        return
                    else:
                        outputdir = job.outData
                        # Load all data from file in JSON format
                        if indata['exec_type'] == 'stochastic':
                            tid = self.request.get('tid')

                            if tid != '' and tid != 'mean':
                                outfile = '/result/trajectories/trajectory{0}.txt'.format(
                                    tid)

                                vhandle = open(outputdir + outfile, 'r')

                                values = {'time': [], 'trajectories': {}}
                                columnToList = []
                                for i, line in enumerate(vhandle):
                                    if i == 0:
                                        names = line.split()
                                        for name in names:
                                            if name == 'time':
                                                columnToList.append(
                                                    values['time'])
                                            else:
                                                values['trajectories'][name] = [
                                                ]  # start a new timeseries for this name
                                                columnToList.append(
                                                    values['trajectories']
                                                    [name]
                                                )  # Store a reference here for future use
                                    else:
                                        for storage, value in zip(
                                                columnToList,
                                                map(float, line.split())):
                                            storage.append(value)
                                vhandle.close()
                            else:
                                outfile = '/result/stats/means.txt'

                                vhandle = open(outputdir + outfile, 'r')

                                values = {'time': [], 'trajectories': {}}
                                columnToList = []
                                for i, line in enumerate(vhandle):
                                    if i == 0:
                                        names = line.split()
                                        for name in names:
                                            if name == 'time':
                                                columnToList.append(
                                                    values['time'])
                                            else:
                                                values['trajectories'][name] = [
                                                ]  # start a new timeseries for this name
                                                columnToList.append(
                                                    values['trajectories']
                                                    [name]
                                                )  # Store a reference here for future use
                                    else:
                                        for storage, value in zip(
                                                columnToList,
                                                map(float, line.split())):
                                            storage.append(value)
                                vhandle.close()
                        else:
                            outfile = '/result/output.txt'
                            values = {'time': [], 'trajectories': {}}

                            #if not os.path.isfile(outputdir + outfile):

                            vhandle = open(outputdir + outfile, 'r')

                            columnToList = []
                            for i, line in enumerate(vhandle):
                                if i == 0:
                                    continue
                                elif i == 1:
                                    names = line.split()
                                    for name in names:
                                        if name == 'time':
                                            columnToList.append(values['time'])
                                        else:
                                            values['trajectories'][name] = [
                                            ]  # start a new timeseries for this name
                                            columnToList.append(
                                                values['trajectories'][name]
                                            )  # Store a reference here for future use
                                elif i == 2:
                                    continue
                                elif i == 3:
                                    for storage, value in zip(
                                            columnToList,
                                            map(float, line.split())):
                                        storage.append(value)
                                elif i == 4:
                                    continue
                                else:
                                    for storage, value in zip(
                                            columnToList,
                                            map(float, line.split())):
                                        storage.append(value)
                            vhandle.close()

                    self.response.headers['Content-Type'] = 'application/json'
                    result = {
                        "status": "Finished",
                        "values": values,
                        "job": JobManager.getJob(self,
                                                 job.key().id())
                    }
                    logging.debug("result = \n\n{}".format(result))
                    self.response.write(json.dumps(result))
                    return

                except Exception as e:
                    traceback.print_exc()
                    job.status = "Failed"
                    job.put()
                    logging.error(
                        "Failed to parse output data. Assuming job failed and continuing"
                    )

            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ""
                stderr = ""

                if job.outData is not None:
                    if os.path.isfile(job.outData + '/stdout'):
                        fstdoutHandle = open(job.outData + '/stdout', 'r')
                    else:
                        fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()

                    if os.path.isfile(job.outData + '/stderr'):
                        fstderrHandle = open(job.outData + '/stderr', 'r')
                    else:
                        fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()

                self.response.write(
                    json.dumps({
                        "status": "Failed",
                        "job": JobManager.getJob(self,
                                                 job.key().id()),
                        "stdout": stdout,
                        "stderr": stderr
                    }))
            else:
                raise Exception(
                    'This page should never be accessed if the job is not Finished or Failed (current state of job {0}: {1})'
                    .format(job.key().id(), job.status))
        else:
            # params is a dict that contains all of the submitted form's elements
            params = json.loads(self.request.get('data'))

            self.response.headers['Content-Type'] = 'application/json'
            job = db.GqlQuery(
                "SELECT * FROM StochKitJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), params["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            backend_services = backendservices(self.user_data)

            # Create a stochkit_job instance
            try:
                if params['resource'] == "local":
                    job = self.runStochKitLocal(params)
                elif params['resource'] == 'cloud':
                    job = self.runCloud(params)
                elif params['resource'] == 'molns':
                    job = self.runMolns(params)
                else:
                    raise Exception("Unknown resource {0}".format(
                        params["resource"]))
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": job.key().id()
                    }))
            except Exception as e:
                traceback.print_exc()

                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": str(e)
                    }))
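
The Failed branch above reads stdout/stderr by preferring the bare file name and falling back to the .log variant. A compact sketch of that fallback as a helper (the function name is hypothetical):

    import os

    def read_log(out_dir, base):
        # Try e.g. 'stdout' first, then 'stdout.log'; return '' if neither exists.
        for candidate in (base, base + '.log'):
            path = os.path.join(out_dir, candidate)
            if os.path.isfile(path):
                with open(path, 'r') as fh:
                    return fh.read()
        return ''

    # stdout = read_log(job.outData, 'stdout')
    # stderr = read_log(job.outData, 'stderr')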
Example #48
0
    def post(self):
        reqType = self.request.get('reqType')

        if reqType == "jobInfo":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            jsonJob = {
                "id": int(self.request.get('id')),
                "userId": job.user_id,
                "jobName": job.name,
                "startTime": job.startTime,
                "indata": json.loads(job.indata),
                "outData": job.outData,
                "status": job.status,
                "resource": job.resource,
                "uuid": job.cloudDatabaseID,
                "output_stored": job.output_stored,
                "modelName": job.modelName
            }

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            if job.status == "Finished":
                if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES and job.outData is None:
                    # Let the user decide if they want to download it
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            "status": "Finished",
                            "values": [],
                            "job": jsonJob
                        }))
                    return
                outputdir = job.outData
                try:
                    # Load all data from file in JSON format
                    vhandle = open(outputdir + '/result/output.txt', 'r')
                    values = {
                        'time': [],
                        'trajectories': {},
                        'sensitivities': {},
                        'parameters': {}
                    }
                    parameters = []
                    columnToList = []
                    for i, line in enumerate(vhandle):
                        if i == 0:
                            continue
                        elif i == 1:
                            names = line.split()

                            parameterNames = []

                            for name in names:
                                if ':' in name:
                                    specie, parameter = name.split(':')
                                    if parameter not in parameterNames:
                                        parameterNames.append(parameter)

                            for name in names:
                                if name == 'time':
                                    columnToList.append(values['time'])
                                elif ':' in name:
                                    specie, parameter = name.split(':')

                                    if specie not in values['sensitivities']:
                                        values['sensitivities'][specie] = {}

                                    values[
                                        'sensitivities'][specie][parameter] = [
                                        ]  # Make a new timeseries for sensitivity
                                    columnToList.append(
                                        values['sensitivities'][specie]
                                        [parameter]
                                    )  # Store a reference here for future use
                                else:
                                    values['trajectories'][name] = [
                                    ]  # start a new timeseries for this name
                                    columnToList.append(
                                        values['trajectories'][name]
                                    )  # Store a reference here for future use
                        elif i == 2:
                            parameters = map(float, line.split())
                        elif i == 3:
                            for storage, value in zip(columnToList,
                                                      map(float,
                                                          line.split())):
                                storage.append(value)
                        elif i == 4:
                            continue
                        else:
                            for storage, value in zip(columnToList,
                                                      map(float,
                                                          line.split())):
                                storage.append(value)
                    vhandle.close()

                    values['parameters'] = dict(zip(parameterNames,
                                                    parameters))

                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(
                        json.dumps({
                            "status": "Finished",
                            "values": values,
                            "job": jsonJob
                        }))
                    return
                except IOError as ioe:
                    logging.error("caught error {0}".format(ioe))
                    job.status = "Failed"
                    logging.error("put job.status = Failed")
                    job.put()

            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ''
                stderr = ''
                try:
                    fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()
                    fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()
                except IOError as ioe:
                    logging.error(
                        "could not open error log files in {0}".format(
                            job.outData))

                self.response.write(
                    json.dumps({
                        "status": "Failed",
                        "stdout": stdout,
                        "stderr": stderr,
                        "job": jsonJob
                    }))
                return

            # Neither Finished nor Failed: report the job's current status (e.g. Pending)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({"status": job.status, "job": jsonJob}))
        elif reqType == "getFromCloud":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)
            # Unpack it to its local output location
            os.system('tar -xf ' + job.cloudDatabaseID + '.tar')
            job.outData = os.path.dirname(os.path.abspath(
                __file__)) + '/../output/' + job.cloudDatabaseID
            job.outData = os.path.abspath(job.outData)
            # jsonJob["outData"] = job.outData
            # Clean up
            os.remove(job.cloudDatabaseID + '.tar')
            # Update the db entry
            job.put()

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded'
                }))
            return
        elif reqType == 'redirectJupyterNotebook':
            try:
                job = SensitivityJobWrapper.get_by_id(
                    int(self.request.get('id')))
                # Check whether the notebook already exists; if not, create one from the template
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(
                    os.path.abspath(job.outData),
                    os.path.abspath(__file__ + '/../../../'))
                notebook_file_path = os.path.abspath(
                    job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(
                    __file__ + '/../../../jupyter_notebook_templates'
                ) + "/Sensitivity.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(
                        notebook_file_path, notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                if self.request.get('hostname'):
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(
                    proto, host, port, local_path, notebook_filename)
                self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                self.response.write('Error: {0}'.format(e))
            return
        elif reqType == "getLocalData":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(
                    os.path.dirname(__file__) + '/../static/tmp/'),
                                             preferredName=job.name + "_")

                job.zipFileName = szip.getFileName()

                szip.addSensitivityJob(job, globalOp=True, ignoreStatus=True)

                szip.close()

                # Save the updated status
                job.put()

            relpath = os.path.relpath(
                job.zipFileName,
                os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    'status': True,
                    'msg': 'Job downloaded',
                    'url': relpath
                }))
            return

        elif reqType == "delJob":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            job.delete(self)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(
                json.dumps({
                    "status": True,
                    "msg": "Job deleted"
                }))

        elif reqType == "newJob":
            data = json.loads(self.request.get('data'))

            job = db.GqlQuery(
                "SELECT * FROM SensitivityJobWrapper WHERE user_id = :1 AND name = :2",
                self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Job name must be unique"
                    }))
                return

            try:
                # Either local or cloud
                if data["resource"] == "local":
                    job = self.runLocal(data)

                elif data["resource"] == "cloud":
                    job = self.runCloud(data)
                else:
                    raise Exception("Unknown resource {0}".format(
                        data["resource"]))

                self.response.write(
                    json.dumps({
                        "status": True,
                        "msg": "Job launched",
                        "id": job.key().id()
                    }))
                return
            except Exception as e:
                logging.exception(e)
                self.response.write(
                    json.dumps({
                        "status": False,
                        "msg": "Error: {0}".format(e)
                    }))
                return
        else:
            self.response.write(
                json.dumps({
                    "status": False,
                    "msg": "No data submitted"
                }))
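
A minimal client-side sketch of how this reqType-dispatched POST handler might be
exercised. The '/sensitivity' route, host, and ids are assumptions (the URL mapping
is not shown in this example), and the authentication the handler performs via
self.user is ignored here:

    import json
    import requests

    BASE = 'http://localhost:8080/sensitivity'  # hypothetical route

    # Query the status/results of an existing job
    info = requests.post(BASE, data={'reqType': 'jobInfo', 'id': 42}).json()
    print(info['status'])

    # Launch a new cloud job; the payload keys mirror what runCloud (Example #50) reads
    payload = {'jobName': 'dimer-sensitivity', 'resource': 'cloud', 'id': 7,
               'time': 100, 'increment': 0.1, 'selections': {'pc': {'k1': True}}}
    resp = requests.post(BASE, data={'reqType': 'newJob', 'data': json.dumps(payload)}).json()
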
Example #49
    def post(self):       
        reqType = self.request.get('reqType')

        if reqType == "jobInfo":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))
            
            jsonJob = { "id": int(self.request.get('id')),
                        "userId" : job.user_id,
                        "jobName" : job.name,
                        "startTime" : job.startTime,
                        "indata" : json.loads(job.indata),
                        "outData" : job.outData,
                        "status" : job.status,
                        "resource" : job.resource,
                        "uuid": job.cloudDatabaseID,
                        "output_stored": job.output_stored,
                        "modelName" : job.modelName }
            
            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            if job.status == "Finished":
                if job.resource in backendservices.SUPPORTED_CLOUD_RESOURCES and job.outData is None:
                    # Let the user decide if they want to download it
                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ "status" : "Finished",
                                                     "values" : [],
                                                     "job" : jsonJob}))
                    return
                outputdir = job.outData
                try:
                    # Load all data from file in JSON format
                    vhandle = open(outputdir + '/result/output.txt', 'r')
                    values = { 'time' : [], 'trajectories' : {}, 'sensitivities' : {}, 'parameters' : {}}
                    parameters = []
                    columnToList = []
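                    # Layout of result/output.txt as inferred from the parsing loop
                    # below (a reading aid, not part of the original code):
                    #   line 0: header, ignored
                    #   line 1: column names -- 'time', species names, and
                    #           'species:parameter' pairs for sensitivities
                    #   line 2: parameter values, in parameterNames order
                    #   line 3 and lines 5+: whitespace-separated data rows,
                    #           one value per column
                    #   line 4: ignored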
                    for i, line in enumerate(vhandle):
                        if i == 0:
                            continue
                        elif i == 1:
                            names = line.split()

                            parameterNames = []

                            for name in names:
                                if ':' in name:
                                    specie, parameter = name.split(':')
                                    if parameter not in parameterNames:
                                        parameterNames.append(parameter)
                            
                            for name in names:
                                if name == 'time':
                                    columnToList.append(values['time'])
                                elif ':' in name:
                                    specie, parameter = name.split(':')

                                    if specie not in values['sensitivities']:
                                        values['sensitivities'][specie] = {}

                                    values['sensitivities'][specie][parameter] = [] # Make a new timeseries for sensitivity
                                    columnToList.append(values['sensitivities'][specie][parameter]) # Store a reference here for future use
                                else:
                                    values['trajectories'][name] = [] # start a new timeseries for this name
                                    columnToList.append(values['trajectories'][name]) # Store a reference here for future use
                        elif i == 2:
                            parameters = map(float, line.split())
                        elif i == 3:
                            for storage, value in zip(columnToList, map(float, line.split())):
                                storage.append(value)
                        elif i == 4:
                            continue
                        else:
                            for storage, value in zip(columnToList, map(float, line.split())):
                                storage.append(value)
                    vhandle.close()

                    values['parameters'] = dict(zip(parameterNames, parameters))

                    self.response.headers['Content-Type'] = 'application/json'
                    self.response.write(json.dumps({ "status" : "Finished",
                                                     "values" : values,
                                                     "job" : jsonJob }))
                    return
                except IOError as ioe:
                    logging.error("caught error {0}".format(ioe))
                    job.status = "Failed"
                    logging.error("put job.status = Failed")
                    job.put()
                    
            if job.status == "Failed":
                self.response.headers['Content-Type'] = 'application/json'

                stdout = ''
                stderr = ''
                try:
                    fstdoutHandle = open(job.outData + '/stdout.log', 'r')
                    stdout = fstdoutHandle.read()
                    fstdoutHandle.close()
                    fstderrHandle = open(job.outData + '/stderr.log', 'r')
                    stderr = fstderrHandle.read()
                    fstderrHandle.close()
                except IOError as ioe:
                    logging.error("could not open error log files in {0}".format(job.outData))

                self.response.write(json.dumps({ "status" : "Failed",
                                                 "stdout" : stdout,
                                                 "stderr" : stderr,
                                                 "job" : jsonJob}))
                return

            # Neither Finished nor Failed: report the job's current status (e.g. Pending)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ "status" : job.status, "job" : jsonJob }))
        elif reqType == "getFromCloud":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            service = backendservices(self.user_data)
            service.fetchOutput(job)
            # Unpack it to its local output location
            os.system('tar -xf ' + job.cloudDatabaseID + '.tar')
            job.outData = os.path.dirname(os.path.abspath(__file__))+'/../output/'+job.cloudDatabaseID
            job.outData = os.path.abspath(job.outData)
            # jsonJob["outData"] = job.outData
            # Clean up
            os.remove(job.cloudDatabaseID+'.tar')
            # Update the db entry
            job.put()
            
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded'}))
            return
        elif reqType == 'redirectJupyterNotebook':
            try:
                job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))
                # Check whether the notebook already exists; if not, create one from the template
                notebook_filename = "{0}.ipynb".format(job.name)
                local_path = os.path.relpath(os.path.abspath(job.outData), os.path.abspath(__file__+'/../../../'))
                notebook_file_path =  os.path.abspath(job.outData) + "/" + notebook_filename
                notebook_template_path = os.path.abspath(__file__+'/../../../jupyter_notebook_templates')+"/Sensitivity.ipynb"
                if not os.path.isfile(notebook_file_path):
                    logging.info("Creating {0} from {1}".format(notebook_file_path,notebook_template_path))
                    shutil.copyfile(notebook_template_path, notebook_file_path)

                if self.request.get('hostname'):
                    host = self.request.get('hostname')
                else:
                    host = 'localhost'
                port = 9999
                proto = 'http'
                #
                # return the url of the notebook
                notebook_url = '{0}://{1}:{2}/notebooks/{3}/{4}'.format(proto,host,port,local_path,notebook_filename)
                self.redirect(notebook_url)
            except Exception as e:
                logging.error("Error in openJupyterNotebook: {0}".format(e))
                self.response.write('Error: {0}'.format(e))
            return   
        elif reqType == "getLocalData":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))
            
            if not job.zipFileName:
                szip = exportimport.SuperZip(os.path.abspath(os.path.dirname(__file__) + '/../static/tmp/'), preferredName = job.name + "_")
                
                job.zipFileName = szip.getFileName()

                szip.addSensitivityJob(job, globalOp = True, ignoreStatus = True)
                
                szip.close()

                # Save the updated status
                job.put()
            
            
            relpath = os.path.relpath(job.zipFileName, os.path.abspath(os.path.dirname(__file__) + '/../'))

            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ 'status' : True,
                                             'msg' : 'Job downloaded',
                                             'url' : relpath }))
            return

        elif reqType == "delJob":
            job = SensitivityJobWrapper.get_by_id(int(self.request.get('id')))

            if self.user.user_id() != job.user_id:
                self.response.headers['Content-Type'] = 'application/json'
                self.response.write(json.dumps(["Not the right user"]))
                return

            job.delete(self)
            self.response.headers['Content-Type'] = 'application/json'
            self.response.write(json.dumps({ "status" : True,
                                             "msg" : "Job deleted"}));

        elif reqType == "newJob":
            data = json.loads(self.request.get('data'))

            job = db.GqlQuery("SELECT * FROM SensitivityJobWrapper WHERE user_id = :1 AND name = :2", self.user.user_id(), data["jobName"].strip()).get()

            if job is not None:
                self.response.write(json.dumps({"status" : False,
                                                "msg" : "Job name must be unique"}))
                return

            try:
                # Either local or cloud
                if data["resource"] == "local":
                    job = self.runLocal(data)

                elif data["resource"] == "cloud":
                    job = self.runCloud(data)
                else:
                    raise Exception("Unknown resource {0}".format(data["resource"]))

                self.response.write(json.dumps( { "status" : True,
                                                  "msg" : "Job launched",
                                                  "id" : job.key().id() }))
                return
            except Exception as e:
                logging.exception(e)
                self.response.write(json.dumps({ "status" : False,
                                                 "msg" : "Error: {0}".format(e) }))
                return
        else:
            self.response.write(json.dumps({"status" : False,
                                            "msg" : "No data submitted"}))
Example #50
    def runCloud(self, data):
        self.user_data.set_selected(1)
        job = SensitivityJobWrapper()

        service = backendservices(self.user_data)

        if not service.isOneOrMoreComputeNodesRunning():
            raise Exception('No cloud computing resources found. (Have they been started?)')

        job.user_id = self.user.user_id()
        model = modeleditor.StochKitModelWrapper.get_by_id(data["id"])
        job.startTime = time.strftime("%Y-%m-%d-%H-%M-%S")
        job.name = data["jobName"]
        job.status = "Pending"
        job.modelName = model.name

        runtime = float(data["time"])
        dt = float(data["increment"])

        job.indata = json.dumps(data)

        parameters = []
        for parameter in data['selections']["pc"]:
            if data['selections']["pc"][parameter]:
                parameters.append(parameter)
        stochkitmodel = model.createStochKitModel() 

        # Hack: population models store the stochastic mass-action rate constant.
        # Round-trip the model through StochML and halve the rate of dimerization
        # reactions (2X -> ...), since the deterministic (ODE) rate constant used
        # by the sensitivity solver is half the stochastic one.
        if stochkitmodel.units.lower() == 'population':
            document = stochkitmodel.serialize()

            stochkitmodel = StochMLDocument.fromString(document).toModel(model.name)

            for reactionN in stochkitmodel.getAllReactions():
                reaction = stochkitmodel.getAllReactions()[reactionN]
                if reaction.massaction:
                    if len(reaction.reactants) == 1 and reaction.reactants.values()[0] == 2:
                        reaction.marate.setExpression(reaction.marate.expression + ' / 2')

        params = {
            "job_type": "sensitivity",
            "document": str( stochkitmodel.serialize() ),
            "paramstring": "stochkit_ode.py --sensi --parameters {0} -t {1} -i {2}".format(
                                " ".join(parameters), runtime, int(runtime / dt)
                            ),
            "bucketname": self.user_data.getBucketName()
        }
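        # Worked example (illustrative values, not from a real job): with
        # parameters ['k1', 'k2'], runtime = 100.0 and dt = 0.5, paramstring
        # becomes "stochkit_ode.py --sensi --parameters k1 k2 -t 100.0 -i 200",
        # i.e. int(runtime / dt) output intervals.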

        # Send the task to the backend
        cloud_result = service.submit_cloud_task(params)

        if not cloud_result["success"]:
            # Abort with a descriptive error; the newJob handler reports it to the user
            raise Exception("Cloud task submission failed: {0}".format(cloud_result))

        job.cloudDatabaseID = cloud_result["db_id"]
        job.celeryPID = cloud_result["celery_pid"]
        job.resource = cloud_result['resource']
        job.outData = None
        job.zipFileName = None
        job.output_stored = 'True'
        job.put()
        return job
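
The population-model adjustment above reduces to one rule: a mass-action
dimerization reaction (2X -> ...) stores the stochastic rate constant c, while
the ODE-based sensitivity solver expects the deterministic constant k = c / 2.
A standalone restatement of that rule (a hypothetical helper, not part of the
application code):

    def ode_rate_expression(reaction):
        """Return the mass-action rate expression adjusted for ODE semantics.

        Mirrors the loop in runCloud above: only reactions with a single
        reactant species of stoichiometry 2 have their rate halved.
        """
        expr = reaction.marate.expression
        if reaction.massaction and len(reaction.reactants) == 1 \
                and list(reaction.reactants.values())[0] == 2:
            return expr + ' / 2'
        return expr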