def updateExecutions(self):
    from app.dbcommon import DBCommon
    from app.status import StatusTypes

    dbcommon = DBCommon(self.dbSession)
    executions = dbcommon.getAllRunningExecutions()
    for execution in executions:
        execution.execution_status = StatusTypes.code["ExecutionNotComplete"]
    self.dbSession.commit()
def updateProjectJobs(self):
    from app.dbcommon import DBCommon
    from app import app

    dbcommon = DBCommon(self.dbSession)
    projectJobs = dbcommon.getAllProjectExportJobs()
    for job in projectJobs:
        job.run_frequency_seconds = app.config[
            "EXPORT_JOB_SCHEDULE_DEFAULT_FREQUENCY_SECONDS"]
    self.dbSession.commit()
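# --- Illustrative usage sketch (assumption, not in the original source) ---
# updateExecutions() and updateProjectJobs() read like startup/upgrade
# fixups. Assuming they belong to a hypothetical Upgrade class that owns a
# dbSession, startup code might invoke them like this:

def runStartupFixups(dbSession):  # hypothetical helper, for illustration
    upgrade = Upgrade(dbSession)  # hypothetical class owning the methods above
    upgrade.updateExecutions()    # mark stale "running" executions incomplete
    upgrade.updateProjectJobs()   # re-apply the configured export frequency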
class BaseJob(object):
    def __init__(self, jobID, scheduler):
        self._waitCheckIntervalSeconds = 5
        self._scheduler = scheduler
        self._dbSession = db.dbSession
        self._dbCommon = DBCommon(self._dbSession)
        self._jobID = jobID
        self._job = self._dbCommon.getJob(jobID)
        self._jobRunID = self._dbCommon.getNextJobRunID(jobID)
        self._logger = logging.getLogger(__name__)

    def isJobAlreadyRunning(self):
        return self._dbCommon.isJobRunning(self._jobID)

    def run(self):
        for execution in self._dbCommon.getRunningExecutionsForJobRun(
                self._jobID, self._jobRunID):
            self._scheduler.addExecution(execution.execution_id)

    def wait(self):
        while self._dbCommon.getRunningExecutionsForJobRun(
                self._jobID, self._jobRunID):
            sleep(self._waitCheckIntervalSeconds)

    def addExecution(self, execution):
        self._dbSession.add(execution)
        # Need to commit to generate execution_id to use below
        self._dbSession.commit()
        self._dbSession.add(
            models.JobRun(job_id=self._jobID,
                          job_run_id=self._jobRunID,
                          execution_type=execution.execution_type,
                          associated_execution_id=execution.execution_id,
                          last_successful_execution_id=None))
        self._dbSession.commit()

    def addSubTasks(self, tasks):
        for task in tasks:
            execution = models.Execution(
                execution_type=task,
                job_run_id=self._jobRunID,
                job_id=self._jobID,
                execution_details=encrypter.encrypt(
                    json.dumps({
                        "exception": {
                            "EXCEPTION_TYPE": None,
                            "EXCEPTION_DETAILS": None
                        }
                    })))
            execution.execution_status = StatusTypes.code["Scheduled"]
            self.addExecution(execution)
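# --- Illustrative sketch (assumption, not in the original source) ---
# The scheduler below invokes a job class as the APScheduler callable, so a
# concrete BaseJob subclass can do all of its work in __init__: queue the
# sub-task executions, hand them to the scheduler, then wait. This class and
# flow are hypothetical; the task names mirror the ProjectExport taskState
# keys used elsewhere in this module.

class ExampleProjectExportJob(BaseJob):  # for illustration only
    def __init__(self, jobID, scheduler):
        super(ExampleProjectExportJob, self).__init__(jobID, scheduler)
        if not self.isJobAlreadyRunning():
            self.addSubTasks([
                "ProjectFilesExportTask",
                "ProjectDockerImageExportTask",
                "ProjectExportReportToS3Task",
            ])
            self.run()   # schedule each queued execution
            self.wait()  # poll until no executions remain running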
class Cleanup(object):
    def __init__(self, dbSession):
        self.dbSession = dbSession
        self.dbCommon = DBCommon(self.dbSession)

    def pruneMetrics(self):
        dt = datetime.today() - timedelta(
            days=app.config.get("DATABASE_HISTORY_AGE_DAYS", 30))
        metrics = self.dbCommon.getAllMetricsPriorToDatetime(dt)
        metrics.delete(synchronize_session='fetch')
        self.dbSession.commit()

    def pruneExecutions(self):
        dt = datetime.today() - timedelta(
            days=app.config.get("DATABASE_HISTORY_AGE_DAYS", 30))
        executions = self.dbCommon.getAllExecutionsPriorToDatetime(dt)
        executionIDs = [x[0] for x in executions.values("execution_id")]
        jobruns = self.dbCommon.getJobRunByExecutionIDs(executionIDs)
        jobruns.delete(synchronize_session='fetch')
        executions.delete(synchronize_session='fetch')
        self.dbSession.commit()
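# --- Illustrative usage sketch (assumption, not in the original source) ---
# Cleanup deletes rows older than DATABASE_HISTORY_AGE_DAYS (default 30).
# Assuming db.dbSession is the shared session, a prune pass might be:

def runDatabasePrune():  # hypothetical helper, for illustration only
    from app import db

    cleanup = Cleanup(db.dbSession)
    cleanup.pruneMetrics()     # drop old health-metrics rows
    cleanup.pruneExecutions()  # drop old executions and their job-run rows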
class AdministrationAPI(object):
    def __init__(self, dbSession):
        self.dbSession = dbSession
        self.dbCommon = DBCommon(self.dbSession)
        self.healthMetrics = HealthMetrics()

    def health(self):
        import pytz
        import datetime

        (respCode, version) = self.version()
        collectionTimestamp = datetime.datetime.utcnow()
        dominoAPIHealthy = None
        dominoDockerRegistryHealthy = None
        externalDockerRegistryHealthy = None
        S3BucketHealthy = None
        metrics = self.dbCommon.getLatestHealthMetrics()
        if metrics:
            collectionTimestamp = metrics.collection_timestamp
            dominoAPIHealthy = metrics.domino_api_healthy
            dominoDockerRegistryHealthy = metrics.domino_docker_registry_healthy
            externalDockerRegistryHealthy = metrics.external_docker_registry_healthy
            S3BucketHealthy = metrics.external_s3_bucket_healthy
        healthStatus = {
            "overall_healthy":
            dominoAPIHealthy and dominoDockerRegistryHealthy
            and externalDockerRegistryHealthy and S3BucketHealthy,
            "api_version": version["api_version"],
            "health_check_timestamp":
            str(pytz.utc.localize(collectionTimestamp)),
            "domino_platform_connection_healthy": dominoAPIHealthy,
            "domino_registry_connection_healthy": dominoDockerRegistryHealthy,
            "s3_connection_healthy": S3BucketHealthy,
            "external_registry_connection_healthy":
            externalDockerRegistryHealthy,
            "last_successful_backup_job_timestamp": None
        }
        return (respCode, healthStatus)

    def version(self):
        respCode = 200
        version = {"api_version": app.config["API_VERSION"]}
        return (respCode, version)
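# --- Illustrative usage sketch (assumption, not in the original source) ---
# health() returns an HTTP-style (code, payload) tuple built from the most
# recently persisted metrics row, so a route handler (hypothetical) can
# simply proxy it:

def healthEndpoint():  # hypothetical view function, for illustration
    import json
    from app import db

    api = AdministrationAPI(db.dbSession)
    respCode, payload = api.health()
    return json.dumps(payload), respCode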
class BaseExecution(object):
    def __init__(self, executionID, scheduler):
        self._scheduler = scheduler
        self._dbSession = db.dbSession
        self._dbCommon = DBCommon(self._dbSession)
        self._execution = self._dbCommon.getExecution(executionID)
        self._jobRun = self._dbCommon.getJobRun(self._execution.job_id,
                                                self._execution.job_run_id,
                                                self._execution.execution_type)
        self._logger = logging.getLogger(__name__)
        try:
            self.start()
            self.run()
        finally:
            self.stop()

    def start(self):
        self.setStartTimestamp()

    def run(self):
        from app import app

        if not self._execution.jobs.job_active:
            self.setExecutionStatus(StatusTypes.code["Disabled"])
        else:
            self.setExecutionStatus(StatusTypes.code["Running"])
            try:
                taskStatus = self.defaultTask(
                    timeout=app.config["JOB_TASK_TIMEOUT_IN_SECONDS"])
                if taskStatus:
                    self.setExecutionStatus(taskStatus)
                else:
                    self.setExecutionStatus(StatusTypes.code["Completed"])
            except Exception as e:
                exceptionType = type(e).__name__
                self.setExecutionStatus(
                    StatusTypes.code.get(exceptionType,
                                         StatusTypes.code["UnknownError"]))
                self.saveExceptionDetails(exceptionType, str(e))
                raise

    def stop(self):
        self.setEndTimestamp()

    @stopit.threading_timeoutable(
        default=StatusTypes.code["ExecutionRunTimeout"])
    def defaultTask(self):
        pass

    def updateJobRun(self, successfulExecutionID=None):
        if successfulExecutionID:
            self._jobRun.last_successful_execution_id = successfulExecutionID
        else:
            self._jobRun.last_successful_execution_id = self._execution.execution_id
        self._jobRun.job_run_updated_timestamp = DBHelpers.now()
        self._dbSession.commit()

    def updateJobTaskStates(self, taskStates):
        jobDetails = json.loads(
            encrypter.decrypt(self._execution.jobs.job_details))
        if "taskState" not in jobDetails:
            jobDetails["taskState"] = {}
        for taskUpdate in taskStates:
            task = taskUpdate.get("task", None)
            taskInfo = taskUpdate.get("taskInfo", {})
            if task in jobDetails["taskState"]:
                jobDetails["taskState"][task].update(taskInfo)
            elif task:
                jobDetails["taskState"][task] = taskInfo
        self._execution.jobs.job_details = encrypter.encrypt(
            json.dumps(jobDetails))
        self._dbSession.commit()

    def updateExecutionDetails(self, newInfo):
        executionDetails = {}
        if self._execution.execution_details:
            executionDetails = json.loads(
                encrypter.decrypt(self._execution.execution_details))
        executionDetails.update(newInfo)
        self._execution.execution_details = encrypter.encrypt(
            json.dumps(executionDetails))
        self._dbSession.commit()

    def saveExceptionDetails(self, exceptionType, exceptionMessage):
        self.updateExecutionDetails({
            "exception": {
                "EXCEPTION_TYPE": exceptionType,
                "EXCEPTION_DETAILS": exceptionMessage
            }
        })

    def setExecutionStatus(self, statusCode):
        self._execution.execution_status = statusCode
        self._dbSession.commit()

    def setStartTimestamp(self):
        self._execution.execution_started_timestamp = DBHelpers.now()
        self._dbSession.commit()

    def setEndTimestamp(self):
        self._execution.execution_ended_timestamp = DBHelpers.now()
        self._dbSession.commit()
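# --- Illustrative sketch (assumption, not in the original source) ---
# A concrete task presumably subclasses BaseExecution and overrides
# defaultTask(); the stopit decorator supplies the `timeout` keyword that
# run() passes and returns the ExecutionRunTimeout status code if the task
# overruns. This subclass is hypothetical:

class ExampleNoOpTask(BaseExecution):  # for illustration only
    @stopit.threading_timeoutable(
        default=StatusTypes.code["ExecutionRunTimeout"])
    def defaultTask(self):
        # Do the real work here. Return None so run() records "Completed",
        # or return an explicit StatusTypes.code[...] value instead.
        self._logger.info("No-op task ran for job %s",
                          self._execution.job_id)
        return None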
def updateServiceJobs(self):
    from app.dbcommon import DBCommon
    import app.models as models
    from app import app

    dbcommon = DBCommon(self.dbSession)

    # Check if no jobs scheduled
    s3ExportJobs = dbcommon.getServicesJobs("AllExportJobsS3Status")
    if not s3ExportJobs:
        job = models.Job(
            job_type="AllExportJobsS3Status",
            job_user=None,
            job_project=None,
            job_export_group=None,
            job_export_project=None,
            run_frequency_seconds=app.config[
                "EXPORTS_PROJECT_FILES_S3_TOP_LEVEL_LOG_FREQUENCY_SECONDS"],
            job_secrets=None,
            job_details="")
        self.dbSession.add(job)
        self.dbSession.commit()
    else:
        for job in s3ExportJobs:
            job.run_frequency_seconds = app.config[
                "EXPORTS_PROJECT_FILES_S3_TOP_LEVEL_LOG_FREQUENCY_SECONDS"]
        self.dbSession.commit()

    metricsCollectionJobs = dbcommon.getServicesJobs("HealthMetricsCollection")
    if not metricsCollectionJobs:
        job = models.Job(
            job_type="HealthMetricsCollection",
            job_user=None,
            job_project=None,
            job_export_group=None,
            job_export_project=None,
            run_frequency_seconds=app.config[
                "HEALTHCHECK_SCHEDULE_FREQUENCY_SECONDS"],
            job_secrets=None,
            job_details="")
        self.dbSession.add(job)
        self.dbSession.commit()
    else:
        for job in metricsCollectionJobs:
            job.run_frequency_seconds = app.config[
                "HEALTHCHECK_SCHEDULE_FREQUENCY_SECONDS"]
        self.dbSession.commit()

    databasePruneJobs = dbcommon.getServicesJobs("DatabasePrune")
    if not databasePruneJobs:
        job = models.Job(
            job_type="DatabasePrune",
            job_user=None,
            job_project=None,
            job_export_group=None,
            job_export_project=None,
            run_frequency_seconds=app.config[
                "DATABASE_PRUNE_FREQUENCY_SECONDS"],
            job_secrets=None,
            job_details="")
        self.dbSession.add(job)
        self.dbSession.commit()
    else:
        for job in databasePruneJobs:
            job.run_frequency_seconds = app.config[
                "DATABASE_PRUNE_FREQUENCY_SECONDS"]
        self.dbSession.commit()
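# --- Illustrative sketch (assumption, not in the original source) ---
# The three blocks above repeat one ensure-or-update pattern; a factored
# helper (hypothetical, not part of the module) makes that pattern explicit:

def ensureServiceJob(dbSession, dbcommon, models, jobType, frequencySeconds):
    # Create the singleton service job if absent, else refresh its frequency
    jobs = dbcommon.getServicesJobs(jobType)
    if not jobs:
        dbSession.add(
            models.Job(job_type=jobType,
                       job_user=None,
                       job_project=None,
                       job_export_group=None,
                       job_export_project=None,
                       run_frequency_seconds=frequencySeconds,
                       job_secrets=None,
                       job_details=""))
    else:
        for job in jobs:
            job.run_frequency_seconds = frequencySeconds
    dbSession.commit()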
class Scheduler(object):
    def __init__(self):
        self.__scheduler = BackgroundScheduler()
        self.__dbSession = None
        self.__dbCommon = None
        self.__runningJobs = []
        self.__jobTypes = {
            "ProjectExport": Jobs.ProjectExportJob,
            "AllExportJobsS3Status": Jobs.UpdateAllExportStatusS3Job,
            "HealthMetricsCollection": Jobs.HealthMetricsCollectionJob,
            "DatabasePrune": Jobs.DatabasePruneJob
        }
        self.__executionTypes = {
            "ProjectFilesExportTask": Jobs.ProjectFilesExportTask,
            "ProjectDockerImageExportTask": Jobs.ProjectDockerImageExportTask,
            "ProjectExportReportToS3Task": Jobs.ProjectExportReportToS3Task,
            "UpdateAllExportStatusS3Task": Jobs.UpdateAllExportStatusS3Task,
            "HealthMetricsCollectionTask": Jobs.HealthMetricsCollectionTask,
            "DatabasePruneTask": Jobs.DatabasePruneTask
        }

    def start(self, workerType="thread", maxWorkers=10, timezone=utc):
        self.__dbSession = db.dbSession
        self.__dbCommon = DBCommon(self.__dbSession)
        if maxWorkers < 3:
            maxWorkers = 3
        executors = {
            "default": {
                "type": "threadpool",
                "max_workers": maxWorkers
            },
            "jobs": {
                "type": "threadpool",
                "max_workers": maxWorkers
            },
            "executions": {
                "type": "threadpool",
                "max_workers": maxWorkers * 3
            }
        }
        job_defaults = {"coalesce": True, "max_instances": 1}
        self.__scheduler.configure(executors=executors,
                                   job_defaults=job_defaults,
                                   timezone=timezone)
        self.__scheduler.start()
        self.refreshJobs()
        #self.__scheduler.print_jobs()

    def refreshJobs(self):
        self.__scheduler.remove_all_jobs()
        for job in self.__dbCommon.getAllJobs():
            self.addJob(jobID=job.job_id, runNow=False)

    def updateJob(self, jobID):
        job = self.__dbCommon.getJob(jobID)
        if job.export_id in self.__runningJobs:
            # This should allow us to refresh the job.
            # APScheduler will allow any prior, running jobs to complete
            # without killing them when we remove the job from the scheduler.
            self.__scheduler.remove_job(job.export_id)
        # Re-add the job with the new details
        self.addJob(job.job_id)

    def addJob(self, jobID, runNow=True):
        # Consider adding a try/except here so an issue does not crash the server
        job = self.__dbCommon.getJob(jobID)
        nowTrigger = DateTrigger(run_date=datetime.now(tz=timezone.utc))
        # jitter must be set on the trigger itself: APScheduler silently
        # ignores extra trigger arguments when add_job receives a trigger
        # instance rather than a trigger name
        scheduledTrigger = IntervalTrigger(seconds=job.run_frequency_seconds,
                                           jitter=300)
        scheduledJob = None
        jobRunner = self.__jobTypes.get(job.job_type, Jobs.BaseJob)
        if runNow:
            nowJob = self.__scheduler.add_job(func=jobRunner,
                                              args=[job.job_id, self],
                                              id=None,
                                              executor="jobs",
                                              misfire_grace_time=60,
                                              trigger=nowTrigger)
            #print("Added Project Export Job {0} with export_id {1} as now {2} trigger".format(job, job.export_id, type(nowTrigger)))
        scheduledJob = self.__scheduler.add_job(func=jobRunner,
                                                args=[job.job_id, self],
                                                id=job.export_id,
                                                executor="jobs",
                                                misfire_grace_time=60,
                                                trigger=scheduledTrigger)
        #print("Added Job {0} with export_id {1} as interval {2} trigger".format(job, job.export_id, type(scheduledTrigger)))
        self.__runningJobs.append(job.export_id)
        return scheduledJob

    def removeJob(self, jobID):
        try:
            job = self.__dbCommon.getJob(jobID)
            if job:
                self.__scheduler.remove_job(job.export_id)
        except Exception:
            pass

    def removeExecution(self, executionID, statusCode=None):
        try:
            execution = self.__dbCommon.getExecution(executionID)
            if execution:
                if statusCode:
                    execution.execution_status = statusCode
                self.__scheduler.remove_job(execution.external_execution_id)
        except Exception:
            pass

    def addExecution(self, executionID):
        # Consider adding a try/except here so an issue does not crash the server
        execution = self.__dbCommon.getExecution(executionID)
        now = datetime.now(tz=timezone.utc)
        nowTrigger = DateTrigger(run_date=now)
        scheduledJob = self.__scheduler.add_job(
            func=self.__executionTypes.get(execution.execution_type,
                                           Jobs.BaseExecution),
            args=[execution.execution_id, self],
            id=execution.external_execution_id,
            executor="executions",
            trigger=nowTrigger)
        #print("Added Execution with export_id {0} as '{1}' trigger".format(execution.external_execution_id, now))
        return scheduledJob
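# --- Illustrative usage sketch (assumption, not in the original source) ---
# Startup wiring might look like this; the entry-point name is hypothetical:

def startScheduler():  # for illustration only
    scheduler = Scheduler()
    # start() clamps maxWorkers to at least 3 and calls refreshJobs(),
    # which re-registers every job currently stored in the database
    scheduler.start(maxWorkers=10)
    return scheduler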
class ProjectsAPI(object):
    def __init__(self, dominoAPIKey, dbSession):
        self.dominoAPIKey = dominoAPIKey
        self.dbSession = dbSession
        self.dbCommon = DBCommon(self.dbSession)
        self.dominoAPI = DominoAPISession(
            app.config["DOMINO_API_SERVER"],
            self.dominoAPIKey,
            verifySSL=app.config["DOMINO_API_SERVER_VERIFY_SSL"])
        self.reDockerRegistryName = re.compile(
            "^[a-z0-9]+(?:[._-]{1,2}[a-z0-9]+)*$")

    def create(self, username, projectName, exportGroupName,
               exportProjectName):
        respCode = 201
        jobData = {
            "success": None,
            "message": None,
            "export_id": None,
            "export_frequency_seconds": None
        }
        try:
            if not self.dominoAPI.isValidAPIKey():
                raise DominoAPIKeyInvalid
            jobType = "ProjectExport"
            jobRunFrequencyInSeconds = app.config[
                "EXPORT_JOB_SCHEDULE_DEFAULT_FREQUENCY_SECONDS"]

            # Expect Exceptions here if the Domino API Key does not provide
            # access to the Project
            projectInfo = self.dominoAPI.findProjectByOwnerAndName(
                username, projectName)
            if not self.dominoAPI.hasAccessToProject(username, projectName):
                raise DominoAPIUnauthorized

            # Check export group and project names for compliance with
            # Docker Registry naming requirements
            if not self.reDockerRegistryName.match(exportGroupName):
                if self.reDockerRegistryName.match(exportGroupName.lower()):
                    exportGroupName = exportGroupName.lower()
                    jobMessageFormat = "{MESSAGE}"
                    if jobData["message"]:
                        jobMessageFormat = "{ORIGINAL}; {MESSAGE}"
                    jobData["message"] = jobMessageFormat.format(
                        ORIGINAL=jobData["message"],
                        MESSAGE=
                        "Warning: request has been processed, but the Export Group Name has been automatically converted to lower case to comply with Docker Registry standards"
                    )
                else:
                    raise ExportAPIInvalidExportGroupName
            if not self.reDockerRegistryName.match(exportProjectName):
                if self.reDockerRegistryName.match(exportProjectName.lower()):
                    exportProjectName = exportProjectName.lower()
                    jobMessageFormat = "{MESSAGE}"
                    if jobData["message"]:
                        jobMessageFormat = "{ORIGINAL}; {MESSAGE}"
                    jobData["message"] = jobMessageFormat.format(
                        ORIGINAL=jobData["message"],
                        MESSAGE=
                        "Warning: request has been processed, but the Export Project Name has been automatically converted to lower case to comply with Docker Registry standards"
                    )
                else:
                    raise ExportAPIInvalidExportProjectName

            # Expect Exceptions here if the job already exists
            self.dbCommon.raiseOnJobExists(
                username, projectName, exportGroupName, exportProjectName,
                app.config.get("ALLOW_SAME_PROJECT_EXPORTS", False))

            # Do the actual work here
            jobDetails = {
                "taskState": {
                    "ProjectFilesExportTask": {
                        "lastCompletedExecutionID": None,
                        "commitID": None
                    },
                    "ProjectDockerImageExportTask": {
                        "lastCompletedExecutionID": None,
                        "computeEnvironmentID": None,
                        "computeEnvironmentRevision": None
                    },
                    "ProjectExportReportToS3Task": {
                        "lastCompletedExecutionID": None,
                        "statusSaved": False
                    }
                },
                "dockerBuildTemplateFile": "Standard.Dockerfile"
            }
            job = models.Job(job_type=jobType,
                             job_user=username.lower(),
                             job_project=projectName,
                             job_export_group=exportGroupName,
                             job_export_project=exportProjectName,
                             run_frequency_seconds=jobRunFrequencyInSeconds,
                             job_secrets=encrypter.encrypt(self.dominoAPIKey),
                             job_details=encrypter.encrypt(
                                 json.dumps(jobDetails)))
            self.dbSession.add(job)
            self.dbSession.commit()
            jobData["success"] = True
            jobData["export_id"] = job.export_id
            jobData["export_frequency_seconds"] = job.run_frequency_seconds
            # Schedule job with scheduler
            scheduler.addJob(job.job_id, True)
        except BadRequest:
            respCode = 400
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIMalformedJSON"]
        except DominoAPINotFound:
            respCode = 400
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIProjectNotExist"]
        except (DominoAPIKeyInvalid, DominoAPIUnauthorized):
            respCode = 401
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIProjectNoAccess"]
        except DBExportJobExists:
            respCode = 409
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIExportNameConflict"]
        except DBProjectJobExists:
            respCode = 409
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIDominoNameConflict"]
        except ExportAPIInvalidExportGroupName:
            respCode = 422
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIInvalidExportGroupName"]
        except ExportAPIInvalidExportProjectName:
            respCode = 422
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIInvalidExportProjectName"]
        except (DominoAPIUnexpectedError, Exception) as e:
            respCode = 503
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "UnknownError"].format(repr(e))
            raise
        return (respCode, jobData)

    def update(self, identity, updateAPIKey, exportGroupName,
               exportProjectName, disabled):
        respCode = 200
        jobData = {
            "success": None,
            "message": None,
            "export_id": None,
            "export_frequency_seconds": None
        }
        try:
            if not self.dominoAPI.isValidAPIKey():
                raise DominoAPIKeyInvalid
            job = self.dbCommon.getJobByExportID(identity)
            if not job:
                raise DBExportJobDoesNotExist

            # Expect Exceptions here if the Domino API Key does not provide
            # access to the Project
            projectInfo = self.dominoAPI.findProjectByOwnerAndName(
                job.job_user, job.job_project)
            if not self.dominoAPI.hasAccessToProject(job.job_user,
                                                     job.job_project):
                raise DominoAPIUnauthorized

            if updateAPIKey:
                job.job_secrets = encrypter.encrypt(self.dominoAPIKey)
            if exportGroupName:
                # Check export group name for compliance with Docker
                # Registry naming requirements
                if not self.reDockerRegistryName.match(exportGroupName):
                    if self.reDockerRegistryName.match(
                            exportGroupName.lower()):
                        exportGroupName = exportGroupName.lower()
                        jobMessageFormat = "{MESSAGE}"
                        if jobData["message"]:
                            jobMessageFormat = "{ORIGINAL}; {MESSAGE}"
                        jobData["message"] = jobMessageFormat.format(
                            ORIGINAL=jobData["message"],
                            MESSAGE=
                            "Warning: request has been processed, but the Export Group Name has been automatically converted to lower case to comply with Docker Registry standards"
                        )
                    else:
                        raise ExportAPIInvalidExportGroupName
                job.job_export_group = exportGroupName
            if exportProjectName:
                # Check export project name for compliance with Docker
                # Registry naming requirements
                if not self.reDockerRegistryName.match(exportProjectName):
                    if self.reDockerRegistryName.match(
                            exportProjectName.lower()):
                        exportProjectName = exportProjectName.lower()
                        jobMessageFormat = "{MESSAGE}"
                        if jobData["message"]:
                            jobMessageFormat = "{ORIGINAL}; {MESSAGE}"
                        jobData["message"] = jobMessageFormat.format(
                            ORIGINAL=jobData["message"],
                            MESSAGE=
                            "Warning: request has been processed, but the Export Project Name has been automatically converted to lower case to comply with Docker Registry standards"
                        )
                    else:
                        raise ExportAPIInvalidExportProjectName
                job.job_export_project = exportProjectName
            if isinstance(disabled, bool):
                job.job_active = (not disabled)
            if exportGroupName or exportProjectName:
                # Force project file and Docker image export tasks to run
                # during the next schedule
                jobDetails = json.loads(encrypter.decrypt(job.job_details))
                taskState = jobDetails.get("taskState", {})
                taskState["ProjectFilesExportTask"]["commitID"] = None
                taskState["ProjectDockerImageExportTask"][
                    "computeEnvironmentID"] = None
                taskState["ProjectDockerImageExportTask"][
                    "computeEnvironmentRevision"] = None
                jobDetails["taskState"] = taskState
                job.job_details = encrypter.encrypt(json.dumps(jobDetails))
            self.dbSession.commit()
            jobData["success"] = True
            jobData["export_id"] = job.export_id
            jobData["export_frequency_seconds"] = job.run_frequency_seconds
        except BadRequest:
            respCode = 400
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIMalformedJSON"]
        except DominoAPINotFound:
            respCode = 400
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIProjectNotExist"]
        except (DominoAPIKeyInvalid, DominoAPIUnauthorized):
            respCode = 401
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIProjectNoAccess"]
        except DBExportJobDoesNotExist:
            respCode = 404
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIExportIDNotExist"]
        except ExportAPIInvalidExportGroupName:
            respCode = 422
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIInvalidExportGroupName"]
        except ExportAPIInvalidExportProjectName:
            respCode = 422
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "ExportAPIInvalidExportProjectName"]
        except (DominoAPIUnexpectedError, Exception) as e:
            respCode = 503
            jobData["success"] = False
            jobData["message"] = StatusTypes.messageFromType[
                "UnknownError"].format(repr(e))
        return (respCode, jobData)

    def status(self, identity=None, projectName=None):
        respCode = 200
        jobData = []
        try:
            if not self.dominoAPI.isValidAPIKey():
                raise DominoAPIKeyInvalid
            userJobs = []
            jobs = self.dbCommon.getAllProjectExportJobs()
            for job in jobs:
                if self.dominoAPI.hasAccessToProject(job.job_user,
                                                     job.job_project):
                    userJobs.append(job)
            for userJob in userJobs:
                if identity:
                    if projectName:
                        # Status by userName and projectName
                        if (userJob.job_user == identity.lower()) and (
                                userJob.job_project == projectName):
                            jobData = self.dbCommon.projectExportStatusHistory(
                                userJob.job_id,
                                app.config.get("API_STATUS_LOG_MAX_RECORDS",
                                               10), True)
                    else:
                        # Status by export_id
                        if userJob.export_id == identity.lower():
                            jobData = self.dbCommon.projectExportStatusHistory(
                                userJob.job_id,
                                app.config.get("API_STATUS_LOG_MAX_RECORDS",
                                               10), True)
                            break
                        # Status by userName
                        elif userJob.job_user == identity.lower():
                            status = self.dbCommon.projectExportStatusLastHistory(
                                userJob.job_id)
                            if status:
                                jobData.append(status)
                else:
                    # Status for all jobs
                    status = self.dbCommon.projectExportStatusLastHistory(
                        userJob.job_id)
                    if status:
                        jobData.append(status)
        except DominoAPIKeyInvalid:
            respCode = 401
        except (DominoAPIUnexpectedError, Exception):
            respCode = 503
            raise
        return (respCode, jobData)