def getErrorReportURLsForSubmission(self):
    """ Gets the Signed URLs for download based on the submissionId """
    try:
        self.s3manager = s3UrlHandler()
        requestDict = RequestDictionary(self.request)
        submissionId = requestDict.getValue("submission_id")
        results = {}
        # One error-report URL per CSV validation job in the submission
        for jobId in self.jobManager.getJobsBySubmission(submissionId):
            if self.jobManager.getJobType(jobId) != "csv_record_validation":
                continue
            reportPath = self.jobManager.getReportPath(jobId)
            urlKey = "job_" + str(jobId) + "_error_url"
            if self.isLocal:
                results[urlKey] = os.path.join(self.serverPath, reportPath)
            else:
                results[urlKey] = self.s3manager.getSignedUrl("errors", reportPath, "GET")
        # Single cross-file report for the whole submission
        crossFilePath = self.jobManager.getCrossFileReportPath(submissionId)
        if self.isLocal:
            results["cross_file_error_url"] = os.path.join(self.serverPath, crossFilePath)
        else:
            results["cross_file_error_url"] = self.s3manager.getSignedUrl("errors", crossFilePath, "GET")
        return JsonResponse.create(StatusCode.OK, results)
    except ResponseException as e:
        return JsonResponse.error(e, StatusCode.CLIENT_ERROR)
    except Exception as e:
        # Unexpected exception, this is a 500 server error
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)
def getRss(self):
    """ Returns the URL for the RSS document.

    Returns:
        JsonResponse whose "rss_url" key is a local filesystem path when
        running locally, otherwise a signed S3 GET URL.
    """
    response = {}
    if self.isLocal:
        response["rss_url"] = os.path.join(self.serverPath, CONFIG_BROKER["rss_folder"], CONFIG_BROKER["rss_file"])
    else:
        self.s3manager = s3UrlHandler()
        response["rss_url"] = self.s3manager.getSignedUrl(CONFIG_BROKER["rss_folder"], CONFIG_BROKER["rss_file"], "GET")
    # Use StatusCode.OK instead of a bare 200 literal, for consistency with
    # the other routes in this file.
    return JsonResponse.create(StatusCode.OK, response)
def getRss(self):
    """ Returns a signed URL to the RSS document. If local returns local path to RSS. """
    folder = CONFIG_BROKER["rss_folder"]
    fileName = CONFIG_BROKER["rss_file"]
    if self.isLocal:
        rssUrl = os.path.join(self.serverPath, folder, fileName)
    else:
        self.s3manager = s3UrlHandler()
        rssUrl = self.s3manager.getSignedUrl(folder, fileName, "GET")
    return JsonResponse.create(200, {"rss_url": rssUrl})
def submit(self, name, CreateCredentials):
    """ Builds S3 URLs for a set of files and adds all related jobs to job tracker database

    Flask request should include keys from FILE_TYPES class variable above.

    Arguments:
        name -- User ID from the session handler
        CreateCredentials -- If True and not running locally, include temporary
            S3 upload credentials in the response

    Returns:
        Flask response returned will have key_url and key_id for each key in the request:
        key_url is the S3 URL for uploading, key_id is the job id to be passed to
        the finalize_submission route.
    """
    try:
        responseDict = {}
        fileNameMap = []
        safeDictionary = RequestDictionary(self.request)
        submissionId = self.jobManager.createSubmission(name, safeDictionary)
        existingSubmission = safeDictionary.exists("existing_submission_id")
        if existingSubmission:
            # Check if user has permission to specified submission
            self.checkSubmissionPermission(self.jobManager.getSubmissionById(submissionId))

        for fileType in FileHandler.FILE_TYPES:
            if not safeDictionary.exists(fileType):
                # If filetype not included in request, and this is an update to
                # an existing submission, skip it
                if existingSubmission:
                    continue
                # This is a new submission, all files are required
                raise ResponseException("Must include all files for new submission", StatusCode.CLIENT_ERROR)
            filename = safeDictionary.getValue(fileType)
            if not self.isLocal:
                uploadName = str(name) + "/" + s3UrlHandler.getTimestampedFilename(filename)
            else:
                uploadName = filename
            responseDict[fileType + "_key"] = uploadName
            fileNameMap.append((fileType, uploadName, filename))

        fileJobDict = self.jobManager.createJobs(fileNameMap, submissionId, existingSubmission)
        for fileType in fileJobDict:
            if "submission_id" not in fileType:
                responseDict[fileType + "_id"] = fileJobDict[fileType]

        if CreateCredentials and not self.isLocal:
            self.s3manager = s3UrlHandler(CONFIG_BROKER["aws_bucket"])
            responseDict["credentials"] = self.s3manager.getTemporaryCredentials(name)
        else:
            responseDict["credentials"] = {"AccessKeyId": "local", "SecretAccessKey": "local",
                                           "SessionToken": "local", "Expiration": "local"}

        responseDict["submission_id"] = fileJobDict["submission_id"]
        if self.isLocal:
            responseDict["bucket_name"] = CONFIG_BROKER["broker_files"]
        else:
            responseDict["bucket_name"] = CONFIG_BROKER["aws_bucket"]
        return JsonResponse.create(StatusCode.OK, responseDict)
    except (ValueError, TypeError, NotImplementedError) as e:
        return JsonResponse.error(e, StatusCode.CLIENT_ERROR)
    except ResponseException as e:
        # Bug fix: the ResponseException raised above for missing files was
        # previously swallowed by the generic Exception handler and reported
        # as a 500; return the status carried on the exception instead
        # (matches the other submit implementation in this file).
        return JsonResponse.error(e, e.status)
    except Exception as e:
        # Unexpected exception, this is a 500 server error.
        # A bare `except:` that followed this clause was removed: it could only
        # ever run for exceptions not derived from Exception (e.g.
        # KeyboardInterrupt), which should not be converted into a 500.
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)
def getErrorReportURLsForSubmission(self, isWarning=False):
    """ Gets the Signed URLs for download based on the submissionId """
    try:
        self.s3manager = s3UrlHandler()
        requestDict = RequestDictionary(self.request)
        submissionId = requestDict.getValue("submission_id")
        results = {}
        sess = GlobalDB.db().session
        severity = 'warning' if isWarning else 'error'

        # Per-job report URLs for the CSV validation jobs of this submission
        for jobId in self.jobManager.getJobsBySubmission(submissionId):
            # get the job object here so we can call the refactored getReportPath
            # todo: replace other db access functions with job object attributes
            job = sess.query(Job).filter(Job.job_id == jobId).one()
            if job.job_type.name != 'csv_record_validation':
                continue
            reportName = getReportPath(job, severity)
            urlKey = "job_" + str(jobId) + "_" + severity + "_url"
            if self.isLocal:
                results[urlKey] = os.path.join(self.serverPath, reportName)
            else:
                results[urlKey] = self.s3manager.getSignedUrl("errors", reportName, method="GET")

        # For each pair of files, get url for the report
        fileTypes = self.interfaces.validationDb.getFileTypeList()
        for source in fileTypes:
            sourceId = self.interfaces.validationDb.getFileTypeIdByName(source)
            for target in fileTypes:
                targetId = self.interfaces.validationDb.getFileTypeIdByName(target)
                if targetId <= sourceId:
                    # Skip redundant reports
                    continue
                # Retrieve filename
                if isWarning:
                    reportName = getCrossWarningReportName(submissionId, source, target)
                else:
                    reportName = getCrossReportName(submissionId, source, target)
                # If not local, get a signed URL
                if self.isLocal:
                    reportPath = os.path.join(self.serverPath, reportName)
                else:
                    reportPath = self.s3manager.getSignedUrl("errors", reportName, method="GET")
                # Assign to key based on source and target
                results[self.getCrossReportKey(source, target, isWarning)] = reportPath

        return JsonResponse.create(StatusCode.OK, results)
    except ResponseException as e:
        return JsonResponse.error(e, StatusCode.CLIENT_ERROR)
    except Exception as e:
        # Unexpected exception, this is a 500 server error
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)
def submit(self, name, CreateCredentials):
    """ Builds S3 URLs for a set of files and adds all related jobs to job tracker database

    Flask request should include keys from FILE_TYPES class variable above.

    Arguments:
        name -- User ID from the session handler
        CreateCredentials -- If True, will create temporary credentials for S3 uploads

    Returns:
        Flask response returned will have key_url and key_id for each key in the request:
        key_url is the S3 URL for uploading, key_id is the job id to be passed to
        the finalize_submission route.
    """
    try:
        responseDict = {}
        fileNameMap = []
        safeDictionary = RequestDictionary(self.request)
        submissionId = self.jobManager.createSubmission(name, safeDictionary)
        existingSubmission = safeDictionary.exists("existing_submission_id")
        if existingSubmission:
            # Check if user has permission to specified submission
            self.checkSubmissionPermission(self.jobManager.getSubmissionById(submissionId))

        # Build fileNameMap to be used in creating jobs
        for fileType in FileHandler.FILE_TYPES:
            if not safeDictionary.exists(fileType):
                # If filetype not included in request, and this is an update to
                # an existing submission, skip it
                if existingSubmission:
                    continue
                # This is a new submission, all files are required
                raise ResponseException("Must include all files for new submission", StatusCode.CLIENT_ERROR)
            filename = safeDictionary.getValue(fileType)
            if not self.isLocal:
                uploadName = str(name) + "/" + s3UrlHandler.getTimestampedFilename(filename)
            else:
                uploadName = filename
            responseDict[fileType + "_key"] = uploadName
            fileNameMap.append((fileType, uploadName, filename))

        if not fileNameMap and existingSubmission:
            raise ResponseException("Must include at least one file for an existing submission",
                                    StatusCode.CLIENT_ERROR)

        if not existingSubmission:
            # Don't add external files to existing submission
            for extFileType in FileHandler.EXTERNAL_FILE_TYPES:
                filename = CONFIG_BROKER["".join([extFileType, "_file_name"])]
                if not self.isLocal:
                    uploadName = str(name) + "/" + s3UrlHandler.getTimestampedFilename(filename)
                else:
                    uploadName = filename
                responseDict[extFileType + "_key"] = uploadName
                fileNameMap.append((extFileType, uploadName, filename))

        fileJobDict = self.jobManager.createJobs(fileNameMap, submissionId, existingSubmission)
        for fileType in fileJobDict:
            if "submission_id" not in fileType:
                responseDict[fileType + "_id"] = fileJobDict[fileType]

        if CreateCredentials and not self.isLocal:
            self.s3manager = s3UrlHandler(CONFIG_BROKER["aws_bucket"])
            responseDict["credentials"] = self.s3manager.getTemporaryCredentials(name)
        else:
            responseDict["credentials"] = {"AccessKeyId": "local", "SecretAccessKey": "local",
                                           "SessionToken": "local", "Expiration": "local"}

        responseDict["submission_id"] = fileJobDict["submission_id"]
        if self.isLocal:
            responseDict["bucket_name"] = CONFIG_BROKER["broker_files"]
        else:
            responseDict["bucket_name"] = CONFIG_BROKER["aws_bucket"]
        return JsonResponse.create(StatusCode.OK, responseDict)
    except (ValueError, TypeError, NotImplementedError) as e:
        return JsonResponse.error(e, StatusCode.CLIENT_ERROR)
    except ResponseException as e:
        # Call error route directly, status code depends on exception
        return JsonResponse.error(e, e.status)
    except Exception as e:
        # Unexpected exception, this is a 500 server error.
        # A bare `except:` that followed this clause was removed: it could only
        # ever run for exceptions not derived from Exception (e.g.
        # KeyboardInterrupt), which should not be converted into a 500.
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)