def storeToken(self, tokenObject):
    """Persist an OAI-PMH resumption token.

    The DB row stores the token metadata with an empty "resultJson"
    placeholder; the (potentially large) result JSON itself is written to a
    file named after the token under FascinatorHome/oaipmh-results.
    On failure, self.error / self.errorMsg are set.
    """
    self.resetErrors()
    index = "resumptionTokens-STORE"
    table = "resumptionTokens"
    fields = {
        "token": tokenObject.getToken(),
        "metadataPrefix": tokenObject.getMetadataPrefix(),
        "expiry": Timestamp(tokenObject.getExpiry()),
        "nextToken": tokenObject.getNextToken(),
        "resultJson": ""
    }
    # Result payload is kept on disk rather than in the DB row
    FileUtils.writeStringToFile(
        File(
            FascinatorHome.getPath("oaipmh-results") + "/" +
            tokenObject.getToken()),
        tokenObject.getResultJson())
    #self.log.debug("=== storeToken()")
    #self.log.debug("=== TOKEN: '{}'", tokenObject.getToken())
    #self.log.debug("=== METADATAPREFIX: '{}'", tokenObject.getMetadataPrefix())
    #self.log.debug("=== EXPIRY: '{}'", tokenObject.getExpiry())
    #self.log.debug("=== TOTALFOUND: '{}'", tokenObject.getTotalFound())
    #self.log.debug("=== START: '{}'", tokenObject.getStart())
    try:
        self.db.insert(self.dbName, index, table, fields)
    except Exception, e:
        msg = self.parseError(e)
        if msg == "Duplicate record!":
            # Expected failure
            self.log.error("Duplicate record already exists in table!")
        else:
            # Something is wrong
            self.log.error("ERROR: ", e)
        self.error = True
        self.errorMsg = msg
def storeToken(self, tokenObject): self.resetErrors() index = "resumptionTokens-STORE" table = "resumptionTokens" fields = { "token": tokenObject.getToken(), "metadataPrefix": tokenObject.getMetadataPrefix(), "expiry": Timestamp(tokenObject.getExpiry()), "nextToken": tokenObject.getNextToken(), "resultJson": "" } FileUtils.writeStringToFile(File(FascinatorHome.getPath("oaipmh-results")+ "/"+tokenObject.getToken()),tokenObject.getResultJson()) #self.log.debug("=== storeToken()") #self.log.debug("=== TOKEN: '{}'", tokenObject.getToken()) #self.log.debug("=== METADATAPREFIX: '{}'", tokenObject.getMetadataPrefix()) #self.log.debug("=== EXPIRY: '{}'", tokenObject.getExpiry()) #self.log.debug("=== TOTALFOUND: '{}'", tokenObject.getTotalFound()) #self.log.debug("=== START: '{}'", tokenObject.getStart()) try: self.db.insert(self.dbName, index, table, fields) except Exception, e: msg = self.parseError(e) if msg == "Duplicate record!": # Expected failure self.log.error("Duplicate record already exists in table!") else: # Something is wrong self.log.error("ERROR: ", e) self.error = True self.errorMsg = msg
def getKeysArray(self):
    """Load the configured API-keys JSON file, seeding an empty skeleton
    when the file is missing or unparseable.

    The path comes from system config ("api" / "apiKeyFile");
    createNewFile() is a no-op when the file already exists.
    """
    keysFile = FileUtils.getFile(
        self.systemConfig.getString("", "api", "apiKeyFile"))
    keysFile.createNewFile()
    try:
        keysJsonSimple = JsonSimple(keysFile)
    # BUG FIX: "except IOException, ParseException:" caught only
    # IOException and bound it to the name ParseException (Python 2
    # comma syntax). A parenthesized tuple catches both types.
    except (IOException, ParseException):
        self.log.warn("File may be blank. Creating empty json api keys file...")
        FileUtils.writeStringToFile(keysFile, '{"api": {"clients": []}}')
        keysJsonSimple = JsonSimple(keysFile)
def validate_package(self, zipped_package): working_dir, working_dir_path, unzipped_package_path = self._setup_working_dir() try: self._extract_package(zipped_package, str(unzipped_package_path.toAbsolutePath())) for output in self._execute_validation(working_dir): print output finally: FileUtils.deleteDirectory(working_dir.toFile())
def empty_user(self):
    """Delete the per-user HTML cache directory under the webapp root and
    report the outcome via the request's "errorMessage" attribute."""
    strRootDir = request.getServletContext().getRealPath("/")
    strUserHtmlDir = (strRootDir + "html" + File.separator + "user" +
                      File.separator)
    dirFile = File(strUserHtmlDir)
    # Nothing to clear when the cache folder is absent or not a directory
    if not (dirFile.exists() and dirFile.isDirectory()):
        request.setAttribute("errorMessage", u"用户缓存文件夹不存在!")
        return
    FileUtils.deleteQuietly(dirFile)
    request.setAttribute("errorMessage", u"删除所有用户缓存完毕!")
def writeResponseToStatusResponseCache(self, jobId, jobStatus):
    """Cache a curation job status response as <jobId>.json under
    FascinatorHome/curation-status-responses.

    Parameters:
    jobId -- numeric job identifier, used as the cache file name
    jobStatus -- status object; serialized via toString(True)
    """
    curationStatusRespones = File(
        FascinatorHome.getPath() + "/curation-status-responses")
    # BUG FIX: the condition was inverted -- the directory was only
    # "created" when it already existed, so a missing cache directory was
    # never made. Create it when absent instead.
    if not curationStatusRespones.exists():
        FileUtils.forceMkdir(curationStatusRespones)
    FileUtils.writeStringToFile(
        File(curationStatusRespones.getPath() + "/" +
             Integer(jobId).toString() + ".json"),
        jobStatus.toString(True))
def decode_archive(): try: print 'decoding archive...' encoded_archive_bytes = FileUtils.readFileToByteArray( File(node_archive_path)) decoded_archive_bytes = Base64.getMimeDecoder().decode( encoded_archive_bytes) FileUtils.writeByteArrayToFile(File(archive_name), decoded_archive_bytes) print 'successfully decoded archive' except: print 'Decoding application archive failed' print dumpStack() apply(traceback.print_exception, sys.exc_info()) exit(exitcode=1)
def onCall(self, value, fileStream, fileStream2): if not sponge.getVariable("demo.readOnly", False): uploaded = "Uploaded" uploadDir = "{}/upload/".format(sponge.home) # Single file. if fileStream.hasNext(): FileUtils.copyInputStreamToFile(fileStream.inputStream, File(uploadDir + fileStream.filename)) uploaded += " " + fileStream.filename print "Writing " + fileStream.filename # Multiple files. while fileStream2.hasNext(): fs2 = fileStream2.next() FileUtils.copyInputStreamToFile(fs2.inputStream, File(uploadDir + fs2.filename)) print "Writing " + fs2.filename uploaded += " " + fs2.filename return uploaded
def __createFromSelected(self):
    """Create (or update) a package manifest from the selected items and
    push it into storage.

    When no active manifest id exists, a new manifest file is written and
    harvested as a new object via HarvestClient; otherwise the existing
    object's manifest payload is replaced in place.
    """
    self.vc("log").debug("Creating package from selected...")
    packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile()
    #self.vc("log").debug("packageType = '{}'", packageType)
    #self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile)

    # if modifying existing manifest, we already have an identifier,
    # otherwise create a new one
    manifestId = self.__getActiveManifestId()
    if manifestId is None:
        manifestHash = "%s.tfpackage" % uuid.uuid4()
    else:
        manifestHash = self.__getActiveManifestPid()

    # store the manifest file for harvesting
    packageDir = FascinatorHome.getPathFile("packages")
    packageDir.mkdirs()
    manifestFile = File(packageDir, manifestHash)
    outStream = FileOutputStream(manifestFile)
    outWriter = OutputStreamWriter(outStream, "UTF-8")

    manifest = self.__getActiveManifest()
    # Preserve an already-set package type; only apply the new one when unset
    oldType = manifest.getType()
    if oldType is None:
        manifest.setType(packageType)
    else:
        manifest.setType(oldType)
    #self.vc("log").debug("Manifest: {}", manifest)
    outWriter.write(manifest.toString(True))
    outWriter.close()

    try:
        if manifestId is None:
            # harvest the package as an object
            username = self.vc("sessionState").get("username")
            if username is None:
                username = "******"  # necessary?
            harvester = None
            # set up config files, and make sure they are both deployed
            workflowsDir = FascinatorHome.getPathFile("harvest/workflows")
            configFile = self.__getFile(workflowsDir, jsonConfigFile)
            rulesFile = self.__getFile(workflowsDir, "packaging-rules.py")
            # run the harvest client with our packaging workflow config
            harvester = HarvestClient(configFile, manifestFile, username)
            harvester.start()
            manifestId = harvester.getUploadOid()
            harvester.shutdown()
        else:
            # update existing object
            object = StorageUtils.getDigitalObject(Services.getStorage(),
                                                   manifestId)
            manifestStream = FileUtils.openInputStream(manifestFile)
            StorageUtils.createOrUpdatePayload(object, manifestHash,
                                               manifestStream)
            manifestStream.close()
            object.close()
    except Exception, ex:
        error = "Packager workflow failed: %s" % str(ex)
        log.error(error, ex)
        # NOTE(review): 'harvester' is only assigned on the new-object
        # branch; an exception raised from the update branch would
        # NameError here -- confirm and hoist the initialisation if so.
        if harvester is not None:
            harvester.shutdown()
        return '{ "status": "failed" }'
def undeploy_package(self, zipped_package):
    """Build and run a Salesforce 'destructive changes' undeploy for the
    given zipped package.

    The zip is extracted, its package.xml becomes destructiveChanges.xml
    (listing every component to delete), and an empty package.xml carrying
    the original API version is written alongside it. The scratch directory
    is always removed afterwards.
    """
    working_dir, working_dir_path, undeploy_package_path = \
        self._setup_working_dir()
    try:
        unzipped_package_path = Files.createDirectory(
            working_dir.resolve("undeploy_root"))
        self._extract_package(zipped_package,
                              str(unzipped_package_path.toAbsolutePath()))
        package_xml_file = unzipped_package_path.resolve("package.xml")
        root = ET.parse(str(package_xml_file.toAbsolutePath())).getroot()
        # package.xml is namespaced; pull the metadata API version out of it
        ns = {'sf': 'http://soap.sforce.com/2006/04/metadata'}
        version = root.find('sf:version', ns).text
        # The original member list becomes the destructive (delete) manifest
        Files.copy(package_xml_file,
                   undeploy_package_path.resolve('destructiveChanges.xml'))
        # A destructive deploy still needs a package.xml at the same version
        Files.write(undeploy_package_path.resolve("package.xml"),
                    bytearray(self.empty_package_template(version)))
        for output in self._execute(working_dir):
            print output
    finally:
        FileUtils.deleteDirectory(working_dir.toFile())
def getToken(self, tokenId):
    """Retrieve a stored resumption token by id.

    Rebuilds the expiry as an epoch-seconds string with microseconds
    re-attached, reads and then deletes the cached result JSON file, and
    returns a populated ResumptionToken. Returns None when the token is
    unknown or on any error (error state recorded on self).
    """
    self.resetErrors()
    index = "resumptionTokens-GET"
    sql = """
        SELECT *
        FROM   resumptionTokens
        WHERE  token = ?
    """
    fields = [tokenId]
    try:
        result = self.db.select(self.dbName, index, sql, fields)
        # Make sure we got a response
        if result is None or result.isEmpty():
            return None
        # Create the new token to return
        metadataPrefix = result.get(0).get("METADATAPREFIX")
        expiryStr = result.get(0).get("EXPIRY")
        # Jython does not support %f microseconds in time parsing, makes
        # this more awkward then it should be in 2.6+ Python
        # 1: split into basic time + micro seconds
        (basicTime, mSecs) = expiryStr.strip().split(".")
        # 2: Parse the basic time
        expiryDt = datetime.strptime(basicTime, "%Y-%m-%d %H:%M:%S")
        # 3: Convert into a 'epoch' long and then to a string (has an extra ".0" on the end)
        epoch = "%s" % time.mktime(expiryDt.timetuple())
        # 4: Remove the extraneous trailing zero and re-attach microseconds
        expiry = "%s%s" % (epoch.replace(".0", ""), mSecs)
        nextToken = result.get(0).get("NEXTTOKEN")
        # Result JSON lives on disk next to the DB row; read it, then clean up
        file = File(
            FascinatorHome.getPath("oaipmh-results") + "/" + tokenId)
        resultJson = FileUtils.readFileToString(file)
        FileUtils.deleteQuietly(file)
        token = ResumptionToken(tokenId, metadataPrefix, nextToken,
                                resultJson)
        token.setExpiry(expiry)
        return token
    except Exception, e:
        # Something is wrong
        self.log.error("ERROR: ", e)
        self.error = True
        self.errorMsg = self.parseError(e)
        return None
def getYaraTargetFromGhidra():
    """Prompt for an output file and dump the current Ghidra program's
    original file bytes into it; return the chosen path.

    Exits the script when the file chooser is cancelled. Any existing file
    is removed first, because the bytes are written in append mode chunk by
    chunk.
    """
    yaraTargetPath = askFile(
        'Choose a file where Ghidra Program bytes will be saved.',
        'Choose file:')
    if yaraTargetPath is None:
        sys.exit(1)
    if os.path.exists(yaraTargetPath.getPath()):
        os.remove(yaraTargetPath.getPath())
    CHUNK_SIZE = 4096
    buf = jarray.zeros(CHUNK_SIZE, "b")
    # First FileBytes entry: the program's originating file image
    fBytes = currentProgram.getMemory().getAllFileBytes().get(0)
    sizeFBytes = fBytes.getSize()
    for k in range(0, sizeFBytes + 1, CHUNK_SIZE):
        count = fBytes.getOriginalBytes(k, buf, 0, CHUNK_SIZE)
        # A zero-length read marks the end of the file bytes
        if count == 0:
            break
        buf2 = buf[0:count]
        # append=True: each chunk is added to the end of the target file
        FileUtils.writeByteArrayToFile(yaraTargetPath, buf2, True)
    return yaraTargetPath.getPath()
def getToken(self, tokenId): self.resetErrors() index = "resumptionTokens-GET" sql = """ SELECT * FROM resumptionTokens WHERE token = ? """ fields = [tokenId] try: result = self.db.select(self.dbName, index, sql, fields) # Make sure we got a response if result is None or result.isEmpty(): return None # Create the new token to return metadataPrefix = result.get(0).get("METADATAPREFIX") expiryStr = result.get(0).get("EXPIRY") # Jython does not support %f microseconds in time parsing, makes # this more awkward then it should be in 2.6+ Python # 1: split into basic time + micro seconds (basicTime, mSecs) = expiryStr.strip().split(".") # 2: Parse the basic time expiryDt = datetime.strptime(basicTime, "%Y-%m-%d %H:%M:%S") # 3: Convert into a 'epoch' long and then to a string (has an extra ".0" on the end) epoch = "%s" % time.mktime(expiryDt.timetuple()) # 4: Remove the extraneous trailing zero and re-attach microseconds expiry = "%s%s" % (epoch.replace(".0", ""), mSecs) nextToken = result.get(0).get("NEXTTOKEN") file = File(FascinatorHome.getPath("oaipmh-results")+ "/"+tokenId) resultJson = FileUtils.readFileToString(file) FileUtils.deleteQuietly(file) token = ResumptionToken(tokenId, metadataPrefix,nextToken,resultJson) token.setExpiry(expiry) return token except Exception, e: # Something is wrong self.log.error("ERROR: ", e) self.error = True self.errorMsg = self.parseError(e) return None
def removeToken(self, tokenObject): self.resetErrors() index = "resumptionTokens-DELETE" table = "resumptionTokens" fields = { "token": tokenObject.getToken() } try: self.db.delete(self.dbName, index, table, fields) file = File(FascinatorHome.getPath("oaipmh-results")+ "/"+tokenObject.getToken()) FileUtils.deleteQuietly(file) self.log.info("Delete successful! TOKEN='{}'", tokenObject.getToken()) return True except Exception, e: # Something is wrong self.log.error("Delete failed! TOKEN='{}'", tokenObject.getToken()) self.log.error("ERROR: ", e) self.error = True self.errorMsg = self.parseError(e) return False
def getSavedGitUrl():
    """Return the persisted git URL, or None when unset or blank."""
    RELOAD_LOCK.lock()
    try:
        urlFile = File(getGitUrlFile())
        if not urlFile.isFile():
            return None
        saved = FileUtils.readFileToString(urlFile).strip()
        # An empty file counts as "no URL saved"
        return saved or None
    finally:
        RELOAD_LOCK.unlock()
def getSavedGitBranch():
    """Return the persisted git branch name, or None when unset or blank."""
    RELOAD_LOCK.lock()
    try:
        branchFile = File(getGitBranchFile())
        if not branchFile.isFile():
            return None
        saved = FileUtils.readFileToString(branchFile).strip()
        # An empty file counts as "no branch saved"
        return saved or None
    finally:
        RELOAD_LOCK.unlock()
def removeToken(self, tokenObject):
    """Delete a resumption token row from the database together with its
    cached on-disk result file.

    Returns True on success, False on failure (error state recorded on
    self).
    """
    self.resetErrors()
    index = "resumptionTokens-DELETE"
    table = "resumptionTokens"
    fields = {"token": tokenObject.getToken()}
    try:
        self.db.delete(self.dbName, index, table, fields)
        # Quiet delete tolerates an already-missing result file
        file = File(
            FascinatorHome.getPath("oaipmh-results") + "/" +
            tokenObject.getToken())
        FileUtils.deleteQuietly(file)
        self.log.info("Delete successful! TOKEN='{}'",
                      tokenObject.getToken())
        return True
    except Exception, e:
        # Something is wrong
        self.log.error("Delete failed! TOKEN='{}'", tokenObject.getToken())
        self.log.error("ERROR: ", e)
        self.error = True
        self.errorMsg = self.parseError(e)
        return False
def execute(self):
    """For every unit, prune its site-nav entries, then delete the
    generated per-unit HTML cache directory and report completion.
    """
    unitService = __jitar__.unitService
    siteNavService = __spring__.getBean("siteNavService")
    unit_list = unitService.getAllUnitOrChildUnitList(None)
    # NOTE(review): debug output left in; consider removing
    print unit_list
    for u in unit_list:
        self.siteNavArray = []
        unitId = u.unitId
        site_list = siteNavService.getAllSiteNav(True, 1, unitId)
        for sn in site_list:
            # checkExists() returning True triggers deletion of the entry;
            # presumably it detects stale/duplicate nav records -- TODO
            # confirm its semantics against its definition
            if self.checkExists(sn) == True:
                siteNavService.deleteSiteNav(sn)
            else:
                self.siteNavArray.append(sn)
    # Wipe the generated html/unit/ cache directory under the webapp root
    strFile = request.getServletContext().getRealPath("/")
    strFile = strFile + "html" + File.separator + "unit" + File.separator
    file = File(strFile)
    if file.exists():
        FileUtils.deleteDirectory(file)
    # Message text: "The code has finished executing."
    response.writer.write(u"代码已经执行完毕。")
def onCall(self, filename):
    """Serve the named file from <sponge.home>/upload/ as a downloadable
    stream (Content-Disposition: attachment).

    Does nothing (returns None) when demo.readOnly is set.
    """
    if not sponge.getVariable("demo.readOnly", False):
        # NOTE(review): 'uploaded' is never used here -- looks like a
        # leftover from the matching upload action
        uploaded = "Uploaded"
        uploadDir = "{}/upload/".format(sponge.home)
        # SECURITY(review): 'filename' is concatenated into the path
        # unchecked; a value containing '..' could escape uploadDir --
        # confirm callers sanitize it.
        file = File(uploadDir + filename)
        streamValue = OutputStreamValue(
            lambda output: FileUtils.copyFile(file, output)).withHeader(
                u"Content-Disposition",
                'attachment; filename="{}"'.format(filename))
        # Attach a MIME type when one can be determined from the file
        mimeType = Files.probeContentType(file.toPath())
        if mimeType:
            streamValue.withContentType(mimeType)
        return streamValue
def deleteDirectory(self, sPath):
    """Quietly delete the directory at sPath; no-op when the path is
    missing or is not a directory.

    Everything after the first plain return is unreachable legacy code,
    deliberately retained by the original author (see the string marker).
    """
    # Legacy comment (translated): "If sPath does not end with a file
    # separator, append one automatically" -- no longer done.
    dirFile = File(sPath)
    # Translated: exit when the path does not exist or is not a directory
    if dirFile.exists() == False or dirFile.isDirectory() == False:
        return
    FileUtils.deleteQuietly(dirFile)
    return
    """ 换一种新的方法,以下代码不用了 """
    # (String above, translated: "Switched to a new approach; the code
    # below is no longer used.") --- unreachable legacy implementation ---
    # Translated: delete all files under the folder (including subdirs)
    files = dirFile.listFiles()
    if files == None or len(files) == 0:
        return
    for f in files:
        # Translated: delete child files directly, recurse into subdirs
        if f.isFile():
            f.delete()
        else:
            self.deleteDirectory(f.getAbsolutePath())
    # Translated: finally delete the current directory itself
    dirFile.delete()
    dirFile = None
def __activate__(self, context):
    """Report-admin endpoint: dispatches create / edit / options /
    get-json actions for report management.

    Requires an admin login; otherwise an error message is recorded and no
    action runs. Each action writes a small JSON payload to the response
    and returns early.
    """
    self.auth = context["page"].authentication
    self.errorMsg = ""
    self.request = context["request"]
    self.response = context["response"]
    self.formData = context["formData"]
    self.log = context["log"]
    self.reportManager = context["Services"].getService("reportManager")
    self.reportName = None
    # Access control: only admins may proceed
    if (self.auth.is_logged_in()):
        if (self.auth.is_admin() == True):
            pass
        else:
            self.errorMsg = "Requires Admin / Librarian / Reviewer access."
    else:
        self.errorMsg = "Please login."
    if self.errorMsg == "":
        self.reportName = self.formData.get("reportName")
        if (self.reportName):
            self.report = self.reportManager.getReport(self.reportName)
        # 'func' may arrive via form data or as a raw query parameter
        self.func = self.formData.get("func", "")
        if self.func == "" and self.request.getParameter("func"):
            self.func = self.request.getParameter("func")
        if self.func == "action":
            self.action = self.request.getParameter("action")
            if self.action == "create":
                self.createReport()
                out = self.response.getPrintWriter("text/plain; charset=UTF-8")
                out.println("{\"id\":\"" + self.report.getReportName() + "\"}")
                out.close()
                return
            if self.action == "edit":
                self.editReport()
                out = self.response.getPrintWriter("text/plain; charset=UTF-8")
                out.println("{\"id\":\"" + self.report.getReportName() + "\"}")
                out.close()
                return
            if self.action == "options":
                # Serve the static criteria-options JSON straight from disk
                out = self.response.getPrintWriter("text/plain; charset=UTF-8")
                out.println(FileUtils.readFileToString(File(
                    FascinatorHome.getPath("reports") +
                    "/reportCriteriaOptions.json")))
                out.close()
                return
            if self.action == "get-json":
                out = self.response.getPrintWriter("text/plain; charset=UTF-8")
                report = self.reportManager.getReports().get(
                    self.request.getParameter("reportName"))
                queryFilters = report.config.getObject("query", "filter")
                jsonMap = HashMap()
                # Emit filter values sorted by element id for stable output
                elementIds = ArrayList()
                for elementId in queryFilters:
                    elementIds.add(elementId)
                Collections.sort(elementIds)
                for elementId in elementIds:
                    jsonMap.put(elementId,
                                queryFilters.get(elementId).get("value"))
                jsonMap.put("reportName", report.getLabel())
                JsonObject.writeJSONString(jsonMap, out)
                out.close()
                return
def __handleAlert(self, file):
    '''Sends the alert off to the relevant handler and then pushes
    the metadata to ReDBox

    Parameters:
    file -- the file (path, not object) to be processed

    Each record produced by the handler is written to an incrementally
    numbered metadata file and ingested; the metadata file is then moved
    to the success or failed directory accordingly.
    '''
    successCount = 0
    failedCount = 0
    handler = None
    self.logInfo(file, "Processing file " + file)
    self.__log.info("Alert system is processing file %s" % file)
    # Handler selection is driven purely by the file extension
    ext = file.rpartition('.')[2]
    if not ext in self.handlers:
        self.logInfo(
            file, "Did not process file as extension is not configured")
        return (0, 0)
    # Add the timestamp to a copy of baseline (the shared baseline itself
    # must not be mutated)
    baseline = dict(self.baseline)
    timestamp = time.gmtime(os.path.getmtime(self.pBase(file)))
    for field in self.timestampFields:
        val = time.strftime("%Y-%m-%d %H:%M:%S", timestamp)
        baseline[field] = val
    if self.handlers[ext] == "CSVAlertHandler":
        config = self.config['CSVAlertHandlerParams']['configMap'][ext]
        handler = CSVAlertHandler(self.pBase(file), config, baseline)
        self.logInfo(
            file,
            "Using the CSVAlertHandler for file with extension %s" % ext)
    elif self.handlers[ext] == "XMLAlertHandler":
        config = self.config['XMLAlertHandlerParams']['configMap'][ext]
        handler = XMLAlertHandler(self.pBase(file), config, baseline)
        self.logInfo(
            file,
            "Using the XMLAlertHandler for file with extension %s" % ext)
    else:
        raise AlertException("Unknown file handler: '%s'"
                             % self.handlers[ext])
    jsonList = handler.process()
    if jsonList is None:
        self.logInfo(file, "No records were returned.")
        return (0, 0)
    ## Now all of the JSON Objects need to be ingested into the tool chain
    # NOTE(review): the loop variable 'json' shadows the json module name
    # within this loop
    id = 0
    for json in jsonList:
        id += 1
        # use an incremental filename in case the data file contains more
        # than 1 record
        meta_file_name = "%s.%s" % (file, id)
        meta_file = self.pTemp(meta_file_name)
        self.logInfo(file, "Using metadata file: %s" % meta_file)
        try:
            oid = self.__ingestJson(file, meta_file, json)
            successCount += 1
        except Exception, e:
            failedCount += 1
            self.logInfo(
                file, "Moving failed metadata file [%s] to %s."
                % (meta_file, self.__DIR_FAILED))
            filepath = os.path.join(self.__DIR_FAILED, meta_file_name)
            FileUtils.moveFile(File(meta_file), File(filepath))
            continue
        self.logInfo(
            file, "Moving successful metadata file [%s] to %s."
            % (meta_file, self.__DIR_SUCCESS))
        filepath = os.path.join(self.__DIR_SUCCESS, meta_file_name)
        # python library seems to dislike mixed \ and / in path
        FileUtils.moveFile(File(meta_file), File(filepath))
def execute(self):
    """Resource preview action.

    Resolves the resource's physical file and renders it by type:
    images redirect to the site URL, .txt renders through a text template,
    .doc/.docx are converted to SWF via the conversion service, and .swf
    plays directly. Returns the FreeMarker template path to render.
    """
    if self.loginUser == None:
        # Message: "Please log in again."
        request.setAttribute("error", u"请重新登录。")
        return "/WEB-INF/ftl/show_resource_swf.ftl"
    param = ParamUtil(request)
    resourceId = self.params.safeGetIntParam("resource")
    resourceService = __jitar__.getResourceService()
    if resourceId == 0:
        # Message: "Missing file information."
        request.setAttribute("error", u"缺少文件信息。")
        return "/WEB-INF/ftl/show_resource_swf.ftl"
    resource = resourceService.getResource(resourceId)
    if resource == None:
        # Message: "Unable to load the resource."
        request.setAttribute("error", u"无法加载资源。")
        return "/WEB-INF/ftl/show_resource_swf.ftl"
    # Disabled legacy decryption step:
    #=======================================================================
    # enc = EncryptDecrypt("zhongjiaoqixing")
    # try:
    #     resfile = enc.decrypt(resfile)
    # except:
    #     request.setAttribute("error", u"解压缩过程中出现错误。")
    #     return "/WEB-INF/ftl/show_resource.ftl"
    # finally:
    #     encc = None
    #=======================================================================
    try:
        resfile = resource.href
        filename = resfile.lower()
        # Resolve the physical path: either under a configured external
        # "userPath" (Windows-style separators) or inside the webapp
        fileUserConfigPath = request.getSession().getServletContext(
        ).getInitParameter("userPath")
        if fileUserConfigPath == None or fileUserConfigPath == "":
            resfileapth = request.getSession().getServletContext(
            ).getRealPath("/" + resfile)
        else:
            if fileUserConfigPath.endswith("\\") == False:
                fileUserConfigPath = fileUserConfigPath + "\\"
            resfileapth = fileUserConfigPath + resfile.replace("/", "\\")
        ff = File(resfileapth)
        if ff.isFile() == False or ff.exists() == False:
            # Message: "Resource file does not exist; preview unavailable."
            request.setAttribute("error", u"资源文件不存在,无法进行预览。")
            return "/WEB-INF/ftl/show_resource_swf.ftl"
        # These file types are displayed directly (browser handles them)
        if filename.endswith(".jpg") or filename.endswith(
                ".gif") or filename.endswith(".png"):
            response.sendRedirect(CommonUtil.getSiteUrl(request) + resfile)
            return
        if filename.endswith(".txt"):
            content = FileUtils.readFileToString(
                ff, CommonUtil.getFileEncoding(resfileapth))
            request.setAttribute("content", content)
            return "/WEB-INF/ftl/show_resource_txt.ftl"
        # Disabled local pdf2swf configuration check:
        #PDF2SWFPath = request.getSession().getServletContext().getInitParameter("pdf2swfPath")
        #if PDF2SWFPath == None or PDF2SWFPath == "":
        #    request.setAttribute("error", u"没有配置文件转换服务。")
        #    return "/WEB-INF/ftl/show_resource_swf.ftl"
        if filename.endswith(".doc") or filename.endswith(
                ".docx"
        ):  # or filename.endswith(".ppt") or filename.endswith(".pptx") or filename.endswith(".xls") or filename.endswith(".xlsx"):
            # Target SWF sits next to the source file, same base name
            swf = resfileapth[0:resfileapth.find(".")] + ".swf"
            file = File(swf)
            if file.isFile() == False or file.exists() == False:
                # Disabled in-process conversion:
                #converter = DocConverter(JacobPDFConverter(), SWFToolsSWFConverter(PDF2SWFPath))
                #converter.convert(resfileapth)
                #converter = None
                # Remote conversion service (host/port/timeout from web.xml)
                server_ip = request.getSession().getServletContext(
                ).getInitParameter("server_ip")
                server_port = request.getSession().getServletContext(
                ).getInitParameter("server_port")
                timeout = request.getSession().getServletContext(
                ).getInitParameter("timeout")
                fcs = FCSConverter()
                fcs.fcs(
                    server_ip, server_port, timeout, resource.title,
                    resfileapth[0:resfileapth.find(".")] + "." +
                    filename.split(".")[-1], file.toString(), "0")
            request.setAttribute("resourceId", resourceId)
            request.setAttribute(
                "showWaiting", self.params.safeGetStringParam("showWaiting"))
            swf = CommonUtil.getSiteUrl(
                request) + resfile[0:resfile.find(".")] + ".swf"
            request.setAttribute("swf", swf)
            return "/WEB-INF/ftl/show_resource_swf.ftl"
        # Disabled pdf-to-swf branch:
        #if filename.endswith(".pdf"):
        #    swf = resfileapth[0:resfileapth.find(".")] + ".swf"
        #    file = File(swf)
        #    if file.isFile() == False or file.exists() == False:
        #        converter = SWFToolsSWFConverter(PDF2SWFPath)
        #        converter.convert2SWF(resfileapth)
        #        converter = None
        #    request.setAttribute("resourceId", resourceId)
        #    request.setAttribute("showWaiting", self.params.safeGetStringParam("showWaiting"))
        #    swf = CommonUtil.getSiteUrl(request) + resfile[0:resfile.find(".")] + ".swf"
        #    request.setAttribute("swf", swf)
        #    return "/WEB-INF/ftl/show_resource_swf.ftl"
        if filename.endswith(".swf"):
            file = File(resfileapth)
            if file.isFile() == False or file.exists() == False:
                # Message: "The swf file does not exist; preview unavailable."
                request.setAttribute("error", u"swf 文件不存在,无法进行预览。")
                return "/WEB-INF/ftl/show_resource_swf.ftl"
            swf = CommonUtil.getSiteUrl(request) + resfile
            request.setAttribute("orginIsSwf", "")
            request.setAttribute("swf", swf)
            return "/WEB-INF/ftl/show_resource_swf.ftl"
        # Message: "This file type does not support preview."
        request.setAttribute("error", u"此文件不支持预览。")
        return "/WEB-INF/ftl/show_resource_swf.ftl"
    except BaseException, varExption:
        if varExption != None:
            # Message: "An error occurred while previewing and was caught."
            request.setAttribute(
                "error",
                u"预览此文件时出错,并且已经被捕获。<br/><br/>" + str(varExption))
        else:
            # Message: "An error occurred and no error info was captured."
            request.setAttribute("error", u"预览此文件时出错,并且并且没有捕获到错误信息。")
        return "/WEB-INF/ftl/show_resource_swf.ftl"
def __activate__(self, context):
    """Report-admin endpoint: dispatches create / edit / options /
    get-json actions for report management.

    Requires an admin login; otherwise an error message is recorded and no
    action runs. Each action writes a small JSON payload to the response
    and returns early.
    """
    self.auth = context["page"].authentication
    self.errorMsg = ""
    self.request = context["request"]
    self.response = context["response"]
    self.formData = context["formData"]
    self.log = context["log"]
    self.reportManager = context["Services"].getService("reportManager")
    self.reportName = None
    # Access control: only admins may proceed
    if (self.auth.is_logged_in()):
        if (self.auth.is_admin() == True):
            pass
        else:
            self.errorMsg = "Requires Admin / Librarian / Reviewer access."
    else:
        self.errorMsg = "Please login."
    if self.errorMsg == "":
        self.reportName = self.formData.get("reportName")
        if (self.reportName):
            self.report = self.reportManager.getReport(self.reportName)
        # 'func' may arrive via form data or as a raw query parameter
        self.func = self.formData.get("func", "")
        if self.func == "" and self.request.getParameter("func"):
            self.func = self.request.getParameter("func")
        if self.func == "action":
            self.action = self.request.getParameter("action")
            if self.action == "create":
                self.createReport()
                out = self.response.getPrintWriter(
                    "text/plain; charset=UTF-8")
                out.println("{\"id\":\"" + self.report.getReportName() +
                            "\"}")
                out.close()
                return
            if self.action == "edit":
                self.editReport()
                out = self.response.getPrintWriter(
                    "text/plain; charset=UTF-8")
                out.println("{\"id\":\"" + self.report.getReportName() +
                            "\"}")
                out.close()
                return
            if self.action == "options":
                # Serve the static criteria-options JSON straight from disk
                out = self.response.getPrintWriter(
                    "text/plain; charset=UTF-8")
                out.println(
                    FileUtils.readFileToString(
                        File(
                            FascinatorHome.getPath("reports") +
                            "/reportCriteriaOptions.json")))
                out.close()
                return
            if self.action == "get-json":
                out = self.response.getPrintWriter(
                    "text/plain; charset=UTF-8")
                report = self.reportManager.getReports().get(
                    self.request.getParameter("reportName"))
                queryFilters = report.config.getObject("query", "filter")
                jsonMap = HashMap()
                # Emit filter values sorted by element id for stable output
                elementIds = ArrayList()
                for elementId in queryFilters:
                    elementIds.add(elementId)
                Collections.sort(elementIds)
                for elementId in elementIds:
                    jsonMap.put(elementId,
                                queryFilters.get(elementId).get("value"))
                jsonMap.put("reportName", report.getLabel())
                JsonObject.writeJSONString(jsonMap, out)
                out.close()
                return
def onCall(self):
    """Stream the bundled index.html page as UTF-8 HTML."""
    indexPath = "{}/resources/index.html".format(sponge.home)
    # The file is opened lazily, when the output stream is consumed
    writer = lambda output: FileUtils.copyFile(File(indexPath), output)
    return OutputStreamValue(writer).withContentType(
        "text/html; charset=\"UTF-8\"")
def processExperiment(self, experimentNode): """Register an IExperimentUpdatable based on the Experiment XML node. @param experimentNode An XML node corresponding to an Experiment @return IExperimentUpdatable experiment """ # Get the experiment version expVersion = experimentNode.attrib.get("version") if expVersion is None: expVersion = "0" # Get the openBIS identifier openBISIdentifier = experimentNode.attrib.get("openBISIdentifier") # Get the experiment name expName = experimentNode.attrib.get("name") # Get the experiment date and reformat it to be compatible # with postgreSQL expDate = self.formatExpDateForPostgreSQL(experimentNode.attrib.get("date")) # Get the description description = experimentNode.attrib.get("description") # Get the acquisition hardware acqHardware = experimentNode.attrib.get("acq_hardware") # Get the acquisition software acqSoftware = experimentNode.attrib.get("acq_software") # Get the owner name owner = experimentNode.attrib.get("owner_name") # Get attachments attachments = experimentNode.attrib.get("attachments") # Create the experiment (with corrected ID if needed: see above) openBISExperiment = self.createExperiment(openBISIdentifier, expName) if not openBISExperiment: msg = "Could not create experiment " + openBISIdentifier self._logger.error(msg) raise Exception(msg) # Get comma-separated tag list tagList = experimentNode.attrib.get("tags") if tagList != None and tagList != "": # Retrieve or create the tags openBISTags = self.retrieveOrCreateTags(tagList) # Set the metaprojects (tags) for openBISTag in openBISTags: openBISTag.addEntity(openBISExperiment) # Set the experiment version openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_VERSION", expVersion) # Set the date openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_DATE", expDate) # Set the description openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_DESCRIPTION", description) # Set the acquisition hardware 
openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE", acqHardware) # Set the acquisition hardware friendly name openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME", self._machinename) # Set the acquisition software openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_ACQ_SOFTWARE", acqSoftware) # Set the experiment owner openBISExperiment.setPropertyValue("LSR_FORTESSA_EXPERIMENT_OWNER", owner) # Add the attachments if attachments is not None: # Extract all relative file names attachmentFiles = attachments.split(";") for f in attachmentFiles: # This is an additional security step if f == '': continue # Inform msg = "Adding file attachment " + f self._logger.info(msg) # Build the full path attachmentFilePath = os.path.join(self._incoming.getAbsolutePath(), f) # Extract the file name attachmentFileName = os.path.basename(attachmentFilePath) # Read the attachment into a byte array javaFile = java.io.File(attachmentFilePath) byteArray = FileUtils.readFileToByteArray(javaFile) # Add attachment openBISExperiment.addAttachment(attachmentFilePath, attachmentFileName, "", byteArray) # Return the openBIS Experiment object return openBISExperiment
def getUrls(self):
    """Read the history file and return its lines as a list."""
    historyFile = self.__getHistoryFile()
    return FileUtils.readLines(historyFile)
def saveGitUrl(url):
    """Persist the git URL to its backing file under the reload lock."""
    RELOAD_LOCK.lock()
    try:
        target = File(getGitUrlFile())
        FileUtils.writeStringToFile(target, url)
    finally:
        RELOAD_LOCK.unlock()
def saveGitBranch(branch):
    """Persist the git branch name to its backing file under the reload
    lock."""
    RELOAD_LOCK.lock()
    try:
        target = File(getGitBranchFile())
        FileUtils.writeStringToFile(target, branch)
    finally:
        RELOAD_LOCK.unlock()
def get_file_contents(self, path):
    """Return the full contents of the file at path as a string."""
    target = File(path)
    return FileUtils.readFileToString(target)
def writeResponseToStatusResponseCache(self, jobId, jobStatus):
    """Cache a curation job status response as <jobId>.json under
    FascinatorHome/curation-status-responses.

    Parameters:
    jobId -- numeric job identifier, used as the cache file name
    jobStatus -- status object; serialized via toString(True)
    """
    curationStatusRespones = File(
        FascinatorHome.getPath() + "/curation-status-responses")
    # BUG FIX: the condition was inverted -- forceMkdir only ran when the
    # directory already existed, so a missing cache directory was never
    # created. Create it when absent instead.
    if not curationStatusRespones.exists():
        FileUtils.forceMkdir(curationStatusRespones)
    FileUtils.writeStringToFile(
        File(curationStatusRespones.getPath() + "/" +
             Integer(jobId).toString() + ".json"),
        jobStatus.toString(True))
def processExperiment(self, experimentNode,
                      openBISExpType="MICROSCOPY_EXPERIMENT"):
    """Register an IExperiment based on the Experiment XML node.

    @param experimentNode An XML node corresponding to an Experiment
    @param openBISExpType The experiment type
    @return IExperiment experiment
    """

    # Get the experiment version (default to "0" when the attribute is absent)
    expVersion = experimentNode.attrib.get("version")
    if expVersion is None:
        expVersion = "0"

    # Get the openBIS identifier
    openBISIdentifier = experimentNode.attrib.get("openBISIdentifier")

    # Get the experiment name
    expName = experimentNode.attrib.get("name")

    # Get the experiment date and reformat it to be compatible
    # with postgreSQL
    # TODO: Add this
    # expDate = self.formatExpDateForPostgreSQL(experimentNode.attrib.get("date"))

    # Get the description
    description = experimentNode.attrib.get("description")

    # Get the acquisition hardware
    # TODO: Add this
    # acqHardware = experimentNode.attrib.get("acq_hardware")

    # Get the acquisition software
    # TODO: Add this
    # acqSoftware = experimentNode.attrib.get("acq_software")

    # Get the owner name
    # TODO: Add this
    # owner = experimentNode.attrib.get("owner_name")

    # Get attachments
    attachments = experimentNode.attrib.get("attachments")

    # Make sure to keep the code length within the limits imposed by
    # openBIS for codes
    if len(openBISIdentifier) > 41:
        openBISIdentifier = openBISIdentifier[0:41]

    # Create univocal ID by appending a timestamp
    openBISIdentifier = openBISIdentifier + "_" + self.getCustomTimeStamp()

    # Make sure to create a new Experiment
    # (removed a leftover debug print of the created object's type)
    openBISExperiment = self._transaction.createNewExperiment(
        openBISIdentifier, openBISExpType)
    if not openBISExperiment:
        msg = "PROCESSOR::processExperiment(): " + \
              "Could not create experiment " + openBISIdentifier
        self._logger.error(msg)
        raise Exception(msg)

    # Get comma-separated tag list
    tagList = experimentNode.attrib.get("tags")
    if tagList is not None and tagList != "":
        # Retrieve or create the tags
        openBISTags = self.retrieveOrCreateTags(tagList)

        # Set the metaprojects (tags)
        for openBISTag in openBISTags:
            openBISTag.addEntity(openBISExperiment)

    # Set the date
    # TODO: Add this
    # openBISExperiment.setPropertyValue("MICROSCOPY_EXPERIMENT_DATE",
    #                                    expDate)

    # Store the name
    openBISExperiment.setPropertyValue("MICROSCOPY_EXPERIMENT_NAME",
                                       expName)

    # Set the experiment version
    openBISExperiment.setPropertyValue("MICROSCOPY_EXPERIMENT_VERSION",
                                       expVersion)

    # Set the description -- but only if is not empty.
    # This makes sure that the description of an already existing experiment
    # is not overridden by an empty string.
    if description != "":
        openBISExperiment.setPropertyValue(
            "MICROSCOPY_EXPERIMENT_DESCRIPTION", description)
    else:
        currentDescription = openBISExperiment.getPropertyValue(
            "MICROSCOPY_EXPERIMENT_DESCRIPTION")
        if (currentDescription is None or currentDescription == ""):
            openBISExperiment.setPropertyValue(
                "MICROSCOPY_EXPERIMENT_DESCRIPTION", "")

    # Set the acquisition hardware
    # TODO: Add this
    # openBISExperiment.setPropertyValue("MICROSCOPY_EXPERIMENT_ACQ_HARDWARE",
    #                                    acqHardware)

    # Set the acquisition hardware friendly name
    openBISExperiment.setPropertyValue(
        "MICROSCOPY_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME",
        self._machinename)

    # Set the acquisition software
    # TODO: Add this
    # openBISExperiment.setPropertyValue("MICROSCOPY_EXPERIMENT_ACQ_SOFTWARE",
    #                                    acqSoftware)

    # Set the experiment owner
    # TODO: Add this
    # openBISExperiment.setPropertyValue("MICROSCOPY_EXPERIMENT_OWNER",
    #                                    owner)

    # Add the attachments
    if attachments is not None:

        # Extract all relative file names
        attachmentFiles = attachments.split(";")

        for f in attachmentFiles:

            # This is an additional security step
            if f == '':
                continue

            # Inform
            msg = "Adding file attachment " + f
            self._logger.info(msg)

            # Build the full path
            attachmentFilePath = os.path.join(
                self._incoming.getAbsolutePath(), f)

            # Extract the file name
            attachmentFileName = os.path.basename(attachmentFilePath)

            # Read the attachment into a byte array
            javaFile = java.io.File(attachmentFilePath)
            byteArray = FileUtils.readFileToByteArray(javaFile)

            # Add attachment
            openBISExperiment.addAttachment(attachmentFilePath,
                                            attachmentFileName,
                                            "",
                                            byteArray)

    # Return the openBIS Experiment object
    return openBISExperiment
#
# THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS
# FOR A PARTICULAR PURPOSE. THIS CODE AND INFORMATION ARE NOT SUPPORTED BY XEBIALABS.
#
from java.io import File
from java.net import URL
from org.apache.commons.io import FileUtils

from com.perfectomobile.selenium import MobileDriver

# Connect to the Perfecto Mobile server using the credentials supplied by
# the hosting tool (perfectomobileServer, applicationUrl, repositoryKey and
# deviceIds are injected into the script's scope).
driver = MobileDriver(perfectomobileServer['url'],
                      perfectomobileServer['username'],
                      perfectomobileServer['password'])
try:
    # Download the application into a local temp file (5-minute connect and
    # read timeouts) and push it to the Perfecto media repository.
    # Renamed from `file`, which shadowed the Python 2 builtin.
    mediaFile = File.createTempFile("application", "maf")
    FileUtils.copyURLToFile(URL(applicationUrl), mediaFile, 300000, 300000)
    driver.uploadMedia(repositoryKey, mediaFile)

    # Install the uploaded application on every requested device.
    for key in deviceIds:
        device = driver.getDevice(key)
        device.open()
        device.installApplication(repositoryKey)
finally:
    # Always close the driver session, even when upload/install fails.
    driver.quit()
def processExperiment(self, experimentNode): """Register an IExperimentUpdatable based on the Experiment XML node. @param experimentNode An XML node corresponding to an Experiment @return IExperimentUpdatable experiment """ # Get the experiment version expVersion = experimentNode.attrib.get("version") if expVersion is None: expVersion = "0" # Get the openBIS identifier openBISIdentifier = experimentNode.attrib.get("openBISIdentifier") # Get the experiment name expName = experimentNode.attrib.get("name") # Get the experiment date and reformat it to be compatible # with postgreSQL expDate = self.formatExpDateForPostgreSQL( experimentNode.attrib.get("date")) # Get the description description = experimentNode.attrib.get("description") # Get the acquisition hardware acqHardware = experimentNode.attrib.get("acq_hardware") # Get the acquisition software acqSoftware = experimentNode.attrib.get("acq_software") # Get the owner name owner = experimentNode.attrib.get("owner_name") # Get attachments attachments = experimentNode.attrib.get("attachments") # Create the experiment (with corrected ID if needed: see above) openBISExperiment = self.createExperiment(openBISIdentifier, expName) if not openBISExperiment: msg = "Could not create experiment " + openBISIdentifier self._logger.error(msg) raise Exception(msg) # Get comma-separated tag list tagList = experimentNode.attrib.get("tags") if tagList != None and tagList != "": # Retrieve or create the tags openBISTags = self.retrieveOrCreateTags(tagList) # Set the metaprojects (tags) for openBISTag in openBISTags: openBISTag.addEntity(openBISExperiment) # Set the experiment version openBISExperiment.setPropertyValue("FACS_ARIA_EXPERIMENT_VERSION", expVersion) # Set the date openBISExperiment.setPropertyValue("FACS_ARIA_EXPERIMENT_DATE", expDate) # Set the description openBISExperiment.setPropertyValue("FACS_ARIA_EXPERIMENT_DESCRIPTION", description) # Set the acquisition hardware 
openBISExperiment.setPropertyValue("FACS_ARIA_EXPERIMENT_ACQ_HARDWARE", acqHardware) # Set the acquisition hardware friendly name openBISExperiment.setPropertyValue( "FACS_ARIA_EXPERIMENT_ACQ_HARDWARE_FRIENDLY_NAME", self._machinename) # Set the acquisition software openBISExperiment.setPropertyValue("FACS_ARIA_EXPERIMENT_ACQ_SOFTWARE", acqSoftware) # Set the experiment owner openBISExperiment.setPropertyValue("FACS_ARIA_EXPERIMENT_OWNER", owner) # Add the attachments if attachments is not None: # Extract all relative file names attachmentFiles = attachments.split(";") for f in attachmentFiles: # This is an additional security step if f == '': continue # Inform msg = "Adding file attachment " + f self._logger.info(msg) # Build the full path attachmentFilePath = os.path.join( self._incoming.getAbsolutePath(), f) # Extract the file name attachmentFileName = os.path.basename(attachmentFilePath) # Read the attachment into a byte array javaFile = java.io.File(attachmentFilePath) byteArray = FileUtils.readFileToByteArray(javaFile) # Add attachment openBISExperiment.addAttachment(attachmentFilePath, attachmentFileName, "", byteArray) # Return the openBIS Experiment object return openBISExperiment
def saveUrl(self, url):
    """Append *url* to the URL history file, preserving existing entries."""
    historyFile = self.__getHistoryFile()
    if historyFile.exists():
        urls = FileUtils.readLines(historyFile)
        urls.add(url)
    else:
        urls = [url]
    # Bug fix: the original always wrote [url], discarding the history it
    # had just read and appended to.
    FileUtils.writeLines(historyFile, urls)
def __createFromSelected(self): self.vc("log").debug("Creating package from selected...") packageType, jsonConfigFile = self.__getPackageTypeAndJsonConfigFile() #self.vc("log").debug("packageType = '{}'", packageType) #self.vc("log").debug("jsonConfigFile = '{}'", jsonConfigFile) # if modifying existing manifest, we already have an identifier, # otherwise create a new one manifestId = self.__getActiveManifestId() if manifestId is None: manifestHash = "%s.tfpackage" % uuid.uuid4() else: manifestHash = self.__getActiveManifestPid() # store the manifest file for harvesting packageDir = FascinatorHome.getPathFile("packages") packageDir.mkdirs() manifestFile = File(packageDir, manifestHash) outStream = FileOutputStream(manifestFile) outWriter = OutputStreamWriter(outStream, "UTF-8") manifest = self.__getActiveManifest() oldType = manifest.getType() if oldType is None: manifest.setType(packageType) else: manifest.setType(oldType) self.vc("log").debug("Manifest: %s" % manifest) outWriter.write(manifest.toString(True)) outWriter.close() try: if manifestId is None: # harvest the package as an object username = self.vc("sessionState").get("username") if username is None: username = "******" # necessary? 
harvester = None # set up config files, and make sure they are both deployed workflowsDir = FascinatorHome.getPathFile("harvest/workflows") configFile = self.__getFile(workflowsDir, jsonConfigFile) rulesFile = self.__getFile(workflowsDir, "packaging-rules.py") # run the harvest client with our packaging workflow config harvester = HarvestClient(configFile, manifestFile, username) harvester.start() manifestId = harvester.getUploadOid() harvester.shutdown() else: # update existing object object = StorageUtils.getDigitalObject(Services.getStorage(), manifestId) manifestStream = FileUtils.openInputStream(manifestFile) StorageUtils.createOrUpdatePayload(object, manifestHash, manifestStream) manifestStream.close() object.close() except Exception, ex: error = "Packager workflow failed: %s" % str(ex) self.vc("log").error(error, ex) if harvester is not None: harvester.shutdown() return '{ "status": "failed" }'
def save_screenshot(self, filepath):
    """Capture the current browser screen and copy the image to *filepath*."""
    shot = self.__firefoxDriver.getScreenshotAs(OutputType.FILE)
    FileUtils.copyFile(shot, File(filepath))
def __handleAlert(self, file): '''Sends the alert off to the relevant handler and then pushes the metadata to ReDBox Parameters: file -- the file (path, not object) to be processed ''' successCount = 0 failedCount = 0 handler = None self.logInfo(file, "Processing file " + file) self.__log.info("Alert system is processing file %s" % file) ext = file.rpartition('.')[2] if not ext in self.handlers: self.logInfo(file, "Did not process file as extension is not configured") return (0,0) #Add the timestamp to a copy of baseline baseline = dict(self.baseline) timestamp = time.gmtime(os.path.getmtime(self.pBase(file))) for field in self.timestampFields: val = time.strftime("%Y-%m-%d %H:%M:%S", timestamp) baseline[field] = val if self.handlers[ext] == "CSVAlertHandler": config = self.config['CSVAlertHandlerParams']['configMap'][ext] handler = CSVAlertHandler(self.pBase(file), config, baseline) self.logInfo(file, "Using the CSVAlertHandler for file with extension %s" % ext) elif self.handlers[ext] == "XMLAlertHandler": config = self.config['XMLAlertHandlerParams']['configMap'][ext] handler = XMLAlertHandler(self.pBase(file), config, baseline) self.logInfo(file, "Using the XMLAlertHandler for file with extension %s" % ext) else: raise AlertException("Unknown file handler: '%s'" % self.handlers[ext]) jsonList = handler.process() if jsonList is None: self.logInfo(file, "No records were returned.") return(0,0) ## Now all of the JSON Objects need to be ingested into the tool chain id = 0 for json in jsonList: id += 1 #use an incremental filename in case the data file contains more than 1 record meta_file_name = "%s.%s" % (file,id) meta_file = self.pTemp(meta_file_name) self.logInfo(file, "Using metadata file: %s" % meta_file) try: oid = self.__ingestJson(file, meta_file, json) successCount += 1 except Exception, e: failedCount += 1 self.logInfo(file, "Moving failed metadata file [%s] to %s." 
% (meta_file, self.__DIR_FAILED)) filepath = os.path.join(self.__DIR_FAILED,meta_file_name) FileUtils.moveFile(File(meta_file),File(filepath)); continue self.logInfo(file, "Moving successful metadata file [%s] to %s." % (meta_file, self.__DIR_SUCCESS)) filepath = os.path.join(self.__DIR_SUCCESS,meta_file_name) #python library seems to dislike mixed \ and / in path FileUtils.moveFile(File(meta_file),File(filepath));