def load_zones(app):
    """Read favourite zones

    Populates app.zones with Zone objects built from the files in
    <SCRIPTDIR>/configuration/favourite_zones, and sets app.favZone
    when a file matches app.favZoneName.
    """
    app.zones = []
    favZonesDir = File(File.separator.join([app.SCRIPTDIR, "configuration", "favourite_zones"]))
    for zoneFile in sorted(favZonesDir.listFiles()):
        name = zoneFile.getName()[:-4]   # strip the 4-character extension (".txt"? — confirm)
        fileName = zoneFile.getPath()
        # NOTE: rebinds the java.io.File loop variable to a Python file object
        zoneFile = open(fileName)
        fileContent = zoneFile.read()
        # File format is "<geometry>|<country>" or just "<geometry>"
        if len(fileContent.split("|")) == 2:
            geomString, country = fileContent.split("|")
            country = country.upper()
        else:
            geomString = fileContent
            country = ""
        zoneFile.close()
        # Geometry starting with a digit is a rectangle; otherwise a polygon
        # (Integer.parseInt throws java NumberFormatException on non-digits).
        zType = "rectangle"
        try:
            Integer.parseInt(geomString[0])
        except NumberFormatException:
            zType = "polygon"
        zone = Zone(app, name, zType, geomString, country)
        app.zones.append(zone)
        if name == app.favZoneName:
            app.favZone = zone
    #Fav zone is active but its data is missing
    if app.favouriteZoneStatus and app.favZone is None:
        app.favouriteZoneStatus = False
def get_path_with_real_case(filename): from java.io import File # noqa f = File(filename) ret = f.getCanonicalPath() if IS_PY2 and not isinstance(ret, str): return ret.encode(getfilesystemencoding()) return ret
def getWatcherConfig(self):
    """Return a JsonConfigHelper for the watcher's app/config.json, or None
    when the file does not exist.

    BUG FIX: the fallback value for "watcher/path" contained a stray
    closing parenthesis ("${fascinator.home}/watcher)"), which produced a
    non-existent path whenever the configuration key was unset.
    """
    json = JsonConfig()
    watcherPath = json.get("watcher/path", "${fascinator.home}/watcher")
    configFile = File("%s/app/config.json" % watcherPath)
    if configFile.exists():
        return JsonConfigHelper(configFile)
    return None
def getcwd():
    """getcwd() -> path

    Return a string representing the current working directory.
    """
    # Resolving a relative name yields "<cwd>/foo"; its parent is the cwd.
    probe = File(File("foo").getAbsolutePath())
    return probe.getParent()
def test_image_example_dir_iteration(self):
    """Iterate an ImageExampleDir and verify every example is labelled 'A'.

    BUG FIX: the original raised a plain string, which is a TypeError on
    Python >= 2.6 (string exceptions were removed); raise a real exception
    object instead.
    """
    f = File(str(inspect.getfile(inspect.currentframe())))
    # character_examples/A lives three directory levels above this test file.
    example_dir = File(File(f.getParentFile().getParentFile().getParentFile(), "character_examples"), "A")
    image_example_dir = ImageExampleDir(example_dir.getCanonicalPath())
    for label, image in image_example_dir:
        if label != "A":
            raise AssertionError("The label of the examples in this dir should be A")
def registerExtenderCallbacks(self, callbacks):
    """Burp extender entry point.

    If the last command-line argument names an existing saved-state file,
    restore it, temporarily disabling auto-save and the active scanner's
    custom scope while the state loads.  When no state file was loaded,
    re-enable auto-save (scanner scope stays disabled).
    """
    self.autosave = None
    self.callbacks = callbacks
    # Last CLI argument that is not an option flag is treated as a state file.
    # NOTE(review): the check inspects args[-1] but the load uses args[1] —
    # confirm these are intended to refer to the same argument.
    if len(self.args) > 1 and not self.args[-1].startswith('-'):
        fname = self.args[1].strip("'")
        f = File(fname)
        if f.exists():
            callbacks.issueAlert('Loading %s..' % fname)
            callbacks.restoreState(f)
            conf = callbacks.saveConfig()
            # Remember the prior auto-save setting so it can be restored later.
            self.autosave = True if conf['suite.doAutoSave'] else False
            conf['suite.doAutoSave'] = 'false'
            active_type = conf['scanner.activecustomscopetype']
            conf['scanner.activecustomscopetype'] = '0'
            callbacks.loadConfig(conf)
            print "Deactivate doAutoSave (%s -> false)" % (self.autosave,)
            print "Deactivate active scanner (%s -> 0)" % (active_type,)
    # No state file was restored above: turn auto-save back on.
    if self.autosave is None:
        conf = callbacks.saveConfig()
        print "Activate doAutoSave (%s -> true)" % (conf['suite.doAutoSave'],)
        conf['suite.doAutoSave'] = 'true'
        active_type = conf['scanner.activecustomscopetype']
        conf['scanner.activecustomscopetype'] = '0'
        print "Deactivate active scanner (%s -> 0)" % (active_type,)
        callbacks.loadConfig(conf)
def __getFile(self, packageDir, filename):
    """Return the File packageDir/filename, extracting it from the bundled
    /workflows resources on first use.

    BUG FIX: the output stream is now closed in a finally block, so a
    failed resource copy no longer leaks the file handle.
    """
    file = File(packageDir, filename)
    if not file.exists():
        out = FileOutputStream(file)
        try:
            IOUtils.copy(Services.getClass().getResourceAsStream("/workflows/" + filename), out)
        finally:
            out.close()
    return file
def validateDirectory(Framework):
    """Return 1 when the trigger CI's "Export Directory" names an existing
    directory on disk, 0 otherwise."""
    exportDirectory = Framework.getTriggerCIData("Export Directory")
    # Guard clause: unset or empty value cannot be a valid directory.
    if exportDirectory is None or exportDirectory == "":
        return 0
    candidate = File(exportDirectory)
    if candidate.exists() and candidate.isDirectory():
        return 1
    return 0
def samefile(path, path2):
    """Test whether two pathnames reference the same actual file"""
    first = File(_tostr(path, "samefile"))
    second = File(_tostr(path2, "samefile"))
    # Canonical paths resolve symlinks, relative segments and case, so
    # equality means both names denote the same file.
    return first.getCanonicalPath() == second.getCanonicalPath()
def run_ant_target(build_file, target):
    """Execute a single Ant *target* from *build_file*, forwarding every
    rb_config property into the Ant project and logging both to the
    console and to log4j."""
    # Initializing DefaultLogger for all ant logs to go to console
    ant_logger = DefaultLogger()
    # Initializing Log4jListener to be able to log all ANT events to log file.
    log_file = Log4jListener()
    ant_logger.setErrorPrintStream(System.err)
    ant_logger.setOutputPrintStream(System.out)
    ant_logger.setMessageOutputLevel(Project.MSG_INFO)
    build_fd = File(build_file)
    project = Project()
    # Copy all configured properties into the Ant project, when present.
    if rb_config is not None:
        enum = rb_config.keys()
        while enum.hasMoreElements():
            key = enum.nextElement()
            project.setProperty(key, rb_config.getProperty(key))
    project.setUserProperty('ant.file', build_fd.getAbsolutePath())
    project.addBuildListener(ant_logger)
    project.addBuildListener(log_file)
    project.init()
    helper = ProjectHelper.getProjectHelper()
    project.addReference('ant.projectHelper', helper)
    helper.parse(project, build_fd)
    project.executeTarget(target)
def createFileChooserDialog(filters, filename, prefs, prefkey, multiselect):
    """
    Creates a file chooser dialog that remembers its last directory.

    filters     -- a single file filter or an iterable of them
    filename    -- optional file name to preselect in the chooser
    prefs       -- preferences object used to persist the last directory
    prefkey     -- preferences key under which the last directory is stored
    multiselect -- whether multiple files may be selected
    """
    fileChooser = JFileChooser()
    # Add filters
    if not hasattr(filters, '__iter__'):
        filters = (filters,)
    if filters:
        for filter in filters:
            fileChooser.addChoosableFileFilter(filter)
        fileChooser.fileFilter = filters[0]
    # Enable/disable multiple file select
    fileChooser.setMultiSelectionEnabled(multiselect)
    # Restore the last directory
    if prefs and prefkey:
        defaultDirName = prefs.get(prefkey, None)
        if defaultDirName:
            defaultDirectory = File(defaultDirName)
            if defaultDirectory.exists():
                fileChooser.currentDirectory = defaultDirectory
    # Preset the file name
    if filename:
        fileChooser.selectedFile = File(fileChooser.currentDirectory, filename)
    return fileChooser
def library_import(self, name, attributes):
    """Normalise a library's 'source' attribute when running under Jython
    (classpath pseudo-paths, jar-embedded resources), then forward the
    event over the socket."""
    # equals org.python.core.ClasspathPyImporter.PYCLASSPATH_PREFIX
    import platform
    if 'Jython' in platform.python_implementation():
        import org.python.core.imp as jimp
        if attributes['source']:
            if '__pyclasspath__' in attributes['source']:
                # Resolve a __pyclasspath__ pseudo-path through the
                # syspath class loader to a concrete resource URL.
                res = attributes['source'].split('__pyclasspath__')[1].replace(os.sep, '')
                attributes['source'] = str(jimp.getSyspathJavaLoader().getResources(res).nextElement())
        else:
            try:
                # No source recorded: fall back to the library's .class resource.
                source_uri = jimp.getSyspathJavaLoader().getResources(name + '.class').nextElement()
                attributes['source'] = str(source_uri)
            except:
                pass
        source_uri_txt = attributes['source']
        if source_uri_txt and 'file:/' in source_uri_txt:
            import re
            from java.io import File as File
            from java.net import URL as URL
            # Strip any prefix before the "file:" URL and any "!/entry"
            # jar suffix to obtain a plain filesystem path.
            filePath = re.split('.*(?=file[:])', source_uri_txt)
            if len(filePath) > 1:
                path = re.split('[!][/]', filePath[1])[0]
                f = File(URL(path).getFile())
                source_uri_txt = f.getAbsolutePath()
            attributes['source'] = source_uri_txt
    self._send_socket("library_import", name, attributes)
def loadRasterLayer (rasterfile, mode = "r" ):
    ## Load a Raster file in a Layer
    # NOTE(review): 'mode' is accepted but never used in this body.
    sourceFileName[0]=rasterfile
    if not isinstance (rasterfile,File):
        rasterfile = File(rasterfile)
    name, ext = splitext(rasterfile.getName())
    view = currentView()
    # Get the manager to use
    dalManager = DALLocator.getDataManager()
    mapContextManager = MapContextLocator.getMapContextManager()
    if ext.lower() == ".ecw" or ext.lower() == ".jp2" :
        # FIXME
        pass
    elif ext.lower() == ".mrsid":
        # FIXME
        pass
    else:
        # Create the parameters to open the raster store based in GDAL
        params = dalManager.createStoreParameters("Gdal Store")
        params.setFile(rasterfile)
        # Create the raster store
        dataStore = dalManager.createStore(params)
        # Create a raster layer based in this raster store
        layer = mapContextManager.createLayer(name, dataStore);
        view.addLayer(layer)
        # NOTE(review): .ecw/.jp2/.mrsid branches are unimplemented, so the
        # function returns None for those formats.
        return layer
def DiscoveryMain(Framework):
    """Phase 2 of the Atrium integration: validate the intermediate XML
    files produced by phase 1 and run the mapping transformation to
    generate result XML files."""
    logger.info('Start Phase 2 ... Apply Mapping transformation to Atrium CIs')
    userExtUcmdbDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\Atrium\\'
    inputFilesDirectory = File(userExtUcmdbDir + 'inter\\')
    inputFiles = inputFilesDirectory.listFiles()
    filePathDir = userExtUcmdbDir + 'results\\'
    directory = File(filePathDir)
    files = directory.listFiles()
    ## Clean up the existing result XML files
    if (files != None):
        for file in files:
            file.delete()
    ## Make sure we have XML files in the intermediate directory
    xmlFileInIntermediatesDirectory = 0
    for inputFile in inputFiles:
        inputFileName = inputFile.getName()
        # A usable input is a non-empty file with a ".xml" extension.
        if inputFileName[len(inputFileName)-4:].lower() == '.xml' and inputFile.length() > 0:
            xmlFileInIntermediatesDirectory = 1
    if not xmlFileInIntermediatesDirectory:
        logger.warn('Intermediate XML not found or invalid. Perhaps no data was received from Atrium or an error occurred in the atrium_query script.')
        return
    ## Generate the output XML files in results directory
    ip = CollectorsParameters.getValue(CollectorsParameters.KEY_SERVER_NAME)
    integrationAPI = IntegrationAPI(ip, "atrium_map.py")
    integrationAPI.processDir(userExtUcmdbDir)
    logger.info('End Phase 2 ... Apply Mapping transformation to Atrium CIs')
def updateCmdForDeltaScanning(commandLine, Framework):
    """Append -oldscanid/-oldscansize parameters to the scanner command
    line when a previous gzipped scan file exists, enabling delta
    scanning.  Returns the (possibly updated) command line."""
    originalScanFileFolderPath = CollectorsParameters.PROBE_MGR_INVENTORY_XMLENRICHER_FILES_FOLDER + XmlEnricherConstants.ORIGINAL_FOLDER_NAME
    originalScanFile = File(originalScanFileFolderPath, InventoryUtils.generateScanFileName(Framework))
    if originalScanFile.exists():
        scan = None
        try:
            try:
                buffer = jarray.zeros(0x24, 'b')
                fileSize = originalScanFile.length()
                if fileSize > 0x24:
                    scan = RandomAccessFile(originalScanFile, "r")
                    scan.readFully(buffer)
                    # gzip magic bytes 0x1F 0x8B followed by deflate method 0x08
                    if (buffer[0] == 0x1F) and ((buffer[1] & 0xFF) == 0x8B) and (buffer[2] == 0x08):
                        # The gzip trailer's final 8 bytes hold the CRC32 and
                        # the uncompressed size; use them as old scan id/size.
                        scan.seek(fileSize - 8)
                        scan.readFully(buffer, 0, 8)
                        crc32 = getInt(buffer, 0)
                        size = getInt(buffer, 4)
                        deltaParams = ' -oldscanid:' + str(crc32) + ' -oldscansize:' + str(size) + ' '
                        # Splice the delta parameters in directly after the
                        # enterprise-mode switch.
                        index = String(commandLine).indexOf(ENTERPRISE_MODE) + String(ENTERPRISE_MODE).length()
                        commandLine = commandLine[0:index] + deltaParams + commandLine[index + 1:]
                        logger.debug('Scanner execution command updated to ', commandLine)
            except:
                logger.debugException("Failed to calculate CRC32 and size of zipped scan file " + originalScanFile.getAbsolutePath())
        finally:
            # Best-effort close; never mask the original outcome.
            if scan is not None:
                try:
                    scan.close()
                except:
                    pass
    return commandLine
def __init__(self, theURL):
    """ generated source for method __init__ """
    super(CloudGameRepository, self).__init__()
    self.theRepoURL = RemoteGameRepository.properlyFormatURL(theURL)
    # Generate a unique hash of the repository URL, to use as the
    # local directory for files for the offline cache.
    theCacheHash = StringBuilder()
    try:
        # NOTE(review): auto-translated from Java — 'theDigest' and 'i' are
        # never defined in this method, so this loop always raises and
        # theCacheHash becomes None; confirm against the Java original.
        while len(theDigest):
            theCacheHash.append(Math.abs(theDigest[i]))
            i += 1
    except Exception as e:
        theCacheHash = None
    theCachesDirectory = File(System.getProperty("user.home"), ".ggpserver-gamecache")
    theCachesDirectory.mkdir()
    self.theCacheDirectory = File(theCachesDirectory, "repoHash" + theCacheHash)
    if self.theCacheDirectory.exists():
        # For existing caches, only force a full refresh at most once per day
        self.needsRefresh = (System.currentTimeMillis() - self.theCacheDirectory.lastModified()) > 86400000
    else:
        self.theCacheDirectory.mkdir()
        self.needsRefresh = True
    if self.needsRefresh:
        refreshThread.start()
        # Update the game cache asynchronously if there are already games.
        # Otherwise, force a blocking update.
        # NOTE(review): 'length' is undefined here — the Java original
        # presumably counted cached games; verify before relying on this.
        if len(length):
            try:
                refreshThread.join()
            except InterruptedException as e:
                self.theCacheDirectory.setLastModified(System.currentTimeMillis())
                self.needsRefresh = False
def getListOfAvailableFLVs(self):
    """Return list of .flv files that can be streamed."""
    scope = Red5.getConnectionLocal().getScope()
    serverRoot = System.getProperty('red5.root')
    filesMap = HashMap()
    try:
        print 'Getting the FLV files'
        flvs = scope.getResources("streams/*.flv")
        for file in flvs:
            fso = File(serverRoot + '/webapps/oflaDemo' + file.path)
            flvName = fso.getName()
            flvBytes = 0
            if hasattr(fso, 'length'):
                flvBytes = fso.length()
            else:
                print 'Length not found'
            lastMod = '0'
            if hasattr(fso, 'lastModified'):
                lastMod = self.formatDate(Date(fso.lastModified()))
            else:
                log.debug('Last modified not found')
            print 'FLV Name:', flvName
            print 'Last modified date:', lastMod
            print 'Size:', flvBytes
            print '-------'
            # Per-file metadata map: name, lastModified, size
            fileInfo = HashMap(3);
            fileInfo["name"] = flvName
            fileInfo["lastModified"] = lastMod
            fileInfo["size"] = flvBytes
            filesMap[flvName] = fileInfo
    except Exception, e:
        print 'Error in getListOfAvailableFLVs:', e
    # NOTE(review): no return statement is visible here despite the
    # docstring — confirm the full source returns filesMap.
def restoreJSystemProperties(self):
    """Restore jsystem.properties from its ".back" backup, if a user
    directory is configured."""
    if self.userDir is None:
        return
    original = File(self.userDir, "jsystem.properties")
    backup = File(original.getParentFile(), "jsystem.properties.back")
    FileUtils.copyFile(backup, original)
def getImageFiles(self, path):
    """Return the files in *path* whose names end in .gif, .png or .jpg."""
    directory = File(path)

    class _ImageFilter(FilenameFilter):
        def accept(lself, d, fileName):  # @NoSelf
            # endswith accepts a tuple of suffixes (Python 2.5+).
            return fileName.endswith((".gif", ".png", ".jpg"))

    return directory.listFiles(_ImageFilter())
def access(path, mode):
    """access(path, mode) -> True if granted, False otherwise

    Use the real uid/gid to test for access to a path.  Note that most
    operations will use the effective uid/gid, therefore this routine can
    be used in a suid/sgid environment to test if the invoking user has
    the specified access to the path.  The mode argument can be F_OK to
    test existence, or the inclusive-OR of R_OK, W_OK, and X_OK.
    """
    if not isinstance(mode, (int, long)):
        raise TypeError('an integer is required')
    f = File(sys.getPath(path))
    # Guard clauses: bail out at the first denied permission.
    if not f.exists():
        return False
    if mode & R_OK and not f.canRead():
        return False
    if mode & W_OK and not f.canWrite():
        return False
    if mode & X_OK:
        # NOTE: always False without jna-posix stat
        try:
            if not (stat(path).st_mode & _stat.S_IEXEC):
                return False
        except OSError:
            return False
    return True
def __activate__(self, context):
    """Entry point: run the configured processing stream sets, optionally
    restricted to the set named by the 'processingSet' request parameter."""
    self.log = context["log"]
    self.request = context["request"]
    self.sessionState = context["sessionState"]
    self.sessionState.set("username","admin")
    processingSet = self.request.getParameter("processingSet")
    self.procMsg = None
    # read configuration and trigger processing stream sets
    # storing the return object on the map
    configFilePath = FascinatorHome.getPath("process")+"/processConfig.json"
    procConfigFile = File(configFilePath)
    if procConfigFile.exists() == True:
        self.dataMap = HashMap()
        self.dataMap.put("indexer", context['Services'].getIndexer())
        self.procConfigJson = JsonSimple(procConfigFile)
        for configObj in self.procConfigJson.getJsonArray():
            configJson = JsonSimple(configObj)
            procId = configJson.getString("", "id")
            # A requested set runs alone; otherwise every set runs.
            if processingSet is not None:
                if procId == processingSet:
                    self.execProcSet(procId, configJson)
            else:
                self.execProcSet(procId, configJson)
        # execProcSet may have set an error message; only report success
        # when nothing was recorded.
        if self.procMsg is None:
            self.procMsg = "Processing complete!"
    else:
        self.procMsg = "Configuration file does not exist: " + configFilePath
def run():
    """Export all Treeline/AreaTree objects of the first open TrakEM2
    project into a user-chosen target folder."""
    projects = Project.getProjects()
    if projects is None or projects.isEmpty():
        IJ.log('No project open!')
        return
    ls = projects.get(0).getRootLayerSet()
    trees = ls.getZDisplayables(Treeline)
    trees.addAll(ls.getZDisplayables(AreaTree))
    if trees.isEmpty():
        IJ.log('No trees to process!')
        return
    dc = DirectoryChooser('Target folder')
    targetFolder = dc.getDirectory()
    if targetFolder is None:
        return # user canceled
    # Normalise the folder path to end with a slash.
    if targetFolder[len(targetFolder) -1] != '/':
        targetFolder += '/'
    fdir = File(targetFolder)
    # Warn once if the target folder already contains visible files;
    # the user may continue or abort.
    for f in fdir.listFiles():
        if f.isHidden():
            continue
        yn = YesNoCancelDialog(IJ.getInstance(), "WARNING", "Folder not empty! Continue?")
        if yn.yesPressed():
            break
        else:
            return
    process(trees, targetFolder)
def check(fn='test.txt'):
    """Assert that *fn* exists and is non-empty, then delete it."""
    target = File(fn)
    if not target.exists():
        raise support.TestError('"%s" should exist' % fn)
    if not target.length():
        raise support.TestError('"%s" should have contents' % fn)
    os.remove(fn)
def setup_wls_cp():
    """If the configured WebLogic install directory exists, generate its
    setenv script and return the result of create_setenv."""
    home_dir = rb_config.getProperty('wls.oracle.home')
    wls_name = rb_config.getProperty('wls.name')
    # Both properties must be present and non-empty.
    if not (home_dir and wls_name):
        return
    if File(home_dir + '/' + wls_name).exists():
        return create_setenv(home_dir)
def load_project(self, fname): file = File(fname) print 'Loading project file: ', file.getAbsolutePath() pm = ProjectManager() project = pm.loadProject(file) print pm.status() return project
def main(): global myarg0,myarg1,myarg2 try: myarg0 except NameError: print "please specify the path" return try: myarg1 except NameError: myarg1 = "------" if myarg1 is not str or len(myarg1) <= 6: myarg1 = "------" try: myarg2 except NameError: myarg2 = "n" f = File(myarg0) if (not f.isDirectory()) and (not f.isFile()): print "No such file or directory",myarg0 return printfiles(f,myarg1,0,myarg2)
def __deployOsbCfg(configPrefix, componentProperties):
    """Import an OSB configuration JAR (with optional customisation file)
    through the SessionManagement / ALSBConfiguration MBeans, then
    activate the session."""
    configJarFilename = componentProperties.getProperty(configPrefix + PROPERTY_OSB_CFG_SUFFIX_FILE)
    customisationFilename = componentProperties.getProperty(configPrefix + PROPERTY_OSB_CFG_SUFFIX_CUSTOM)
    log.info("Deploying OSB configuration from file '" + configJarFilename + "'")
    if not customisationFilename is None and len(customisationFilename) > 0:
        log.info("Customising using file '" + customisationFilename + "'")
    domainRuntime()
    infile = File(configJarFilename)
    # TODO - verify file exists
    infile = infile.getAbsoluteFile() # enables server to find the file
    sessionMBean = findService("SessionManagement", OSB_SESSION_BEAN)
    sessionName = getNewSessionName()
    sessionMBean.createSession(sessionName)
    # obtain the ALSBConfigurationMBean instance that operates
    # on the session that has just been created. Notice that
    # the name of the mbean contains the session name.
    alsbSession = findService("ALSBConfiguration." + sessionName, OSB_CONFIG_BEAN)
    alsbSession.uploadJarFile(__readBytes(infile))
    plan = alsbSession.getImportJarInfo().getDefaultImportPlan()
    # lowercase 'false' is the WLST boolean provided by the scripting runtime
    plan.setPreserveExistingEnvValues(false)
    alsbSession.importUploaded(plan)
    if not customisationFilename is None and len(customisationFilename) > 0:
        customiseImport(alsbSession, customisationFilename)
    # activate changes performed in the session
    sessionMBean.activateSession(sessionName, "Imported new application via " + configJarFilename)
def DiscoveryMain(Framework):
    """Phase 1 of the ARIS integration: parse the ARIS XML export and
    write intermediate XML files, one per mapping file."""
    logger.info('Start Phase 1 ... Pull from ARIS')
    # Set global framework
    global theFramework
    theFramework = Framework
    ## Make sure we have an input data file from ARIS
    ARISfileName = Framework.getParameter('ARIS_XML_file') or None
    ARISfile = File(ARISfileName)
    if not (ARISfile and ARISfile.exists() and ARISfile.canRead()):
        excInfo = ('ARIS XML input file is not specified or is invalid!')
        Framework.reportError(excInfo)
        logger.error(excInfo)
        return None
    ## Check that the language parameter is set - default to US English
    requestedLocaleID = Framework.getParameter('ARISLocaleId') or '&LocaleId.USen;'
    if not requestedLocaleID:
        logger.warn('ARIS LocaleID parameter is not set...defaulting to US English')
        requestedLocaleID = '&LocaleId.USen;'
    # File and directory names
    userExtDir = CollectorsParameters.BASE_PROBE_MGR_DIR + CollectorsParameters.getDiscoveryResourceFolder() + '\\TQLExport\\ARIS\\'
    intermediatesDir = userExtDir + 'inter\\'
    mapingFilesListFileName = userExtDir + 'tqls.txt'
    mappingFileNameList = checkDiscoveryResources(mapingFilesListFileName, userExtDir, Framework, intermediatesDir)
    if not mappingFileNameList:
        return None
    ## Get attribute names from mapping file(s)
    ## This is a list of extended attributes to be retrieved from ARIS
    for mappingFileName in mappingFileNameList:
        (requestedSourceObjectTypeList, requestedSourceRelationshipTypeList) = getMapping(userExtDir + 'data\\' + mappingFileName + '.xml')
        if requestedSourceObjectTypeList and requestedSourceRelationshipTypeList:
            arisObjectMap = processARISXML(ARISfile, requestedSourceObjectTypeList, requestedSourceRelationshipTypeList, requestedLocaleID)
            intermediateXmlDoc = None
            if arisObjectMap:
                intermediateXmlDoc = buildIntermediateXML(arisObjectMap)
                intermediateXmlLocation = intermediatesDir + mappingFileName + '.xml'
            else:
                Framework.reportWarning('No CIs found in the ARIS XML file')
            if intermediateXmlDoc:
                # Persist the intermediate document for phase 2.
                try:
                    xmlOutputter = XMLOutputter()
                    xmlOutputter.output(intermediateXmlDoc, FileOutputStream(intermediateXmlLocation))
                except:
                    excInfo = logger.prepareJythonStackTrace('')
                    Framework.reportError('Error writing intermediate file: <%s>' % intermediateXmlLocation)
                    logger.warn('[' + SCRIPT_NAME + ':DiscoveryMain] Exception: <%s>' % excInfo)
                    pass
            else:
                Framework.reportWarning('Error creating intermediate XML')
        else:
            logger.warn('[' + SCRIPT_NAME + ':DiscoveryMain] Unable to process mapping file: <%s>' % mappingFileName)
            Framework.reportError(' Unable to process mapping file: <%s>' % mappingFileName)
    logger.info('End Phase 1.... Pull from ARIS')
def getatime(path):
    """Return the access time of *path*.

    Java exposes no access time, so the modification time is returned
    instead — matching the behaviour of os.stat().
    """
    path = _tostr(path, "getatime")
    target = File(path)
    if not target.exists():
        raise OSError(0, 'No such file or directory', path)
    # lastModified() is in milliseconds; convert to float seconds.
    return target.lastModified() / 1000.0
def canOpenFile(self):
    """Report whether the indexed object's backing file exists on disk.

    HACK: instead of comparing the dc_format recorded in the index with
    the real file's mimetype, simply check that the file is present.
    """
    return File(self.getObject().getId()).exists()
stats = False for o, a in options: if o == "--format": format = a elif o == "--index": indexDir = a elif o == "--stats": stats = True class CustomTemplate(Template): delimiter = '#' template = CustomTemplate(format) fsDir = SimpleFSDirectory(File(indexDir)) searcher = IndexSearcher(DirectoryReader.open(fsDir)) analyzer = StandardAnalyzer(Version.LUCENE_CURRENT) parser = QueryParser(Version.LUCENE_CURRENT, "keywords", analyzer) parser.setDefaultOperator(QueryParser.Operator.AND) query = parser.parse(' '.join(args)) start = datetime.now() scoreDocs = searcher.search(query, 51).scoreDocs duration = datetime.now() - start if stats: print >>sys.stderr, "Found %d document(s) (in %s) that matched query '%s':" %(len(scoreDocs), duration, query) for scoreDoc in scoreDocs: doc = searcher.doc(scoreDoc.doc) table = dict((field.name(), field.stringValue())
from org.apache.lucene import document, store, util
import numpy as np
import csv

# PATHS
luceneIndexPath = '/home/tarun/PE/lucene/luceneIndexDirectoryNewCorpus60/'
corpus = '/home/tarun/PE/newCorpus60/'
trainingFilePath = '/home/tarun/PE/Dataset/training_set.tsv'

lucene.initVM()
# ANALYZER
analyzer = StandardAnalyzer(util.Version.LUCENE_CURRENT)
# DIRECTORY
directory = SimpleFSDirectory(File(luceneIndexPath))
# INDEX WRITER
writerConfig = IndexWriterConfig(util.Version.LUCENE_CURRENT, analyzer)
writer = IndexWriter(directory, writerConfig)
print writer.numDocs()
# INDEXING ALL DOCUMENTS/ARTICLES IN THE CORPUS
for fileName in os.listdir(corpus):
    #print fileName
    # NOTE(review): this rebinds the imported 'document' module name to a
    # Document instance — rename one of them if the module is needed later.
    document = Document()
    article = os.path.join(corpus, fileName)
    content = open(article, 'r').read()
    document.add(Field("text", content, Field.Store.YES, Field.Index.ANALYZED))
    writer.addDocument(document)
def rmdir(path):
    """rmdir(path)

    Remove a directory."""
    # NOTE(review): java.io.File.delete() also removes plain files, so this
    # does not enforce that *path* is a directory the way POSIX rmdir does —
    # confirm callers guarantee a directory here.
    if not File(path).delete():
        raise OSError(0, "couldn't delete directory", path)
# This script must be run inside neuroConstruct's Jython; a plain Python
# interpreter cannot import the java.* packages.
try:
    from java.io import File
    from java.lang import System
except ImportError:
    print "Note: this file should be run using ..\\nC.bat -python XXX.py' or './nC.sh -python XXX.py'"
    print "See http://www.neuroconstruct.org/docs/python.html for more details"
    quit()

from ucl.physiol.neuroconstruct.project import ProjectManager
from ucl.physiol.neuroconstruct.utils import NumberGenerator
from math import *
from random import *

# Load an existing neuroConstruct project
projFile = File("TestPython/TestPython.neuro.xml")
print "Loading project from file: " + projFile.getAbsolutePath( ) + ", exists: " + str(projFile.exists())
pm = ProjectManager()
myProject = pm.loadProject(projFile)
print "Loaded project: " + myProject.getProjectName()

# Get first electrical stimulation & reset it
stim = myProject.elecInputInfo.getStim(0)
print "First stimulation setting: " + str(stim)
# Pick a new random amplitude in [0, 0.2)
newAmp = random() * 0.2
def getCurrentWorkingDirectory(self):
    """Return the current working directory as a java.io.File."""
    return File(getcwdu())
import os

# Input (raw stacks) and output (processed tifs) folders for this run.
folder = "C:/structure/data/2018-04-30-17-06-18-03-MexicoCity_multisample_imaging/stacks/sequential/";
targetFolder = "C:/structure/data/2018-04-30-17-06-18-03-MexicoCity_multisample_imaging/processed/sequential/";

# Raw stack geometry (voxels) and calibration.
imageWidth=1024;
imageHeight=1024;
imageDepth=65;
pixelWidth=0.52;
pixelHeight=0.52;
pixelDepth=5.0;

File(targetFolder).mkdirs();
print(folder)
images = [];
# load all images, collect them in a list
for root, directories, filenames in os.walk(folder):
    filenames.sort();
    for filename in filenames:
        print(filename)
        if (filename.endswith(".raw")):
            # Skip stacks that were already converted to .tif
            if (not File(os.path.join(targetFolder, filename + ".tif")).exists()):
                IJ.run("Raw...", "open=[" + os.path.join(folder, filename) + "] image=[16-bit Signed] width=" + str(imageWidth) + " height=" + str(imageHeight) + " number=" + str(imageDepth) + " little-endian");
                imp = IJ.getImage();
import os import random import time from java.lang import System from java.io import File from ucl.physiol.neuroconstruct.project import ProjectManager from ucl.physiol.neuroconstruct.neuron import NeuronFileManager from ucl.physiol.neuroconstruct.nmodleditor.processes import ProcessManager timestamp = str(time.time()) pm = ProjectManager(None, None) project_path = '../VervaekeEtAl-GolgiCellNetwork.ncx' project_file = File(project_path) project = pm.loadProject(project_file) sim_config_name = 'TestKBK' # basic simulation setup sim_ref = timestamp sim_path = '../simulations/' + sim_ref project.simulationParameters.setReference(sim_ref) sim_config = project.simConfigInfo.getSimConfig(sim_config_name) project.neuronSettings.setNoConsole() project.neuronSettings.setVarTimeStep(False) # generate pm.doGenerate(sim_config_name, 1234) while pm.isGenerating():
assert(textToSynthesize != null); assert(subscriptionKey != null); String outputFormat = AudioOutputFormat.Riff24Khz16BitMonoPcm; // String deviceLanguage = "en-US"; String deviceLanguage = language; String subscriptionKey = "2633c3a0ffce1c6675b8a15c0b405f43" String genderName = Gender.Male; String voiceName = "Microsoft Server Speech Text to Speech Voice (en-US, Guy24KRUS)"; try{ byte[] audioBuffer = TTSService.Synthesize(textToSynthesize, outputFormat, deviceLanguage, genderName, voiceName, subscriptionKey); // write the pcm data to the file String outputWave = ".\\output.pcm"; File outputAudio = new File(outputWave); FileOutputStream fstream = new FileOutputStream(outputAudio); fstream.write(audioBuffer); fstream.flush(); fstream.close(); // specify the audio format AudioFormat audioFormat = new AudioFormat( AudioFormat.Encoding.PCM_SIGNED, 24000, 16, 1, 1 * 2, 24000, false);
# print doc.get('rate')+'分('+doc.get('comments')+'人评价)' # if doc.get('post')==0: # print '¥'+doc.get('price')+'\t'+'包邮' # else: # print '¥'+doc.get('price') # print doc.get('sales'),'人付款' # print '淘宝链接:',doc.get('url') # print '图片:',doc.get('img') # print '香水时代图片:',doc.get('xssd_url') # if doc.get('tune')!=None: # print 'tune:',doc.get('tune') # if doc.get('former')!=None: # print 'former:',doc.get('former') # print 'mid:',doc.get('mid') # print 'last:',doc.get('last') # else: # print 'scents:',doc.get('scents') if __name__ == '__main__': STORE_DIR = "index_tb_new" lucene.initVM(vmargs=['-Djava.awt.headless=true']) print 'lucene', lucene.VERSION # base_dir = os.path.dirname(os.path.abspath(sys.argv[0])) directory = SimpleFSDirectory(File(STORE_DIR)) searcher = IndexSearcher(DirectoryReader.open(directory)) analyzer = StandardAnalyzer(Version.LUCENE_CURRENT) run(searcher, analyzer) del searcher
def process(self, dataSource, progressBar):
    """Autopsy ingest entry point: extract every $UsnJrnl:$J stream from the
    data source, convert it to a SQLite database with an external parser
    executable, and post one blackboard artifact per journal record.

    Returns IngestModule.ProcessResult.OK in all cases (errors are logged).
    """
    # we don't know how much work there is yet
    progressBar.switchToIndeterminate()

    # Set the database to be read to the once created by the SAM parser program
    skCase = Case.getCurrentCase().getSleuthkitCase();
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    # $UsnJrnl:$J under $Extend is the NTFS change-journal data stream.
    files = fileManager.findFiles(dataSource, "$UsnJrnl:$J", "$Extend")
    numFiles = len(files)
    self.log(Level.INFO, "found " + str(numFiles) + " files")
    progressBar.switchToDeterminate(numFiles)
    fileCount = 0;

    # Create Event Log directory in temp directory, if it exists then continue on processing
    Temp_Dir = Case.getCurrentCase().getTempDirectory()
    self.log(Level.INFO, "create Directory " + Temp_Dir)
    temp_dir = os.path.join(Temp_Dir, "usnj")
    try:
        os.mkdir(temp_dir)
    except:
        self.log(Level.INFO, "Usnj Directory already exists " + temp_dir)

    for file in files:
        # Check if the user pressed cancel while we were busy
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK
        #self.log(Level.INFO, "Processing file: " + file.getName())
        fileCount += 1

        # Save the DB locally in the temp folder. use file id as name to reduce collisions
        # NOTE(review): despite the comment, the name is the fixed "usnj.txt",
        # not the file id — multiple $J streams would overwrite each other.
        lclDbPath = os.path.join(temp_dir, "usnj.txt")
        ContentUtils.writeToFile(file, File(lclDbPath))
        self.log(Level.INFO, "Saved File ==> " + lclDbPath)

        # Run the EXE, saving output to a sqlite database
        #try:
        self.log(Level.INFO, "Running program ==> " + self.path_to_exe + " " + Temp_Dir + "\\usnj\\usnj.txt" + " " + Temp_Dir + "\\usnj.db3")
        pipe = Popen([self.path_to_exe, os.path.join(temp_dir, "usnj.txt"), os.path.join(temp_dir, "usnj.db3")], stdout=PIPE, stderr=PIPE)
        out_text = pipe.communicate()[0]
        self.log(Level.INFO, "Output from run is ==> " + out_text)

        # Open the DB using JDBC
        lclDbPath = os.path.join(temp_dir, "usnj.db3")
        self.log(Level.INFO, "Path the system database file created ==> " + lclDbPath)
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            dbConn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(Level.INFO, "Could not open database file (not SQLite) usnj.db3 (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Query the contacts table in the database and get all columns.
        try:
            stmt = dbConn.createStatement()
            resultSet = stmt.executeQuery("Select tbl_name from SQLITE_MASTER; ")
            self.log(Level.INFO, "query SQLite Master table")
        except SQLException as e:
            self.log(Level.INFO, "Error querying database for system table (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Register the artifact type; addArtifactType throws if it already
        # exists, in which case we fall back to looking it up.
        try:
            self.log(Level.INFO, "Begin Create New Artifacts")
            artID_usnj = skCase.addArtifactType("TSK_USNJ", "NTFS UsrJrnl entries")
        except:
            self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

        artID_usnj = skCase.getArtifactTypeID("TSK_USNJ")
        artID_usnj_evt = skCase.getArtifactType("TSK_USNJ")
        #self.log(Level.INFO, "get artifacts ID's " + str(artID_usnj))
        #self.log(Level.INFO, "get artifacts ID's " + str(resultSet))

        # Cycle through each row and create artifacts
        while resultSet.next():
            try:
                #self.log(Level.INFO, "Result (" + resultSet.getString("tbl_name") + ")")
                table_name = resultSet.getString("tbl_name")
                #self.log(Level.INFO, "Result get information from table " + resultSet.getString("tbl_name") + " ")
                SQL_String_1 = "Select * from " + table_name + ";"
                # PRAGMA table_info gives one row per column (name + declared type).
                SQL_String_2 = "PRAGMA table_info('" + table_name + "')"
                #self.log(Level.INFO, SQL_String_1)
                #self.log(Level.INFO, SQL_String_2)
                Column_Names = []
                Column_Types = []
                resultSet2 = stmt.executeQuery(SQL_String_2)
                # Mirror every column of the table as a custom blackboard
                # attribute type (STRING for TEXT columns, LONG otherwise).
                while resultSet2.next():
                    Column_Names.append(resultSet2.getString("name").upper())
                    Column_Types.append(resultSet2.getString("type"))
                    if resultSet2.getString("type").upper() == "TEXT":
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_USNJ_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")
                    else:
                        try:
                            attID_ex1 = skCase.addArtifactAttributeType("TSK_USNJ_" + resultSet2.getString("name").upper(), BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG, resultSet2.getString("name"))
                            #self.log(Level.INFO, "attribure id for " + "TSK_" + resultSet2.getString("name") + " == " + str(attID_ex1))
                        except:
                            self.log(Level.INFO, "Attributes Creation Error, " + resultSet2.getString("name") + " ==> ")

                # One artifact per data row; JDBC columns are 1-based.
                resultSet3 = stmt.executeQuery(SQL_String_1)
                while resultSet3.next():
                    art = file.newArtifact(artID_usnj)
                    Column_Number = 1
                    for col_name in Column_Names:
                        #self.log(Level.INFO, "Result get information for column " + Column_Names[Column_Number - 1] + " ")
                        #self.log(Level.INFO, "Result get information for column " + Column_Types[Column_Number - 1] + " ")
                        #self.log(Level.INFO, "Result get information for column_number " + str(Column_Number) + " ")
                        c_name = "TSK_USNJ_" + col_name
                        #self.log(Level.INFO, "Attribute Name is " + c_name + " Atribute Type is " + str(Column_Types[Column_Number - 1]))
                        attID_ex1 = skCase.getAttributeType(c_name)
                        if Column_Types[Column_Number - 1] == "TEXT":
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseUsnJIngestModuleFactory.moduleName, resultSet3.getString(Column_Number)))
                        else:
                            art.addAttribute(BlackboardAttribute(attID_ex1, ParseUsnJIngestModuleFactory.moduleName, resultSet3.getInt(Column_Number)))
                        Column_Number = Column_Number + 1
            except SQLException as e:
                self.log(Level.INFO, "Error getting values from usnj table (" + e.getMessage() + ")")

        # Clean up
        stmt.close()
        dbConn.close()

        # Fire an event to notify the UI and others that there are new artifacts
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(ParseUsnJIngestModuleFactory.moduleName, artID_usnj_evt, None))

        #Clean up EventLog directory and files
        os.remove(lclDbPath)
        try:
            os.remove(os.path.join(temp_dir, "usnj.txt"))
        except:
            self.log(Level.INFO, "removal of usnj.txt file failed " + temp_dir + "\\" + file.getName())
        try:
            os.rmdir(temp_dir)
        except:
            self.log(Level.INFO, "removal of usnj directory failed " + temp_dir)

    # After all databases, post a message to the ingest messages in box.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA, "Usnj Parser", " Usnj Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)

    # Fire an event to notify the UI and others that there are new artifacts
    IngestServices.getInstance().fireModuleDataEvent(
        ModuleDataEvent(ParseUsnJIngestModuleFactory.moduleName, artID_usnj_evt, None))

    return IngestModule.ProcessResult.OK
    process_pre_inferred_types(types_map, typesF)

# Script body: build two parallel Lucene indexes (standard-analyzed and
# whitespace-analyzed) from lines read on stdin.
output_path = "."
if len(sys.argv) > 2:
    output_path = sys.argv[2]
lucene.initVM()
analyzer = StandardAnalyzer(Version.LUCENE_4_10_1)
analyzer_ws = WhitespaceAnalyzer(Version.LUCENE_4_10_1)
std_path = "%s/lucene_full_standard/" % (output_path)
ws_path = "%s/lucene_full_ws/" % (output_path)
# NOTE(review): these paths are directories, but os.remove only deletes
# files — presumably shutil.rmtree was intended; confirm before relying
# on a clean rebuild.
if os.path.exists(std_path):
    os.remove(std_path)
if os.path.exists(ws_path):
    os.remove(ws_path)
indexDir1 = SimpleFSDirectory(File(std_path))
indexDir2 = SimpleFSDirectory(File(ws_path))
writerConfig1 = IndexWriterConfig(Version.LUCENE_4_10_1, analyzer)
writerConfig2 = IndexWriterConfig(Version.LUCENE_4_10_1, analyzer_ws)
writer1 = IndexWriter(indexDir1, writerConfig1)
writer2 = IndexWriter(indexDir2, writerConfig2)
print "%d docs in index1" % writer1.numDocs()
print "%d docs in index2" % writer2.numDocs()
print "Reading lines from sys.stdin..."
ftypes = open(LUCENE_TYPES_FILE, "w")
# One Document per input line for each index (chunk continues past this view).
for n, l in enumerate(sys.stdin):
    doc = Document()
    doc_lc = Document()
# Validate the file chosen in the AWT FileDialog before importing it into
# Moneydance (fragment: the enclosing scope and the try body continue
# outside this view).
if (fDialog.getFile() is None) or fDialog.getFile() == "":
    raise Exception(
        "User chose to cancel or no file selected >> So no Import will be performed... ")
fileToImport = os.path.join(fDialog.getDirectory(), fDialog.getFile())
if not os.path.exists(fileToImport) or not os.path.isfile(fileToImport):
    raise Exception(
        "IMPORT: Sorry, file selected to import either does not exist or is not a file")
print "I will import from file: %s" % (fileToImport)
# Re-bind the path string as a java.io.File for the Moneydance API.
fileToImport = File(fileToImport)

# set the parameters
newAccountSet = False  # True Creates a new account set!!! DANGER!!

filename = fileToImport.getName()
extension = os.path.splitext(filename)[1].upper()
wrapper = moneydance_ui.getCurrentAccounts()  # type: AccountBookWrapper
book = moneydance_data
importWasSuccessful = True
dirName = fileToImport.getParent()
try:
    fPath = fileToImport.getAbsolutePath()  # type: str
# Output it result = pluginsTreeToString() if uploadToWiki or compareToWiki: from fiji import MediaWikiClient client = MediaWikiClient(URL) wiki = client.sendRequest(['title', PAGE, 'action', 'edit'], None) begin = wiki.find('<textarea') begin = wiki.find('>', begin) + 1 end = wiki.find('</textarea>', begin) wiki = wiki[begin:end].replace('<', '<') if wiki != result: if compareToWiki: from fiji import SimpleExecuter from java.io import File, FileWriter file1 = File.createTempFile('PluginList', '.wiki') writer1 = FileWriter(file1) writer1.write(wiki) writer1.close() file2 = File.createTempFile('PluginList', '.wiki') writer2 = FileWriter(file2) writer2.write(result) writer2.close() diff = SimpleExecuter([ 'git', 'diff', color, '--ignore-space-at-eol', '--patience', '--no-index', '--src-prefix=wiki/', '--dst-prefix=local/', file1.getAbsolutePath(), file2.getAbsolutePath() ]) file1.delete() file2.delete()
def getmtime(path):
    """Return the time of last modification of *path*, in seconds since
    the epoch, raising OSError when the path does not exist."""
    path = _tostr(path, "getmtime")
    jfile = File(sys.getPath(path))
    if jfile.exists():
        # java.io.File reports milliseconds; callers expect float seconds.
        return jfile.lastModified() / 1000.0
    raise OSError(0, 'No such file or directory', path)
# Smoke tests for processing.py's load* helpers against fixture files
# (fragment: `helloworld` is first assigned earlier, outside this view).
assert helloworld[0] == 'hello'
assert helloworld[1] == 'world'

# loadStrings must also accept a Reader, not just a filename.
helloworld = loadStrings(createReader("strings.txt"))
assert helloworld[0] == 'hello'
assert helloworld[1] == 'world'

# loadBytes yields the raw bytes of the file.
expected = 'hello\nworld\n'
for i, c in enumerate(loadBytes("strings.txt")):
    assert c == ord(expected[i])

o = loadJSONObject("object.json")
assert o.getString('phrase') == 'hello world'
assert o.getInt('amount') == 42

# The java.io.File overloads must behave identically to the string paths.
from java.io import File
o = loadJSONObject(File("testing/resources/data/object.json"))
assert o.getString('phrase') == 'hello world'
assert o.getInt('amount') == 42

a = loadJSONArray("array.json")
assert a.getString(0) == 'hello'
assert a.getString(1) == 'world'

a = loadJSONArray(File("testing/resources/data/array.json"))
assert a.getString(0) == 'hello'
assert a.getString(1) == 'world'

print 'OK'
exit()
def isdir(path):
    """Test whether a path is a directory"""
    converted = _tostr(path, "isdir")
    # Delegate the filesystem check to java.io.File.
    return File(converted).isDirectory()
def _realpath(path):
    """Return the canonical form of *path*, falling back to the absolute
    path when canonicalisation fails with an I/O error."""
    jfile = File(sys.getPath(path))
    try:
        return asPyString(jfile.getCanonicalPath())
    except java.io.IOException:
        # getCanonicalPath touches the filesystem and may fail; degrade
        # gracefully to the purely lexical absolute path.
        return _abspath(path)
def _abspath(path):
    """Return a normalized absolute version of *path* without resolving
    symlinks."""
    # getAbsolutePath does not normalize, and getCanonicalPath would
    # eliminate symlinks, so normalize the absolute path ourselves.
    absolute = File(sys.getPath(path)).getAbsolutePath()
    return normpath(asPyString(absolute))
def lstat(path):
    """lstat(path) -> stat result

    Like stat(path), but do not follow symbolic links.
    """
    abs_path = sys.getPath(path)
    # Prefer the native (jna-posix) lstat when available.
    try:
        return stat_result.from_jnastat(_posix.lstat(abs_path))
    except NotImplementedError:
        # No native lstat on this platform: fall through to the pure-Java
        # link detection below.
        pass
    except:
        raise
    f = File(sys.getPath(path))
    # XXX: jna-posix implements similar link detection in
    # JavaFileStat.calculateSymlink, fallback to that instead when not
    # native
    abs_parent = f.getAbsoluteFile().getParentFile()
    if not abs_parent:
        # root isn't a link
        return stat(path)
    can_parent = abs_parent.getCanonicalFile()
    if can_parent.getAbsolutePath() == abs_parent.getAbsolutePath():
        # The parent directory's absolute path is canonical..
        if f.getAbsolutePath() != f.getCanonicalPath():
            # but the file's absolute and canonical paths differ (a
            # link)
            return stat_result((_stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))
    # The parent directory's path is not canonical (one of the parent
    # directories is a symlink). Build a new path with the parent's
    # canonical path and compare the files
    f = File(_path.join(can_parent.getAbsolutePath(), f.getName()))
    if f.getAbsolutePath() != f.getCanonicalPath():
        return stat_result((_stat.S_IFLNK, 0, 0, 0, 0, 0, 0, 0, 0, 0))
    # Not a link, only now can we determine if it exists (because
    # File.exists() returns False for dead links)
    if not f.exists():
        raise OSError(errno.ENOENT, strerror(errno.ENOENT), path)
    return stat(path)
def isfile(path):
    """Test whether a path is a regular file"""
    converted = _tostr(path, "isfile")
    # Resolve through sys.getPath so relative paths honour the
    # interpreter's current working directory.
    return File(sys.getPath(converted)).isFile()
# Guard: these Java imports only resolve when run under neuroConstruct's
# bundled Jython; give a helpful hint otherwise and exit.
try:
    from java.io import File
    from java.lang import System
except ImportError:
    print "Note: this file should be run using ..\\nC.bat -python XXX.py' or './nC.sh -python XXX.py'"
    print "See http://www.neuroconstruct.org/docs/python.html for more details"
    quit()

from ucl.physiol.neuroconstruct.project import ProjectManager

from math import *

# Load an existing neuroConstruct project
projFile = File("TestPython/TestPython.neuro.xml")
print "Loading project from file: " + projFile.getAbsolutePath(
) + ", exists: " + str(projFile.exists())

pm = ProjectManager()
myProject = pm.loadProject(projFile)
print "Loaded project: " + myProject.getProjectName()

# Add a number of cells to the generatedCellPositions, connections to generatedNetworkConnections
# and electrical inputs to generatedElecInputs
numCells = 12

# Place the cells evenly on a circle of radius 100 in the x/y plane.
for i in range(0, numCells):
    x = 100 * sin(i * 2 * pi / numCells)
    y = 100 * cos(i * 2 * pi / numCells)
    myProject.generatedCellPositions.addPosition("SampleCellGroup", i, x, y, 0)
def isabs(path):
    """Test whether a path is absolute"""
    converted = _tostr(path, "isabs")
    # java.io.File knows the platform's notion of "absolute".
    return File(converted).isAbsolute()
def file_exists(filename):
    """Return True if *filename* exists, False otherwise.

    Works across interpreters: prefers os.access when the platform
    provides it, uses java.io.File under Jython, and otherwise falls
    back to attempting to open the file.  (The original implicitly
    returned None when neither branch applied.)
    """
    if hasattr(os, 'access'):
        return os.access(filename, os.F_OK)
    elif using_java:
        return File(filename).exists()
    # Last-resort fallback: probe by opening the file.
    try:
        fh = open(filename)
    except IOError:
        return False
    else:
        fh.close()
        return True
# Decide where the discovered model will be written: a fresh temp file that
# is later added to the archive, or the user-supplied path (fragment: the
# enclosing function and the final `if` body continue outside this view).
if model_file_name is None:
    add_to_archive = True
    try:
        domain_name = model_context.get_domain_name()
        model_file = File.createTempFile(domain_name, '.yaml').getCanonicalFile()
        model_file_name = model_context.get_domain_name() + '.yaml'
    except (IllegalArgumentException, IOException), ie:
        ex = exception_helper.create_discover_exception(
            'WLSDPLY-06008', ie.getLocalizedMessage(), error=ie)
        __logger.throwing(ex, class_name=_class_name, method_name=_method_name)
        raise ex
else:
    # NOTE(review): add_to_archive is not assigned on this branch; the
    # references below would raise NameError here unless it is set before
    # this fragment — confirm against the full function.
    model_file = FileUtils.getCanonicalFile(File(model_file_name))
try:
    model_translator.PythonToFile(model.get_model()).write_to_file(
        model_file.getAbsolutePath())
except TranslateException, ex:
    # Jython 2.2.1 does not support finally so use this like a finally block...
    if add_to_archive and not model_file.delete():
        model_file.deleteOnExit()
    raise ex
if add_to_archive:
    try:
        archive_file = model_context.get_archive_file()
        archive_file.addModel(model_file, model_file_name)
        if not model_file.delete():
def process(self, dataSource, progressBar):
    """Autopsy ingest entry point: find every "contacts.db" in the data
    source, copy it out, read its `contacts` table via the SQLite JDBC
    driver, and post one TSK_CONTACT artifact per row.

    Returns IngestModule.ProcessResult.OK in all cases (errors are logged).
    """
    # Use blackboard class to index blackboard artifacts for keyword search
    blackboard = Case.getCurrentCase().getServices().getBlackboard()

    # Find the "contacts.db" file to parse
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(dataSource, "contacts.db")

    # keep track of progress
    num_files = len(files)
    progressBar.switchToDeterminate(num_files)
    file_count = 0

    for f in files:
        # First check to see if the job was cancelled
        # If it was, return.
        if self.context.isJobCancelled():
            return IngestModule.ProcessResult.OK

        # Begin processing the next file
        self.log(Level.INFO, "Processing file: " + f.getName())
        file_count += 1

        # need to save the current file to disk for processng
        # (file id in the name avoids collisions between hits)
        lclDbPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(f.getId()) + ".db")
        ContentUtils.writeToFile(f, File(lclDbPath))

        # Next we open the db for processing
        try:
            Class.forName("org.sqlite.JDBC").newInstance()
            db_conn = DriverManager.getConnection("jdbc:sqlite:%s" % lclDbPath)
        except SQLException as e:
            self.log(
                Level.INFO, "Could not open database file (not SQLite) " +
                f.getName() + " (" + e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # queryr all from the contacts table
        try:
            stmt = db_conn.createStatement()
            result_set = stmt.executeQuery("SELECT * FROM contacts")
        except SQLException as e:
            self.log(
                Level.INFO, "Error querying database for contacts table (" +
                e.getMessage() + ")")
            return IngestModule.ProcessResult.OK

        # Process the DB
        while result_set.next():
            # Make an artifact on the blackboard and give it attributes
            art = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)

            # Name found in DB
            name = result_set.getString("name")
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME_PERSON.getTypeID(),
                    HW11ContactsDbIngestModuleFactory.moduleName, name))

            # Email found
            email = result_set.getString("email")
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL.getTypeID(),
                    HW11ContactsDbIngestModuleFactory.moduleName, email))

            # Phone number found
            phone = result_set.getString("phone")
            art.addAttribute(
                BlackboardAttribute(
                    BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(),
                    HW11ContactsDbIngestModuleFactory.moduleName, phone))

            # Index the artifact for keyword searching
            try:
                blackboard.indexArtifact(art)
            except Blackboard.BlackboardException as e:
                self.log(Level.SEVERE,
                         "Error indexing artifact " + art.getDisplayName())

        # Update the UI of the newly created artifact
        IngestServices.getInstance().fireModuleDataEvent(
            ModuleDataEvent(HW11ContactsDbIngestModuleFactory.moduleName,
                            BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, None))

        # Clean up tasks for the current file
        stmt.close()
        db_conn.close()
        os.remove(lclDbPath)

    # After all db's are processed, post a message to the ingest inbox.
    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
                                          "ContactsDb Analyzer",
                                          "Found %d files" % file_count)
    IngestServices.getInstance().postMessage(message)

    # return
    return IngestModule.ProcessResult.OK
def __init__(self, filename, saver, loader, create_lexicon_flag=False,
             cluster_results=False, num_clusters=3, tf_idf_flag=True,
             ah_flag=False, pr_flag=False, normalize=True,
             create_page_rank_flag=False, directory='../index',
             linksFile="../index/IntLinks.txt",
             citationsFile="../index/IntCitations.txt",
             n_retrieves=10, root_set_size=10, maxIter=10, verbose=False):
    """
    The init function loads up the pickled tf lexicon, normalizers and the
    norms of all the documents.  Also this is the function that setsup the
    entire class incuding all its flags.

    Parameters (as used below): filename is the base name for the pickled
    lexicon/norms files; saver/loader are callables that persist and load
    those pickles; the *_flag arguments select TF-IDF weighting,
    authorities-and-hubs, and PageRank behaviour respectively.
    """
    self.normalize = normalize
    self.tf_idf_flag = tf_idf_flag
    self.ah_flag = ah_flag
    self.pr_flag = pr_flag
    self.n_retrieves = n_retrieves
    self.root_set_size = root_set_size
    self.maxIter = maxIter
    self.cluster_results = cluster_results
    self.num_clusters = num_clusters
    # HITS needs a root set at least as large as the number of results.
    assert self.root_set_size >= self.n_retrieves

    # NOTE(review): this hard-codes '../index' and shadows the `directory`
    # parameter, which is therefore never used — confirm intent.
    directory = SimpleFSDirectory(File('../index'))
    self.reader = IndexReader.open(directory)
    self.numDocs = self.reader.maxDoc()

    if self.normalize is False:
        self.norm = None

    # TF and/or TF IDF part.
    if create_lexicon_flag is True:
        if normalize is True:
            # make second flag True if you want to create a normalizer also.
            # but assumed created from Create Lexicon file
            if verbose is True:
                print "extracting all the norms of docs"
                start_time = time.clock()
            self.norm = calculateNormalizer(reader=self.reader,
                                            verbose=verbose)
            if verbose is True:
                end_time = time.clock()
                print "time taken for calculating norms is : " + str(
                    end_time - start_time) + " seconds"
            saver(filename=filename + '_norms', obj=self.norm)
        self.lexicon = createLexicon(
            filename=filename,
            reader=self.reader,
            norm=self.norm if self.normalize else None,
            tf_idf_flag=self.tf_idf_flag,
            verbose=verbose)
        saver(filename=filename + '_lexicon', obj=self.lexicon)
    else:
        # Lexicon (and norms) were created earlier; just load the pickles.
        self.lexicon = loader(filename + '_lexicon')
        if normalize is True:
            if verbose is True:
                print "loading norms"
                start_time = time.clock()
            self.norm = loader(filename=filename + '_norms')
            if verbose is True:
                end_time = time.clock()
                print "time taken for loading norms is : " + str(
                    end_time - start_time) + " seconds"

    # Authorties and Hubs part.
    if self.ah_flag or self.pr_flag is True:
        self.graph = LinkAnalysis(linksFile=linksFile,
                                  citationsFile=citationsFile,
                                  reader=self.reader,
                                  verbose=verbose)
        if pr_flag is True:
            # pre-calculate page rank
            if create_page_rank_flag is True:
                # make this a create_page_rank_flag
                self.pr_values = page_rank(alpha=0.1,
                                           maxIter=self.maxIter,
                                           numDocs=self.numDocs,
                                           graph=self.graph,
                                           saver=saver,
                                           verbose=True)
            else:
                # if already calculated simply load
                self.pr_values = loader('rank')
def process(self, datasource, progressbar):
    """Autopsy ingest entry point: locate Windows Communication App
    "*.appcontent-ms" contact files, re-encode them from UTF-16 to UTF-8,
    parse the XML, and post one TSK_WINCOM_CONTACT artifact per file.

    Returns IngestModule.ProcessResult.OK in all cases (errors are logged).
    """
    PostBoard=IngestServices.getInstance()
    progressbar.switchToIndeterminate()
    ccase = Case.getCurrentCase().getSleuthkitCase()
    blackboard = Case.getCurrentCase().getServices().getBlackboard()
    msgcounter = 0
    # if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) or
    #     (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) or
    #     (file.isFile() == true)):
    #     return IngestModule.ProcessResult.OK
    #

    #prepare artifacts
    # Register the custom artifact/attribute types; addArtifactType throws
    # when they already exist, in which case we fall back to a lookup.
    artifact_name = "TSK_WINCOM_CONTACT"
    artifact_desc = "Windows Communication Contacts"
    try:
        artID_wincom_contact = ccase.addArtifactType(artifact_name, artifact_desc)
        attribute_name = "TSK_WINCOM_CONTACT_SERVICE"
        attribute_name1 = "TSK_WINCOM_CONTACT_APPID"
        attribute_name2 = "TSK_WINCOM_CONTACT_FIRSTNAME"
        attribute_name3 = "TSK_WINCOM_CONTACT_LASTNAME"
        attribute_name4 = "TSK_WINCOM_CONTACT_COUNTRY"
        attribute_name5 = "TSK_WINCOM_CONTACT_LOCALITY"
        attribute_name6 = "TSK_WINCOM_CONTACT_REGION"
        attribute_name7 = "TSK_WINCOM_CONTACT_BIRTHDAY"
        attID_ex= ccase.addArtifactAttributeType(attribute_name, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Service vs Person")
        attID_ex1 = ccase.addArtifactAttributeType(attribute_name1, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Application")
        attID_ex2 = ccase.addArtifactAttributeType(attribute_name2, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "First Name")
        attID_ex3 = ccase.addArtifactAttributeType(attribute_name3, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Last Name")
        attID_ex4 = ccase.addArtifactAttributeType(attribute_name4, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Country")
        attID_ex5 = ccase.addArtifactAttributeType(attribute_name5, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "City")
        attID_ex6 = ccase.addArtifactAttributeType(attribute_name6, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Region")
        attID_ex7 = ccase.addArtifactAttributeType(attribute_name7, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING, "Birthday")
    except:
        message = IngestMessage.createMessage(
            IngestMessage.MessageType.DATA, WindowsCommunicationModuleFactory.moduleName + str(msgcounter),
            "Error creating artifacts"+ str(msgcounter))
        #IngestServices.getInstance().postMessage(message)
        self.log(Level.INFO, "Artifacts Creation Error, some artifacts may not exist now. ==> ")

    artID_wincom_contact = ccase.getArtifactTypeID(artifact_name)
    artID_wincom_contact_evt = ccase.getArtifactType(artifact_name)

    #get files
    ##IngestServices.getInstance().postMessage(message)
    fileManager = Case.getCurrentCase().getServices().getFileManager()
    files = fileManager.findFiles(datasource, "%appcontent-ms")
    numFiles = len(files)
    progressbar.switchToDeterminate(numFiles)
    fileCount = 0

    for file in files:
        fileCount = fileCount + 1
        progressbar.progress(fileCount)
        progressbar.progress("Windows Communication Analyzer")
        msgcounter+=1
        # message = IngestMessage.createMessage(
        #     IngestMessage.MessageType.DATA, WindowsCommunicationModuleFactory.moduleName + str(msgcounter), str(msgcounter) + " - in file loop and found file:" + str(file.getParentPath()))
        # #IngestServices.getInstance().postMessage(message)
        ParentPath = file.getParentPath()
        #if "microsoft.windowscommunicationsapps" in ParentPath and "_8wekyb3d8bbwe" in ParentPath and file.getName().lower().endswith("appcontent-ms") and "Address" in ParentPath :
        if file.getSize() > 0 and "microsoft.windowscommunicationsapps" in ParentPath:
            # Copy the file out of the image (file id keeps names unique).
            lclXMLPath = os.path.join(Case.getCurrentCase().getTempDirectory(), str(file.getId()) + ".appcontent-ms")
            ContentUtils.writeToFile(file, File(lclXMLPath))
            if self.context.isJobCancelled():
                return IngestModule.ProcessResult.OK
            # Re-encode UTF-16 -> UTF-8 (including the XML declaration) so
            # ElementTree can parse it.
            with open(lclXMLPath, "rb") as XMLFile:
                with open(lclXMLPath+".rewrite", 'w+b') as NewXMLFile:
                    contents = XMLFile.read()
                    newContent = contents.decode('utf-16').encode('utf-8')
                    NewXMLFile.write(newContent.replace('<?xml version="1.0" encoding="utf-16"?>','<?xml version="1.0" encoding="utf-8"?>'))
                    NewXMLFile.close()
                XMLFile.close()
            f = open(lclXMLPath+".rewrite", "rb")
            all = f.read()
            f.close()
            message = IngestMessage.createMessage(
                IngestMessage.MessageType.DATA, WindowsCommunicationModuleFactory.moduleName + str(msgcounter), all)
            #IngestServices.getInstance().postMessage(message)
            #XMLFile = open(lclXMLPath, "rb")

            # "**" marks a field that was absent from this file.
            AppID="**"
            FirstName = "**"
            LastName = "**"
            HomeAddress1Country = "**"
            HomeAddress1Locality = "**"
            HomeAddress1Region = "**"
            Birthday = "**"
            Service = "**"
            root = ET.fromstring(all)
            # Walk every element; a property is recognised by its attribute
            # text, and its value lives either in the element text or in a
            # child element (when the element text is a 2-char placeholder).
            # NOTE(review): `teller =+1` parses as `teller = +1`, not
            # `teller += 1` — looks like a typo throughout; confirm.
            for elem in root.iter():
                teller = 0
                if "System.Contact.ConnectedServiceName" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            Service = child.text
                            # if teller == 1: break
                    else:
                        Service = elem.text
                elif "System.AppUserModel.PackageRelativeApplicationID" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            AppID = child.text
                            # if teller == 1: break
                    else:
                        if len(elem.text)==0:
                            AppID = elem.text
                        else:
                            AppID = "**"
                elif "System.Contact.FirstName" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            FirstName = child.text
                            if teller == 1: break
                    else:
                        FirstName = elem.text
                elif "System.Contact.LastName" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            LastName = child.text
                            if teller == 1: break
                    else:
                        LastName = elem.text
                elif "System.Contact.HomeAddress1Country" in str(elem.attrib):
                    if len(elem.text) == 2:
                        teller =+1
                        for child in elem:
                            HomeAddress1Country = child.text
                            if teller == 1: break
                    else:
                        HomeAddress1Country = elem.text
                elif "System.Contact.HomeAddress1Locality" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            HomeAddress1Locality = child.text
                            if teller == 1: break
                    else:
                        HomeAddress1Locality = elem.text
                elif "System.Contact.HomeAddress1Region" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            HomeAddress1Region = child.text
                            if teller == 1: break
                    else:
                        HomeAddress1Region = elem.text
                elif "System.Contact.Birthday" in str(elem.attrib):
                    if len(elem.text) == 2:
                        for child in elem:
                            teller =+1
                            Birthday = child.text
                            if teller == 1: break
                    else:
                        Birthday = elem.text
                else:
                    #another value - manual forensics
                    #AppID = "BLAHELSE"
                    pass
            #IngestServices.getInstance().postMessage(message)
            #end looping through elements
            #ready for next file
            # One artifact per contact file, with all eight attributes.
            art = file.newArtifact(artID_wincom_contact)
            attID_ex =ccase.getAttributeType("TSK_WINCOM_CONTACT_SERVICE")
            art.addAttribute(BlackboardAttribute(attID_ex, WindowsCommunicationModuleFactory.moduleName, Service))
            attID_ex1 =ccase.getAttributeType("TSK_WINCOM_CONTACT_APPID")
            art.addAttribute(BlackboardAttribute(attID_ex1, WindowsCommunicationModuleFactory.moduleName, AppID))
            attID_ex2 =ccase.getAttributeType("TSK_WINCOM_CONTACT_FIRSTNAME")
            art.addAttribute(BlackboardAttribute(attID_ex2, WindowsCommunicationModuleFactory.moduleName, FirstName))
            attID_ex3 =ccase.getAttributeType("TSK_WINCOM_CONTACT_LASTNAME")
            art.addAttribute(BlackboardAttribute(attID_ex3, WindowsCommunicationModuleFactory.moduleName, LastName))
            attID_ex4 =ccase.getAttributeType("TSK_WINCOM_CONTACT_COUNTRY")
            art.addAttribute(BlackboardAttribute(attID_ex4, WindowsCommunicationModuleFactory.moduleName, HomeAddress1Country))
            attID_ex5 =ccase.getAttributeType("TSK_WINCOM_CONTACT_LOCALITY")
            art.addAttribute(BlackboardAttribute(attID_ex5, WindowsCommunicationModuleFactory.moduleName, HomeAddress1Locality))
            attID_ex6 =ccase.getAttributeType("TSK_WINCOM_CONTACT_REGION")
            art.addAttribute(BlackboardAttribute(attID_ex6, WindowsCommunicationModuleFactory.moduleName, HomeAddress1Region))
            attID_ex7 =ccase.getAttributeType("TSK_WINCOM_CONTACT_BIRTHDAY")
            art.addAttribute(BlackboardAttribute(attID_ex7, WindowsCommunicationModuleFactory.moduleName, Birthday))
            IngestServices.getInstance().fireModuleDataEvent(ModuleDataEvent(WindowsCommunicationModuleFactory.moduleName, artID_wincom_contact_evt, None))
        else:
            pass
        #cleanup
        # NOTE(review): lclXMLPath is unbound when the branch above was
        # skipped on the first iteration; the bare except masks that.
        try:
            os.remove(lclXMLPath)
            os.remove(lclXMLPath+".rewrite")
        except:
            self.log(Level.INFO, "Cleanup of files did not work ")

    message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
        "Windows Communication App - Contacts", "Windows Communication App - Contacts Has Been Analyzed " )
    IngestServices.getInstance().postMessage(message)
    return IngestModule.ProcessResult.OK

# def shutDown(self):
#     # As a final part of this example, we'll send a message to the ingest inbox with the number of files found (in this thread)
#     msg2 = IngestMessage.createMessage(
#         IngestMessage.MessageType.DATA, WindowsCommunicationModuleFactory.moduleName,
#         "Found " + str(self.filesFound))
#     ingestServices = IngestServices.getInstance().postMessage(msg2)
    # Write the harvest metadata JSON (fragment: the opening `try:` for this
    # block is above the visible chunk).
    jsonFile = open(meta_file, "wb")
    jsonFile.write(json.toString(True))
    # NOTE(review): `jsonFile.close` is missing parentheses — it references
    # the bound method without calling it, so nothing is closed here.
    jsonFile.close
except Exception, e:
    raise
finally:
    if jsonFile is not None:
        # NOTE(review): same missing-parentheses bug; the file handle is
        # never actually closed.
        jsonFile.close
self.logInfo(
    file, "Submitting to harvest. Config file is %s and meta_file is %s" %
    (self.harvestConfig, meta_file))
try:
    ## Now instantiate a HarvestClient just for this File.
    harvester = HarvestClient(File(self.harvestConfig), File(meta_file), "guest")
    harvester.start()
    ## And cleanup afterwards
    oid = harvester.getUploadOid()
except HarvesterException, e:
    self.logException(file, e)
    raise
finally:
    ## Cleanup
    if harvester is not None:
        harvester.shutdown()
self.logInfo(
    file,
simDuration = preStimDur + stimDur # ms analyseStartTime = stimDel + 0 # So it's firing at a steady rate... analyseStopTime = simDuration analyseThreshold = -20 # mV mpiConfig = MpiSettings.LOCAL_SERIAL # Default setting: run on one local processor mpiConfig = MpiSettings.MATLEM_1PROC # Run on one processor on UCL cluster numConcurrentSims = 4 if mpiConfig != MpiSettings.LOCAL_SERIAL: numConcurrentSims = 30 suggestedRemoteRunTime = 9 # mins # Load neuroConstruct project projFile = File("../DentateGyrus.ncx") simManager = nc.SimulationManager(projFile, numConcurrentSims) simManager.generateFICurve("NEURON", simConfig, stimAmpLow, stimAmpInc, stimAmpHigh, stimDel, stimDur, simDuration, analyseStartTime, analyseStopTime, analyseThreshold, mpiConfig=mpiConfig,
def basename(path):
    """Return the final component of a pathname"""
    converted = _tostr(path, "basename")
    # java.io.File.getName yields the last path segment; convert the Java
    # string back to a Python string for callers.
    return asPyString(File(converted).getName())