class unittest_PullFTP(unittest.TestCase):
    """Smoke test for PullFTP: builds the 'source-ftp' fixture and pulls once."""

    def setUp(self, logFile='log/PullFTP.log'):
        """Create the logger, the Source and the PullFTP instance under test."""
        log_wrapper = Logger(logFile, 'DEBUG', 'Sub')
        self.logger = log_wrapper.getLogger()
        self.source = Source('source-ftp', self.logger)
        self.puller = PullFTP(self.source, self.logger)

    def test_PullFTP(self):
        """Exercise a pull; currently only checks that get() runs without raising."""
        print(self.source.pulls)
        print(self.puller.get())
        # Vacuous assertion kept from the original: the test passes as long
        # as the pull above did not raise.
        self.assertEqual(None, None)
# NOTE(review): this is a token-for-token duplicate of an earlier
# unittest_PullFTP definition; if both live in the same module, this one
# silently replaces the earlier class at definition time.
class unittest_PullFTP(unittest.TestCase):
    """Smoke test: builds a Source/PullFTP pair and performs one pull."""

    def setUp(self,logFile='log/PullFTP.log'):
        # Logger(...) returns a wrapper; getLogger() yields the usable logger.
        self.logger = Logger(logFile, 'DEBUG', 'Sub')
        self.logger = self.logger.getLogger()
        # 'source-ftp' is the name of the source configuration under test.
        self.source = Source('source-ftp', self.logger)
        self.puller = PullFTP(self.source,self.logger)

    def test_PullFTP(self):
        # Print diagnostics; the assertEqual(None, None) below is vacuous,
        # so the test only verifies that get() does not raise.
        print self.source.pulls
        print self.puller.get()
        self.assertEqual(None, None)
def setUp(self, logFile='log/PullFTP.log'):
    """Build the test fixtures: a DEBUG logger, a Source and its PullFTP puller."""
    wrapper = Logger(logFile, 'DEBUG', 'Sub')
    self.logger = wrapper.getLogger()
    self.source = Source('source-ftp', self.logger)
    self.puller = PullFTP(self.source, self.logger)
def ingestBulletinFile(self, igniter):
    """Endless ingestion loop for bulletin-type sources.

    Each cycle: honours a SIGHUP reload (igniter.reloadMode), optionally
    pulls files into the rxq directory (FTP or user pull script), reads a
    batch from disk, optionally runs fx/lx conversion scripts, writes each
    bulletin through a bulletinManager and erases the source file.
    This method never returns.
    """
    from DiskReader import DiskReader
    import bulletinManager
    import bulletinManagerAm
    from PullFTP import PullFTP

    # Pull-mode sources wait their configured pull_sleep between polls;
    # everything else re-polls every second.
    sleep_sec = 1
    if self.source.type == 'pull-bulletin' or self.source.pull_script != None :
        sleep_sec = self.source.pull_sleep

    # Default bulletin writer; replaced below by the AM-specific manager
    # when the source is configured with bulletin_type 'am'.
    bullManager = bulletinManager.bulletinManager(
        self.ingestDir,
        self.logger,
        self.ingestDir,
        99999,
        '\n',
        self.source.extension,
        self.source.routingTable,
        self.source.mapEnteteDelai,
        self.source,
        self.source.addStationInFilename)

    if self.source.bulletin_type == 'am' :
        bullManager = bulletinManagerAm.bulletinManagerAm(
            self.ingestDir,
            self.logger,
            self.ingestDir,
            99999,
            '\n',
            self.source.extension,
            self.source.routingTable,
            self.source.addSMHeader,
            PXPaths.STATION_TABLE,
            self.source.mapEnteteDelai,
            self.source,
            self.source.addStationInFilename)

    # Duplicate suppression: md5-keyed cache with an 8-hour timeout.
    if self.source.nodups :
        self.fileCache = CacheManager(maxEntries=self.source.cache_size, timeout=8*3600)

    reader = DiskReader(bullManager.pathSource, self.source.batch,
                        self.source.validation, self.source.patternMatching,
                        self.source.mtime, False, self.source.logger,
                        self.source.sorter, self.source)

    while True:

        # If a SIGHUP signal is received ...
        if igniter.reloadMode == True:
            # We assign the defaults, reread configuration file for the source
            # and reread all configuration file for the clients (all this in __init__)
            if self.source.type == 'filter-bulletin' :
                self.source.__init__(self.source.name, self.source.logger, True, True)
            else :
                self.source.__init__(self.source.name, self.source.logger)

            # Rebuild the manager(s), cache and reader so they pick up the
            # freshly re-read configuration.
            bullManager = bulletinManager.bulletinManager(
                self.ingestDir,
                self.logger,
                self.ingestDir,
                99999,
                '\n',
                self.source.extension,
                self.source.routingTable,
                self.source.mapEnteteDelai,
                self.source,
                self.source.addStationInFilename)

            if self.source.bulletin_type == 'am' :
                bullManager = bulletinManagerAm.bulletinManagerAm(
                    self.ingestDir,
                    self.logger,
                    self.ingestDir,
                    99999,
                    '\n',
                    self.source.extension,
                    self.source.routingTable,
                    self.source.addSMHeader,
                    PXPaths.STATION_TABLE,
                    self.source.mapEnteteDelai,
                    self.source,
                    self.source.addStationInFilename)

            if self.source.nodups :
                self.fileCache = CacheManager(maxEntries=self.source.cache_size, timeout=8*3600)

            reader = DiskReader(bullManager.pathSource, self.source.batch,
                                self.source.validation, self.source.patternMatching,
                                self.source.mtime, False, self.source.logger,
                                self.source.sorter, self.source)

            self.logger.info("Receiver has been reloaded")
            igniter.reloadMode = False

        # pull files in rxq directory if in pull mode
        if self.source.type == 'pull-bulletin' or self.source.pull_script != None :
            files = []
            # A '.sleep' marker file in the source's rxq directory pauses the pull.
            sleeping = os.path.isfile(PXPaths.RXQ + self.source.name + '/.sleep')

            if self.source.type == 'pull-bulletin' :
                puller = PullFTP(self.source,self.logger,sleeping)
                files = puller.get()
                puller.close()
            elif self.source.pull_script != None :
                files = self.source.pull_script(self.source,self.logger,sleeping)

            if not sleeping :
                self.logger.debug("Number of files pulled = %s" % len(files) )
            else :
                self.logger.info("This pull is sleeping")

        # normal diskreader call for files
        reader.read()

        # processing the list if necessary...
        if self.source.lx_execfile != None and len(reader.sortedFiles) > 0:
            sfiles = []
            sfiles.extend(reader.sortedFiles)
            self.logger.info("%d files process with lx_script" % len(sfiles))
            # The lx script may filter/rename the batch; reader keeps the result.
            sortedFiles = self.source.run_lx_script(sfiles,self.source.logger)
            reader.sortedFiles = sortedFiles

        # continue normally
        data = reader.getFilesContent(reader.batch)

        if len(data) == 0:
            time.sleep(sleep_sec)
            continue
        else:
            self.logger.info("%d bulletins will be ingested", len(data))

        # Write (and name correctly) the bulletins to disk, erase them after
        for index in range(len(data)):

            # ignore duplicate if requiered
            duplicate = self.source.nodups and self.fileCache.find(data[index], 'md5') is not None

            #nb_bytes = len(data[index])
            #self.logger.info("Lecture de %s: %d bytes" % (reader.sortedFiles[index], nb_bytes))

            if not duplicate :

                # converting the file if necessary
                if self.source.fx_execfile != None :

                    file = reader.sortedFiles[index]
                    fxfile = self.source.run_fx_script(file,self.source.logger)

                    # convertion did not work
                    if fxfile == None :
                        self.logger.warning("FX script ignored the file : %s" % os.path.basename(file) )
                        os.unlink(file)
                        continue

                    # file already in proper format
                    elif fxfile == file :
                        self.logger.warning("FX script kept the file as is : %s" % os.path.basename(file) )

                    # file converted...
                    else :
                        self.logger.info("FX script modified %s to %s " % (os.path.basename(file),os.path.basename(fxfile)) )
                        os.unlink(file)
                        # Re-read the converted file so data[index] matches
                        # what will actually be ingested below.
                        fp = open(fxfile,'r')
                        dx = fp.read()
                        fp.close()
                        reader.sortedFiles[index] = fxfile
                        data[index] = dx

                # writing/ingesting the bulletin
                if isinstance(bullManager,bulletinManagerAm.bulletinManagerAm):
                    bullManager.writeBulletinToDisk(data[index], True)
                else :
                    bullManager.writeBulletinToDisk(data[index], True, True)

            # Source file is erased whether it was ingested or a duplicate.
            try:
                file = reader.sortedFiles[index]
                os.unlink(file)
                if duplicate : self.logger.info("suppressed duplicate file %s", os.path.basename(file))
                self.logger.debug("%s has been erased", os.path.basename(file))
            except OSError, e:
                # NOTE(review): `type` and `value` shadow the builtins here.
                (type, value, tb) = sys.exc_info()
                self.logger.error("Unable to unlink %s ! Type: %s, Value: %s" % (reader.sortedFiles[index], type, value))
def ingestSingleFile(self, igniter):
    """Endless ingestion loop for single-file sources.

    Each cycle: honours a SIGHUP reload (igniter.reloadMode), optionally
    pulls files into the rxq directory (FTP or user pull script), reads a
    batch from disk, optionally runs the lx script over the batch, then
    hands each file to self.ingestFile().  This method never returns.
    """
    from DiskReader import DiskReader
    from DirectRoutingParser import DirectRoutingParser
    from PullFTP import PullFTP

    # Direct-routing table, parsed once up front when routemask is set.
    if self.source.routemask :
        self.drp = DirectRoutingParser(self.source.routingTable, self.allNames, self.logger, self.source.routing_version)
        self.drp.parse()

    # Duplicate suppression: md5-keyed cache with an 8-hour timeout.
    if self.source.nodups :
        self.fileCache = CacheManager(maxEntries=self.source.cache_size, timeout=8*3600)

    reader = DiskReader(self.ingestDir, self.source.batch,
                        self.source.validation, self.source.patternMatching,
                        self.source.mtime, False, self.source.logger,
                        self.source.sorter, self.source)

    # Pull-mode sources wait their configured pull_sleep between polls;
    # everything else re-polls every second.
    sleep_sec = 1
    if self.source.type == 'pull-file' or self.source.pull_script != None :
        sleep_sec = self.source.pull_sleep

    while True:

        if igniter.reloadMode == True:
            # We assign the defaults, reread configuration file for the source
            # and reread all configuration file for the clients (all this in __init__)
            if self.source.type == 'filter' :
                self.source.__init__(self.source.name, self.source.logger, True, True)
            else :
                self.source.__init__(self.source.name, self.source.logger)

            if self.source.routemask :
                # NOTE(review): unlike the initial parse above, the reload
                # path omits the routing_version argument — confirm intended.
                self.drp = DirectRoutingParser(self.source.routingTable, self.allNames, self.logger)
                self.drp.parse()

            if self.source.nodups :
                self.fileCache = CacheManager(maxEntries=self.source.cache_size, timeout=8*3600)

            reader = DiskReader(self.ingestDir, self.source.batch,
                                self.source.validation, self.source.patternMatching,
                                self.source.mtime, False, self.source.logger,
                                self.source.sorter, self.source)

            self.logger.info("Receiver has been reloaded")
            igniter.reloadMode = False

        # pull files in rxq directory if in pull mode
        if self.source.type == 'pull-file' or self.source.pull_script != None :
            files = []
            # A '.sleep' marker file in the source's rxq directory pauses the pull.
            sleeping = os.path.isfile(PXPaths.RXQ + self.source.name + '/.sleep')

            if self.source.type == 'pull-file' :
                puller = PullFTP(self.source,self.logger,sleeping)
                files = puller.get()
                puller.close()
            elif self.source.pull_script != None :
                files = self.source.pull_script(self.source,self.logger,sleeping)

            if not sleeping :
                self.logger.debug("Number of files pulled = %s" % len(files) )
            else :
                self.logger.info("This pull is sleeping")

        # normal diskreader call for files
        reader.read()
        if len(reader.sortedFiles) <= 0:
            time.sleep(sleep_sec)
            continue

        # Only ingest up to `batch` files per cycle.
        sortedFiles = reader.sortedFiles[:self.source.batch]

        # processing the list if necessary...
        if self.source.lx_execfile != None :
            sfiles = []
            sfiles.extend(sortedFiles)
            self.logger.info("%d files process with lx_script" % len(sfiles))
            sortedFiles = self.source.run_lx_script(sfiles,self.source.logger)

        self.logger.info("%d files will be ingested" % len(sortedFiles))
        for file in sortedFiles:
            self.ingestFile(file)
def ingestBulletinFile(self, igniter):
    """Endless ingestion loop for bulletin-type sources (formatted variant).

    Each cycle: honours a SIGHUP reload (igniter.reloadMode), optionally
    pulls files into the rxq directory (FTP or user pull script), reads a
    batch from disk, optionally runs fx/lx conversion scripts, writes each
    bulletin through a bulletinManager and erases the source file.
    This method never returns.
    """
    from DiskReader import DiskReader
    import bulletinManager
    import bulletinManagerAm
    from PullFTP import PullFTP

    # Pull-mode sources wait their configured pull_sleep between polls;
    # everything else re-polls every second.
    sleep_sec = 1
    if self.source.type == 'pull-bulletin' or self.source.pull_script != None:
        sleep_sec = self.source.pull_sleep

    # Default bulletin writer; replaced below by the AM-specific manager
    # when the source is configured with bulletin_type 'am'.
    bullManager = bulletinManager.bulletinManager(
        self.ingestDir,
        self.logger,
        self.ingestDir,
        99999,
        '\n',
        self.source.extension,
        self.source.routingTable,
        self.source.mapEnteteDelai,
        self.source,
        self.source.addStationInFilename)

    if self.source.bulletin_type == 'am':
        bullManager = bulletinManagerAm.bulletinManagerAm(
            self.ingestDir,
            self.logger,
            self.ingestDir,
            99999,
            '\n',
            self.source.extension,
            self.source.routingTable,
            self.source.addSMHeader,
            PXPaths.STATION_TABLE,
            self.source.mapEnteteDelai,
            self.source,
            self.source.addStationInFilename)

    # Duplicate suppression: md5-keyed cache with an 8-hour timeout.
    if self.source.nodups:
        self.fileCache = CacheManager(maxEntries=self.source.cache_size,
                                      timeout=8 * 3600)

    reader = DiskReader(bullManager.pathSource, self.source.batch,
                        self.source.validation, self.source.patternMatching,
                        self.source.mtime, False, self.source.logger,
                        self.source.sorter, self.source)

    while True:

        # If a SIGHUP signal is received ...
        if igniter.reloadMode == True:
            # We assign the defaults, reread configuration file for the source
            # and reread all configuration file for the clients (all this in __init__)
            if self.source.type == 'filter-bulletin':
                self.source.__init__(self.source.name, self.source.logger,
                                     True, True)
            else:
                self.source.__init__(self.source.name, self.source.logger)

            # Rebuild the manager(s), cache and reader so they pick up the
            # freshly re-read configuration.
            bullManager = bulletinManager.bulletinManager(
                self.ingestDir,
                self.logger,
                self.ingestDir,
                99999,
                '\n',
                self.source.extension,
                self.source.routingTable,
                self.source.mapEnteteDelai,
                self.source,
                self.source.addStationInFilename)

            if self.source.bulletin_type == 'am':
                bullManager = bulletinManagerAm.bulletinManagerAm(
                    self.ingestDir,
                    self.logger,
                    self.ingestDir,
                    99999,
                    '\n',
                    self.source.extension,
                    self.source.routingTable,
                    self.source.addSMHeader,
                    PXPaths.STATION_TABLE,
                    self.source.mapEnteteDelai,
                    self.source,
                    self.source.addStationInFilename)

            if self.source.nodups:
                self.fileCache = CacheManager(
                    maxEntries=self.source.cache_size, timeout=8 * 3600)

            reader = DiskReader(bullManager.pathSource, self.source.batch,
                                self.source.validation,
                                self.source.patternMatching,
                                self.source.mtime, False, self.source.logger,
                                self.source.sorter, self.source)

            self.logger.info("Receiver has been reloaded")
            igniter.reloadMode = False

        # pull files in rxq directory if in pull mode
        if self.source.type == 'pull-bulletin' or self.source.pull_script != None:
            files = []
            # A '.sleep' marker file in the source's rxq directory pauses the pull.
            sleeping = os.path.isfile(PXPaths.RXQ + self.source.name + '/.sleep')

            if self.source.type == 'pull-bulletin':
                puller = PullFTP(self.source, self.logger, sleeping)
                files = puller.get()
                puller.close()
            elif self.source.pull_script != None:
                files = self.source.pull_script(self.source, self.logger,
                                                sleeping)

            if not sleeping:
                self.logger.debug("Number of files pulled = %s" % len(files))
            else:
                self.logger.info("This pull is sleeping")

        # normal diskreader call for files
        reader.read()

        # processing the list if necessary...
        if self.source.lx_execfile != None and len(reader.sortedFiles) > 0:
            sfiles = []
            sfiles.extend(reader.sortedFiles)
            self.logger.info("%d files process with lx_script" % len(sfiles))
            # The lx script may filter/rename the batch; reader keeps the result.
            sortedFiles = self.source.run_lx_script(
                sfiles, self.source.logger)
            reader.sortedFiles = sortedFiles

        # continue normally
        data = reader.getFilesContent(reader.batch)

        if len(data) == 0:
            time.sleep(sleep_sec)
            continue
        else:
            self.logger.info("%d bulletins will be ingested", len(data))

        # Write (and name correctly) the bulletins to disk, erase them after
        for index in range(len(data)):

            # ignore duplicate if requiered
            duplicate = self.source.nodups and self.fileCache.find(
                data[index], 'md5') is not None

            #nb_bytes = len(data[index])
            #self.logger.info("Lecture de %s: %d bytes" % (reader.sortedFiles[index], nb_bytes))

            if not duplicate:

                # converting the file if necessary
                if self.source.fx_execfile != None:

                    file = reader.sortedFiles[index]
                    fxfile = self.source.run_fx_script(
                        file, self.source.logger)

                    # convertion did not work
                    if fxfile == None:
                        self.logger.warning(
                            "FX script ignored the file : %s" %
                            os.path.basename(file))
                        os.unlink(file)
                        continue

                    # file already in proper format
                    elif fxfile == file:
                        self.logger.warning(
                            "FX script kept the file as is : %s" %
                            os.path.basename(file))

                    # file converted...
                    else:
                        self.logger.info("FX script modified %s to %s " %
                                         (os.path.basename(file),
                                          os.path.basename(fxfile)))
                        os.unlink(file)
                        # Re-read the converted file so data[index] matches
                        # what will actually be ingested below.
                        fp = open(fxfile, 'r')
                        dx = fp.read()
                        fp.close()
                        reader.sortedFiles[index] = fxfile
                        data[index] = dx

                # writing/ingesting the bulletin
                if isinstance(bullManager,
                              bulletinManagerAm.bulletinManagerAm):
                    bullManager.writeBulletinToDisk(data[index], True)
                else:
                    bullManager.writeBulletinToDisk(
                        data[index], True, True)

            # Source file is erased whether it was ingested or a duplicate.
            try:
                file = reader.sortedFiles[index]
                os.unlink(file)
                if duplicate:
                    self.logger.info("suppressed duplicate file %s",
                                     os.path.basename(file))
                self.logger.debug("%s has been erased",
                                  os.path.basename(file))
            except OSError, e:
                # NOTE(review): `type` and `value` shadow the builtins here.
                (type, value, tb) = sys.exc_info()
                self.logger.error(
                    "Unable to unlink %s ! Type: %s, Value: %s" %
                    (reader.sortedFiles[index], type, value))
def ingestSingleFile(self, igniter):
    """Endless ingestion loop for single-file sources (formatted variant).

    Each cycle: honours a SIGHUP reload (igniter.reloadMode), optionally
    pulls files into the rxq directory (FTP or user pull script), reads a
    batch from disk, optionally runs the lx script over the batch, then
    hands each file to self.ingestFile().  This method never returns.
    """
    from DiskReader import DiskReader
    from DirectRoutingParser import DirectRoutingParser
    from PullFTP import PullFTP

    # Direct-routing table, parsed once up front when routemask is set.
    if self.source.routemask:
        self.drp = DirectRoutingParser(self.source.routingTable,
                                       self.allNames, self.logger,
                                       self.source.routing_version)
        self.drp.parse()

    # Duplicate suppression: md5-keyed cache with an 8-hour timeout.
    if self.source.nodups:
        self.fileCache = CacheManager(maxEntries=self.source.cache_size,
                                      timeout=8 * 3600)

    reader = DiskReader(self.ingestDir, self.source.batch,
                        self.source.validation, self.source.patternMatching,
                        self.source.mtime, False, self.source.logger,
                        self.source.sorter, self.source)

    # Pull-mode sources wait their configured pull_sleep between polls;
    # everything else re-polls every second.
    sleep_sec = 1
    if self.source.type == 'pull-file' or self.source.pull_script != None:
        sleep_sec = self.source.pull_sleep

    while True:

        if igniter.reloadMode == True:
            # We assign the defaults, reread configuration file for the source
            # and reread all configuration file for the clients (all this in __init__)
            if self.source.type == 'filter':
                self.source.__init__(self.source.name, self.source.logger,
                                     True, True)
            else:
                self.source.__init__(self.source.name, self.source.logger)

            if self.source.routemask:
                # NOTE(review): unlike the initial parse above, the reload
                # path omits the routing_version argument — confirm intended.
                self.drp = DirectRoutingParser(self.source.routingTable,
                                               self.allNames, self.logger)
                self.drp.parse()

            if self.source.nodups:
                self.fileCache = CacheManager(
                    maxEntries=self.source.cache_size, timeout=8 * 3600)

            reader = DiskReader(self.ingestDir, self.source.batch,
                                self.source.validation,
                                self.source.patternMatching,
                                self.source.mtime, False, self.source.logger,
                                self.source.sorter, self.source)

            self.logger.info("Receiver has been reloaded")
            igniter.reloadMode = False

        # pull files in rxq directory if in pull mode
        if self.source.type == 'pull-file' or self.source.pull_script != None:
            files = []
            # A '.sleep' marker file in the source's rxq directory pauses the pull.
            sleeping = os.path.isfile(PXPaths.RXQ + self.source.name + '/.sleep')

            if self.source.type == 'pull-file':
                puller = PullFTP(self.source, self.logger, sleeping)
                files = puller.get()
                puller.close()
            elif self.source.pull_script != None:
                files = self.source.pull_script(self.source, self.logger,
                                                sleeping)

            if not sleeping:
                self.logger.debug("Number of files pulled = %s" % len(files))
            else:
                self.logger.info("This pull is sleeping")

        # normal diskreader call for files
        reader.read()
        if len(reader.sortedFiles) <= 0:
            time.sleep(sleep_sec)
            continue

        # Only ingest up to `batch` files per cycle.
        sortedFiles = reader.sortedFiles[:self.source.batch]

        # processing the list if necessary...
        if self.source.lx_execfile != None:
            sfiles = []
            sfiles.extend(sortedFiles)
            self.logger.info("%d files process with lx_script" % len(sfiles))
            sortedFiles = self.source.run_lx_script(
                sfiles, self.source.logger)

        self.logger.info("%d files will be ingested" % len(sortedFiles))
        for file in sortedFiles:
            self.ingestFile(file)
def setUp(self, logFile='log/PullFTP.log'):
    """Prepare test fixtures: DEBUG logger, 'source-ftp' Source, PullFTP puller."""
    logger_factory = Logger(logFile, 'DEBUG', 'Sub')
    self.logger = logger_factory.getLogger()
    self.source = Source('source-ftp', self.logger)
    self.puller = PullFTP(self.source, self.logger)