Example No. 1
    def __init__(self, client, logger):
        self.client = client  # Client object (gives access to all configuration options)
        self.timeout = client.timeout  # No timeout for now
        self.logger = logger  # Logger object
        self.connection = None  # The connection
        self.igniter = None
        self.ssl = False
        self.reader = DiskReader(PXPaths.TXQ + self.client.name,
                                 self.client.batch, self.client.validation,
                                 self.client.patternMatching,
                                 self.client.mtime, True, self.logger,
                                 eval(self.client.sorter), self.client)

        self.debugFile = False

        self.cacheManager = CacheManager(maxEntries=self.client.cache_size,
                                         timeout=8 * 3600)

        # AMQP: is there a maximum message size?
        # self.set_maxLength(self.client.maxLength)

        # statistics.
        self.totBytes = 0
        self.initialTime = time.time()
        self.finalTime = None

        self._connect()
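
Note: every example on this page fills in the DiskReader constructor positionally. Going by the inline comments in Examples 4 and 6 below, the arguments appear to be the queue path, batch size, name validation, pattern matching, the mtime check, a priority-tree flag, the logger, the sorter and the owning client/flow object. The sketch below simply restates that call with one comment per argument; the helper name is illustrative, it assumes the usual MetPX-Sundew setup (PXPaths initialized, a configured client and a logger), and the parameter meanings are inferred from these examples rather than taken from the DiskReader source.

# Hedged sketch -- argument meanings inferred from Examples 4 and 6, not from the DiskReader source.
from DiskReader import DiskReader
import PXPaths

def makeReader(client, logger):
    return DiskReader(PXPaths.TXQ + client.name,  # queue directory to scan
                      client.batch,               # number of files read per pass
                      client.validation,          # filename validation on/off
                      client.patternMatching,     # pattern matching on/off
                      client.mtime,               # modification-time constraint
                      True,                       # walk the priority tree
                      logger,                     # Logger object
                      eval(client.sorter),        # sorter, evaluated from the config string
                      client)                     # owning client/flow object
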
Example No. 2
    def __init__(self, client, logger):
        self.client = client  # Client object (gives access to all configuration options)
        self.remoteHost = client.host  # Remote host (name or ip)
        self.port = int(client.port)  # Port (int) to which the receiver is bound
        self.address = (self.remoteHost, self.port)  # Socket address
        self.timeout = client.timeout  # No timeout for now
        self.logger = logger  # Logger object
        self.socketAMIS = None  # The socket
        self.igniter = None
        self.reader = DiskReader(PXPaths.TXQ + self.client.name,
                                 self.client.batch, self.client.validation,
                                 self.client.patternMatching,
                                 self.client.mtime, True, self.logger,
                                 eval(self.client.sorter), self.client)

        self.preamble = chr(curses.ascii.SOH) + "\r\n"
        self.endOfLineSep = "\r\r\n"
        self.endOfMessage = self.endOfLineSep + chr(curses.ascii.ETX) + "\r\n\n" + chr(curses.ascii.EOT)
        self.debugFile = False

        self.cacheManager = CacheManager(maxEntries=self.client.cache_size,
                                         timeout=8 * 3600)

        # AMIS's maximum bulletin size is 14000

        self.set_maxLength(self.client.maxLength)

        # statistics.
        self.totBytes = 0
        self.initialTime = time.time()
        self.finalTime = None

        self._connect()
Example No. 3
    def ingestCollection(self, igniter):
        from DiskReader import DiskReader
        import bulletinManager
        import CollectionManager

        bullManager = bulletinManager.bulletinManager(
                    PXPaths.RXQ + self.source.name,
                    self.logger,
                    PXPaths.RXQ + self.source.name,
                    99999,
                    '\n',
                    self.source.extension,
                    self.source.routingTable,
                    self.source.mapEnteteDelai,
                    self.source)

        reader = DiskReader(bullManager.pathSource, self.source.batch, self.source.validation, self.source.patternMatching,
                            self.source.mtime, False, self.source.logger, self.source.sorter, self.source)

        collect = CollectionManager.CollectionManager(self, bullManager, reader)

        while True:
            # If a SIGHUP signal is received ...
            if igniter.reloadMode == True:
                # We assign the defaults, reread the configuration file for the source
                # and reread all configuration files for the clients (all of this in __init__)
                self.source.__init__(self.source.name, self.source.logger)
                bullManager = bulletinManager.bulletinManager(
                               PXPaths.RXQ + self.source.name,
                               self.logger,
                               PXPaths.RXQ + self.source.name,
                               99999,
                               '\n',
                               self.source.extension,
                               self.source.routingTable,
                               self.source.mapEnteteDelai,
                               self.source)
                reader = DiskReader(bullManager.pathSource, self.source.batch, self.source.validation, self.source.patternMatching,
                                    self.source.mtime, False, self.source.logger, self.source.sorter, self.source)
                collect = CollectionManager.CollectionManager(self, bullManager, reader)

                self.logger.info("Receiver has been reloaded")
                igniter.reloadMode = False

            collect.process()

            time.sleep(20)
Example No. 4
    def resetReader(self):
        self.reader = DiskReader(PXPaths.TXQ + self.flow.name,
                                 self.flow.batch,            # Number of files we read each time
                                 self.flow.validation,       # name validation
                                 self.flow.patternMatching,  # pattern matching
                                 self.flow.mtime,            # we don't check modification time
                                 True,                       # priority tree
                                 self.logger,
                                 eval(self.flow.sorter),
                                 self.flow)
Example No. 5
    def __init__(self, sourlient):

        AFTNPaths.normalPaths(sourlient.name)
        PXPaths.normalPaths()
        self.sysman = SystemManager()                      # General system manager
        self.sourlient = sourlient                         # Sourlient (Source/Client) object containing configuration info.

        self.logger = sourlient.logger                     # Logger object
        self.subscriber = sourlient.subscriber             # Determines whether it acts as a subscriber or a provider (MHS)
        self.host = sourlient.host                         # Remote host (name or ip)
        self.portR = sourlient.portR                       # Receiving port
        self.portS = sourlient.portS                       # Sending port
        
        self.batch = sourlient.batch                       # Number of files we read in a pass (20)
        self.timeout = sourlient.timeout                   # Timeout time in seconds (default = 10 seconds)
        self.sleepBetweenConnect = 10                      # Time (in seconds) between connection attempts
        self.slow = sourlient.slow                         # Sleeps are added when we want to be able to decipher the log entries
        self.igniter = None                                # Igniter object (link to pid)

        self.writePath = AFTNPaths.RECEIVED                # Where we write messages we receive
        self.archivePath = AFTNPaths.SENT                  # Where we put sent messages
        self.specialOrdersPath = AFTNPaths.SPECIAL_ORDERS  # Where we put special orders

        # Paths creation
        self.sysman.createDir(PXPaths.TXQ + self.sourlient.name)
        self.sysman.createDir(self.writePath)
        self.sysman.createDir(self.archivePath)
        self.sysman.createDir(self.specialOrdersPath)


        self.mm = MessageManager(self.logger, self.sourlient)  # AFTN Protocol is implemented in MessageManager Object
        self.remoteAddress = None                          # Remote address (where we will connect())
        self.socket = None                                 # Socket object
        self.dataFromFiles = []                            # A list of tuples (content, filename) obtained from a DiskReader 

        self.reader = DiskReader(PXPaths.TXQ + self.sourlient.name, self.sourlient.batch,
                                 self.sourlient.validation, self.sourlient.diskReaderPatternMatching,
                                 self.sourlient.mtime, True, self.logger, eval(self.sourlient.sorter), self.sourlient)
        
        self.debug = True  # Debugging switch
        self.justConnect = False  # Boolean indicating that a connection has just occurred
        
        self.totBytes = 0

        #self.printInitInfos()
        self.makeConnection()
Example No. 6
    def __init__(self, path, client, logger):
        gateway.gateway.__init__(self, path, client, logger)
        self.client = client
        self.establishConnection()

        self.reader = DiskReader(PXPaths.TXQ + self.client.name, 
                                 self.client.batch,            # Number of files we read each time
                                 self.client.validation,       # name validation
                                 self.client.patternMatching,  # pattern matching
                                 self.client.mtime,            # we don't check modification time
                                 True,                         # priority tree
                                 self.logger,
                                 eval(self.client.sorter),
                                 self.client)

        # Mechanism to eliminate multiple copies of a bulletin
        self.cacheManager = CacheManager(maxEntries=self.client.cache_size, timeout=8*3600)

        # WMO's maximum bulletin size is 500 000 bytes
        self.set_maxLength(self.client.maxLength)
Example No. 7
    def resetReader(self):
        self.reader = DiskReader(PXPaths.TXQ + self.client.name,
                                 self.client.batch, self.client.validation,
                                 self.client.patternMatching,
                                 self.client.mtime, True, self.logger,
                                 eval(self.client.sorter), self.client)
Example No. 8
    def setUp(self, logFile='log/DiskReader.log'):
        self.logger = Logger(logFile, 'DEBUG', 'Sub')
        self.logger = self.logger.getLogger()
        self.reader = DiskReader('txq/test/', 20, False, 5, False, True,
                                 self.logger)
Example No. 9
    def doSpecialOrders(self, path):
        # Stop, restart, reload, disconnect, connect could be put here?
        reader = DiskReader(path)
        reader.read()
        dataFromFiles = reader.getFilenamesAndContent()
        for index in range(len(dataFromFiles)):
            words = dataFromFiles[index][0].strip().split()
            self.logger.info("Special Order: %s" %
                             (dataFromFiles[index][0].strip()))

            if words[0] == 'outCSN':
                if words[1] == '+':
                    self.nextCSN()
                    self.logger.info("CSN = %s" % self.CSN)
                elif words[1] == '-':
                    # This case exists only for testing purposes. It is not complete and not correct when the CSN
                    # value is 0 or 1
                    self.nextCSN(str(int(self.CSN) - 2))
                    self.logger.info("CSN = %s" % self.CSN)
                elif words[1] == 'print':
                    self.logger.info("CSN = %s" % self.CSN)
                else:
                    # We assume it's a number; we don't verify it!
                    self.nextCSN(words[1])
                    self.logger.info("CSN = %s" % self.CSN)

            elif words[0] == 'inCSN':
                if words[1] == '+':
                    self.calcWaitedTID(self.waitedTID)
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                elif words[1] == '-':
                    # This case exists only for testing purposes. It is not complete and not correct when the waited TID
                    # value is 0 or 1
                    self.calcWaitedTID(self.otherStationID + "%04d" %
                                       (int(self.waitedTID[3:]) - 2))
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                elif words[1] == 'print':
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                else:
                    # We assume it's a number; we don't verify it!
                    self.calcWaitedTID(self.otherStationID +
                                       "%04d" % int(words[1]))
                    self.logger.info("Waited TID = %s" % self.waitedTID)

            elif words[0] == 'ackWaited':
                if words[1] == 'print':
                    self.logger.info("Waiting for ack: %s" %
                                     self.getWaitingForAck())
                else:
                    self.setWaitingForAck(words[1])
                    self.incrementSendingInfos()
            elif words[0] == 'ackNotWaited':
                self.setWaitingForAck(None)
                self.resetSendingInfos()
                self.updatePartsToSend()
            elif words[0] == 'ackUsed':
                self.ackUsed = words[1] == 'True' or words[1] == 'true'
            elif words[0] == 'printState':
                self.logger.info(self.state.infos())
            else:
                pass

            try:
                os.unlink(dataFromFiles[index][1])
                self.logger.debug("%s has been erased",
                                  os.path.basename(dataFromFiles[index][1]))
            except OSError, e:
                (type, value, tb) = sys.exc_info()
                self.logger.error("Unable to unlink %s ! Type: %s, Value: %s" %
                                  (dataFromFiles[index][1], type, value))
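
The special orders in Example 9 are plain text files whose first token selects the action (outCSN, inCSN, ackWaited, ackNotWaited, ackUsed, printState) and whose second token, when present, is its argument. A hypothetical way to drive doSpecialOrders() is therefore to drop a one-line file into the directory it polls; the helper name and the filename below are placeholders, not values taken from the original code.

import os

def writeSpecialOrder(ordersPath, text):
    # ordersPath is assumed to be the directory handed to doSpecialOrders()
    # (AFTNPaths.SPECIAL_ORDERS in Example 5); the filename is arbitrary.
    filename = os.path.join(ordersPath, 'order.txt')
    fp = open(filename, 'w')
    fp.write(text + '\n')
    fp.close()

# e.g. ask the message manager to log its current CSN on the next pass:
# writeSpecialOrder(AFTNPaths.SPECIAL_ORDERS, 'outCSN print')
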
Example No. 10
    from Logger import *
    from MessageAFTN import MessageAFTN
    from DiskReader import DiskReader
    from MessageParser import MessageParser
    from Sourlient import Sourlient

    logger = Logger('/apps/px/aftn/log/mm.log', 'DEBUG', 'mm')
    logger = logger.getLogger()

    sourlient = Sourlient('aftn', logger)

    print "Longueur Max = %d" % MessageAFTN.MAX_TEXT_SIZE

    mm = MessageManager(logger, sourlient)

    reader = DiskReader("/apps/px/bulletins", 8)
    reader.read()
    reader.sort()
    """
    for file in reader.getFilesContent(8):
       print file
       mm.setInfos(MessageParser(file).getHeader())
       mm.printInfos()
       if mm.header:
          myMessage = MessageAFTN(logger, file, mm.stationID, mm.originatorAddress,mm.priority,
                                  mm.destAddress, mm.CSN, mm.filingTime, mm.dateTime)

          myMessage.printInfos()

    """
    for file in reader.getFilesContent(8):
Example No. 11
    def ingestBulletinFile(self, igniter):
        from DiskReader import DiskReader
        import bulletinManager
        import bulletinManagerAm
        from PullFTP import PullFTP

        sleep_sec = 1
        if self.source.type == 'pull-bulletin' or self.source.pull_script != None:
            sleep_sec = self.source.pull_sleep

        bullManager = bulletinManager.bulletinManager(
            self.ingestDir, self.logger, self.ingestDir, 99999, '\n',
            self.source.extension, self.source.routingTable,
            self.source.mapEnteteDelai, self.source,
            self.source.addStationInFilename)

        if self.source.bulletin_type == 'am':
            bullManager = bulletinManagerAm.bulletinManagerAm(
                self.ingestDir, self.logger, self.ingestDir, 99999, '\n',
                self.source.extension, self.source.routingTable,
                self.source.addSMHeader, PXPaths.STATION_TABLE,
                self.source.mapEnteteDelai, self.source,
                self.source.addStationInFilename)

        if self.source.nodups:
            self.fileCache = CacheManager(maxEntries=self.source.cache_size,
                                          timeout=8 * 3600)

        reader = DiskReader(bullManager.pathSource, self.source.batch,
                            self.source.validation,
                            self.source.patternMatching, self.source.mtime,
                            False, self.source.logger, self.source.sorter,
                            self.source)
        while True:
            # If a SIGHUP signal is received ...
            if igniter.reloadMode == True:
                # We assign the defaults, reread the configuration file for the source
                # and reread all configuration files for the clients (all of this in __init__)
                if self.source.type == 'filter-bulletin':
                    self.source.__init__(self.source.name, self.source.logger,
                                         True, True)
                else:
                    self.source.__init__(self.source.name, self.source.logger)

                bullManager = bulletinManager.bulletinManager(
                    self.ingestDir, self.logger, self.ingestDir, 99999, '\n',
                    self.source.extension, self.source.routingTable,
                    self.source.mapEnteteDelai, self.source,
                    self.source.addStationInFilename)

                if self.source.bulletin_type == 'am':
                    bullManager = bulletinManagerAm.bulletinManagerAm(
                        self.ingestDir, self.logger, self.ingestDir, 99999,
                        '\n', self.source.extension, self.source.routingTable,
                        self.source.addSMHeader, PXPaths.STATION_TABLE,
                        self.source.mapEnteteDelai, self.source,
                        self.source.addStationInFilename)

                if self.source.nodups:
                    self.fileCache = CacheManager(
                        maxEntries=self.source.cache_size, timeout=8 * 3600)

                reader = DiskReader(bullManager.pathSource, self.source.batch,
                                    self.source.validation,
                                    self.source.patternMatching,
                                    self.source.mtime, False,
                                    self.source.logger, self.source.sorter,
                                    self.source)

                self.logger.info("Receiver has been reloaded")
                igniter.reloadMode = False

            # pull files in rxq directory if in pull mode
            if self.source.type == 'pull-bulletin' or self.source.pull_script != None:
                files = []
                sleeping = os.path.isfile(PXPaths.RXQ + self.source.name +
                                          '/.sleep')

                if self.source.type == 'pull-bulletin':
                    puller = PullFTP(self.source, self.logger, sleeping)
                    files = puller.get()
                    puller.close()
                elif self.source.pull_script != None:
                    files = self.source.pull_script(self.source, self.logger,
                                                    sleeping)

                if not sleeping:
                    self.logger.debug("Number of files pulled = %s" %
                                      len(files))
                else:
                    self.logger.info("This pull is sleeping")

            # normal diskreader call for files
            reader.read()

            # processing the list if necessary...

            if self.source.lx_execfile != None and len(reader.sortedFiles) > 0:
                sfiles = []
                sfiles.extend(reader.sortedFiles)
                self.logger.info("%d files process with lx_script" %
                                 len(sfiles))
                sortedFiles = self.source.run_lx_script(
                    sfiles, self.source.logger)
                reader.sortedFiles = sortedFiles

            # continue normally
            data = reader.getFilesContent(reader.batch)

            if len(data) == 0:
                time.sleep(sleep_sec)
                continue
            else:
                self.logger.info("%d bulletins will be ingested", len(data))

            # Write (and name correctly) the bulletins to disk, erase them after
            for index in range(len(data)):

                # ignore duplicates if required
                duplicate = self.source.nodups and self.fileCache.find(
                    data[index], 'md5') is not None

                #nb_bytes = len(data[index])
                #self.logger.info("Read %s: %d bytes" % (reader.sortedFiles[index], nb_bytes))
                if not duplicate:

                    # converting the file if necessary
                    if self.source.fx_execfile != None:

                        file = reader.sortedFiles[index]
                        fxfile = self.source.run_fx_script(
                            file, self.source.logger)

                        # conversion did not work
                        if fxfile == None:
                            self.logger.warning(
                                "FX script ignored the file : %s" %
                                os.path.basename(file))
                            os.unlink(file)
                            continue

                        # file already in proper format
                        elif fxfile == file:
                            self.logger.warning(
                                "FX script kept the file as is : %s" %
                                os.path.basename(file))

                        # file converted...
                        else:
                            self.logger.info("FX script modified %s to %s " %
                                             (os.path.basename(file),
                                              os.path.basename(fxfile)))
                            os.unlink(file)
                            fp = open(fxfile, 'r')
                            dx = fp.read()
                            fp.close()
                            reader.sortedFiles[index] = fxfile
                            data[index] = dx

                    # writing/ingesting the bulletin
                    if isinstance(bullManager,
                                  bulletinManagerAm.bulletinManagerAm):
                        bullManager.writeBulletinToDisk(data[index], True)
                    else:
                        bullManager.writeBulletinToDisk(
                            data[index], True, True)

                try:
                    file = reader.sortedFiles[index]
                    os.unlink(file)
                    if duplicate:
                        self.logger.info("suppressed duplicate file %s",
                                         os.path.basename(file))
                    self.logger.debug("%s has been erased",
                                      os.path.basename(file))
                except OSError, e:
                    (type, value, tb) = sys.exc_info()
                    self.logger.error(
                        "Unable to unlink %s ! Type: %s, Value: %s" %
                        (reader.sortedFiles[index], type, value))
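
Once the reload, pull and FX/LX branches are stripped away, Examples 11 and 12 both come down to the same polling loop: read() refreshes reader.sortedFiles, getFilesContent() returns at most one batch of file bodies, and each source file is unlinked after it has been handed off. The bare-bones sketch below keeps only that skeleton; the ingest callback and the sleep_sec parameter are illustrative stand-ins for writeBulletinToDisk()/ingestFile() and the source's pull_sleep setting.

import os
import time

def pollLoop(reader, ingest, sleep_sec=1):
    # reader: a DiskReader built as in the examples above
    # ingest: callable taking one bulletin body (stand-in for writeBulletinToDisk)
    while True:
        reader.read()                                 # rescan the queue directory
        data = reader.getFilesContent(reader.batch)   # at most one batch of file bodies
        if len(data) == 0:
            time.sleep(sleep_sec)
            continue
        for index in range(len(data)):
            ingest(data[index])                       # hand the bulletin off
            os.unlink(reader.sortedFiles[index])      # then drop the source file
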
Example No. 12
    def ingestSingleFile(self, igniter):
        from DiskReader import DiskReader
        from DirectRoutingParser import DirectRoutingParser
        from PullFTP import PullFTP

        if self.source.routemask:
            self.drp = DirectRoutingParser(self.source.routingTable,
                                           self.allNames, self.logger,
                                           self.source.routing_version)
            self.drp.parse()

        if self.source.nodups:
            self.fileCache = CacheManager(maxEntries=self.source.cache_size,
                                          timeout=8 * 3600)

        reader = DiskReader(self.ingestDir, self.source.batch,
                            self.source.validation,
                            self.source.patternMatching, self.source.mtime,
                            False, self.source.logger, self.source.sorter,
                            self.source)

        sleep_sec = 1
        if self.source.type == 'pull-file' or self.source.pull_script != None:
            sleep_sec = self.source.pull_sleep

        while True:
            if igniter.reloadMode == True:
                # We assign the defaults, reread the configuration file for the source
                # and reread all configuration files for the clients (all of this in __init__)
                if self.source.type == 'filter':
                    self.source.__init__(self.source.name, self.source.logger,
                                         True, True)
                else:
                    self.source.__init__(self.source.name, self.source.logger)

                if self.source.routemask:
                    self.drp = DirectRoutingParser(self.source.routingTable,
                                                   self.allNames, self.logger)
                    self.drp.parse()

                if self.source.nodups:
                    self.fileCache = CacheManager(
                        maxEntries=self.source.cache_size, timeout=8 * 3600)

                reader = DiskReader(self.ingestDir, self.source.batch,
                                    self.source.validation,
                                    self.source.patternMatching,
                                    self.source.mtime, False,
                                    self.source.logger, self.source.sorter,
                                    self.source)
                self.logger.info("Receiver has been reloaded")
                igniter.reloadMode = False

            # pull files in rxq directory if in pull mode
            if self.source.type == 'pull-file' or self.source.pull_script != None:
                files = []
                sleeping = os.path.isfile(PXPaths.RXQ + self.source.name +
                                          '/.sleep')

                if self.source.type == 'pull-file':
                    puller = PullFTP(self.source, self.logger, sleeping)
                    files = puller.get()
                    puller.close()
                elif self.source.pull_script != None:
                    files = self.source.pull_script(self.source, self.logger,
                                                    sleeping)

                if not sleeping:
                    self.logger.debug("Number of files pulled = %s" %
                                      len(files))
                else:
                    self.logger.info("This pull is sleeping")

            # normal diskreader call for files
            reader.read()
            if len(reader.sortedFiles) <= 0:
                time.sleep(sleep_sec)
                continue

            sortedFiles = reader.sortedFiles[:self.source.batch]

            # processing the list if necessary...

            if self.source.lx_execfile != None:
                sfiles = []
                sfiles.extend(sortedFiles)
                self.logger.info("%d files process with lx_script" %
                                 len(sfiles))
                sortedFiles = self.source.run_lx_script(
                    sfiles, self.source.logger)

            self.logger.info("%d files will be ingested" % len(sortedFiles))

            for file in sortedFiles:
                self.ingestFile(file)