Example #1
    def _findTAF(self, stations, date):
        from StationParser import StationParser
        from TAFParser import TAFParser

        results = [
        ]  # ex: [('CYOW', TAF_LINE, TAF_HEADER_TIME, TAF_FILE, TAF_FILE_TIME), ('CYUL', ...)]

        sp = StationParser(PXPaths.ETC + 'stations_TAF.conf')
        sp.parse()

        for station in stations:
            headers = sp.headers.get(station, [])
            filesToParse = self._getFilesToParse(PXPaths.DB + date + '/FC/',
                                                 headers)
            filesToParse.extend(
                self._getFilesToParse(PXPaths.DB + date + '/FT/', headers))
            #print("In findTAF, len(filesToParse) = %d" % len(filesToParse))
            theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(
                TAFParser(''), filesToParse, station)

            if theLine:
                theLine += '='

            results.append(
                (station, theLine, bestHeaderTime, theFile, bestFileTime))

        return results
Example #2
    def _findFD(self, stations, fdtype, date):
        from StationParser import StationParser
        from FDParser import FDParser

        results = [
        ]  # ex: [('CYOW', FD_LINE, FD_HEADER_TIME, FD_FILE, FD_FILE_TIME), ('CYUL', ...)]

        sp = StationParser(PXPaths.ETC + 'stations_FD.conf')
        sp.parse()

        if fdtype in ['FD1', 'FD2', 'FD3']:
            number = fdtype[-1]
        else:
            number = ''

        for station in stations:
            headers = sp.headers.get(station, [])
            headers.sort()

            lowHeaders = []
            highHeaders = []

            for header in headers:
                if header in eval('DBSearcher.LOW' + number):
                    lowHeaders.append(header)
                elif header in eval('DBSearcher.HIGH' + number):
                    highHeaders.append(header)

            for header in lowHeaders + highHeaders:
                filesToParse = self._getFilesToParse(
                    PXPaths.DB + date + '/FD/', [header])
                #print("In findFD, len(filesToParse) = %d" % len(filesToParse))
                theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(
                    FDParser(''), filesToParse, station)
                if theLine:
                    bigTitle = FDParser('').getFDTitle(theFile)
                    #print("BIG TITLE: \n%s" % bigTitle)
                    #print theFile
                    #print "theLine: %s" % theLine
                    theLine = bigTitle + theLine

                results.append(
                    (station, theLine, bestHeaderTime, theFile, bestFileTime))

            if lowHeaders == highHeaders == []:
                results.append((station, None, 0, None, 0))

        return results
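
The low/high split above resolves the header tables with eval('DBSearcher.LOW' + number). A minimal, self-contained sketch of the same lookup done with getattr instead of eval; the DBSearcher constants and the splitLowHigh helper below are placeholders for illustration, not the real px tables:

class DBSearcher:
    # Placeholder header sets; the real LOW/HIGH (and LOW1..3/HIGH1..3)
    # tables are defined by the actual DBSearcher class.
    LOW = ['FDCN01 CWAO']
    HIGH = ['FDCN02 CWAO']

def splitLowHigh(headers, number=''):
    # getattr(DBSearcher, 'LOW' + number) does the same job as
    # eval('DBSearcher.LOW' + number) without evaluating a string.
    low = getattr(DBSearcher, 'LOW' + number)
    high = getattr(DBSearcher, 'HIGH' + number)
    lowHeaders = [h for h in headers if h in low]
    highHeaders = [h for h in headers if h in high]
    return lowHeaders, highHeaders

print(splitLowHigh(['FDCN01 CWAO', 'FDCN02 CWAO']))  # (['FDCN01 CWAO'], ['FDCN02 CWAO'])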
Example #3
    def __init__(self, logger=None, sourlient=None, reloadMode=False):
        
        AFTNPaths.normalPaths(sourlient.name)
        PXPaths.normalPaths()

        self.logger = logger         # Logger object
        self.sourlient = sourlient   # Sourlient object

        self.name = sourlient.name                       # Transceiver's name
        self.stationID = sourlient.stationID             # Value read from config. file
        self.otherStationID = sourlient.otherStationID   # Provider (MHS) Station ID
        self.address = sourlient.address                 # 8-letter group identifying the message originator (CYHQUSER)
        self.otherAddress = sourlient.otherAddress       # 8-letter group identifying the provider's address (CYHQMHSN)
        self.routingTable = sourlient.routingTable       # Routing table name
        self.subscriber = sourlient.subscriber           # Boolean indicating if this is a subscriber or a provider

        self.bullManager = bulletinManager(PXPaths.RXQ + self.name,
                                      self.logger,
                                      PXPaths.RXQ + self.name,
                                      9999,
                                      '\n',
                                      self.sourlient.extension,
                                      self.routingTable, 
                                      None,
                                      self.sourlient,
                                      True) 

        self.drp = self.bullManager.drp
        self.sp = StationParser(PXPaths.STATION_TABLE, logger)
        self.sp.parse()
        self.priorities = {'1':'FF', '2':'FF', '3':'GG', '4':'GG', '5':'GG'}

        if not reloadMode:
            self.afterInit()
Example #4
    def _findFD(self, stations, fdtype, date):
        from StationParser import StationParser
        from FDParser import FDParser

        results = [] # ex: [('CYOW', FD_LINE, FD_HEADER_TIME, FD_FILE, FD_FILE_TIME), ('CYUL', ...)]

        sp = StationParser(PXPaths.ETC + 'stations_FD.conf')
        sp.parse()

        if fdtype in ['FD1', 'FD2', 'FD3']:
            number = fdtype[-1]
        else:
            number = '' 

        for station in stations:
            headers = sp.headers.get(station, [])
            headers.sort()

            lowHeaders = []
            highHeaders = []

            for header in headers:
                if header in eval('DBSearcher.LOW' + number):
                    lowHeaders.append(header)
                elif header in eval('DBSearcher.HIGH' + number):
                    highHeaders.append(header)

            for header in lowHeaders + highHeaders:
                filesToParse = self._getFilesToParse(PXPaths.DB + date + '/FD/', [header])
                #print("In findFD, len(filesToParse) = %d" % len(filesToParse))
                theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(FDParser(''), filesToParse, station)
                if theLine:
                    bigTitle = FDParser('').getFDTitle(theFile)
                    #print("BIG TITLE: \n%s" % bigTitle)
                    #print theFile
                    #print "theLine: %s" % theLine
                    theLine = bigTitle + theLine

                results.append((station, theLine, bestHeaderTime, theFile, bestFileTime))

            if lowHeaders == highHeaders == []:
                results.append((station, None, 0, None, 0))

        return results
Example #5
    def _findTAF(self, stations, date):
        from StationParser import StationParser
        from TAFParser import TAFParser

        results = [] # ex: [('CYOW', TAF_LINE, TAF_HEADER_TIME, TAF_FILE, TAF_FILE_TIME), ('CYUL', ...)]

        sp = StationParser(PXPaths.ETC + 'stations_TAF.conf')
        sp.parse()

        for station in stations:
            headers = sp.headers.get(station, [])
            filesToParse = self._getFilesToParse(PXPaths.DB + date + '/FC/', headers)
            filesToParse.extend(self._getFilesToParse(PXPaths.DB + date + '/FT/', headers))
            #print("In findTAF, len(filesToParse) = %d" % len(filesToParse))
            theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(TAFParser(''), filesToParse, station)

            if theLine:
                theLine += '='

            results.append((station, theLine, bestHeaderTime, theFile, bestFileTime))

        return results
Example #6
    def __init__(self, logger=None, sourlient=None, reloadMode=False):

        AFTNPaths.normalPaths(sourlient.name)
        PXPaths.normalPaths()

        self.logger = logger  # Logger object
        self.sourlient = sourlient  # Sourlient object

        self.name = sourlient.name  # Transceiver's name
        self.stationID = sourlient.stationID  # Value read from config. file
        self.otherStationID = sourlient.otherStationID  # Provider (MHS) Station ID
        self.address = sourlient.address  # 8-letter group identifying the message originator (CYHQUSER)
        self.otherAddress = sourlient.otherAddress  # 8-letter group identifying the provider's address (CYHQMHSN)
        self.routingTable = sourlient.routingTable  # Routing table name
        self.subscriber = sourlient.subscriber  # Boolean indicating if this is a subscriber or a provider

        self.bullManager = bulletinManager(PXPaths.RXQ + self.name,
                                           self.logger,
                                           PXPaths.RXQ + self.name, 9999, '\n',
                                           self.sourlient.extension,
                                           self.routingTable, None,
                                           self.sourlient, True)

        self.drp = self.bullManager.drp
        self.sp = StationParser(PXPaths.STATION_TABLE, logger)
        self.sp.parse()
        self.priorities = {
            '1': 'FF',
            '2': 'FF',
            '3': 'GG',
            '4': 'GG',
            '5': 'GG'
        }

        if not reloadMode:
            self.afterInit()
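
The priorities dictionary above maps a one-digit priority to an AFTN priority indicator (FF or GG here; AFTN also defines SS, DD and KK). A tiny usage sketch with a hypothetical fallback, assuming unknown digits should degrade to routine (GG) traffic:

priorities = {'1': 'FF', '2': 'FF', '3': 'GG', '4': 'GG', '5': 'GG'}

def priorityIndicator(digit, default='GG'):
    # Falling back to GG is an assumption of this sketch; the real manager
    # may treat unknown digits differently.
    return priorities.get(digit, default)

print(priorityIndicator('2'))  # FF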
Example #7
        headers.sort()

        for header in headers:
            # print "%s: %s" % (header, self.stations[header])
            stations = ""
            for station in self.stations[header]:
                if stations:
                    stations += " " + station
                else:
                    stations = station

            coll = ""

            if self.stationsColl:
                if self.stationsColl.has_key(header):
                    coll = "COLL"

            line = "%s:%s:%s:\n" % (header, coll, stations)
            self.file.write(line)


if __name__ == "__main__":
    from StationParser import StationParser

    sp = StationParser("/apps/px/etc/stations.conf")
    sp.parse()

    sfc = StationFileCreator(stations=sp.stations, stationsColl=sp.stationsColl)
    # sfc.appendToFile()
    # sfc.closeFile()
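
Each line written by the loop above has the form HEADER:COLL:STATION1 STATION2 ...: (COLL is empty when the header is not a collection). A minimal sketch of reading one of these lines back; parseStationLine is a hypothetical helper, not part of StationParser:

def parseStationLine(line):
    # 'SACN31 CWAO:COLL:CYOW CYUL:' -> ('SACN31 CWAO', True, ['CYOW', 'CYUL'])
    header, coll, stations = line.rstrip('\n').split(':')[:3]
    return header, coll == 'COLL', stations.split()

print(parseStationLine('SACN31 CWAO:COLL:CYOW CYUL:\n'))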
Example #8
    def _findSA(self, stations, date):
        # Partial header request (Type + station(s))
        # ex: SA CYOW CYUL
        # Note: Once we find the better match, we take the header we found (ex: SACN31 CWAO) and we replace the 
        # A by a P (ex: SPCN31 CWAO) and try to find a speci for the station. The speci must be between the full 
        # hour of the SA and the request time.
        from StationParser import StationParser
        from SAParser import SAParser

        results = [] # ex: [('CYOW', SA_LINE, SA_HEADER_TIME, SA_FILE, SA_FILE_TIME, SP_LINE, SP_HEADER_TIME, SP_FILE, SP_FILE_TIME), ('CYUL', ...)]

        sp = StationParser(PXPaths.ETC + 'stations_SA.conf')
        sp.parse()

        for station in stations:
            threeCharHeaders = []

            if len(station) == 3:
                #print ("%s => we will search for %s first, if we obtain no results, we will search for %s" % (station, 'C' + station, station))
                threeCharHeaders = sp.headers.get(station, [])
                station = 'C' + station
                headers = sp.headers.get(station, [])
                
            elif station[0] == 'C':
                #print("%s is a canadian station" % station)
                headers = sp.headers.get(station, []) 

            elif station[0] == 'K':
                #print("%s is an american station" % station)
                headers = sp.headers.get(station, [])

            else:
                #print("%s is an international station" % station)
                headers = sp.headers.get(station, [])

            filesToParse = self._getFilesToParse(PXPaths.DB + date + '/SA/', headers, DBSearcher.EXCLUDED_SOURCES)
            theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(SAParser(''), filesToParse, station)

            if not theLine and threeCharHeaders:
                # If not successful at finding the 4 chars station when the original request was for a 3 chars station
                # we try the 3 chars case
                #print 'We are searching for the 3 chars station'
                station = station[1:]
                filesToParse = self._getFilesToParse(PXPaths.DB + date + '/SA/', threeCharHeaders, DBSearcher.EXCLUDED_SOURCES)
                theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(SAParser(''), filesToParse, station)

            if theLine:
                theLine += '='
                parts = os.path.basename(theFile).split('_')
                header = parts[0] + ' ' + parts[1]
                speciLine, speciHeaderTime, speciFile, speciFileTime = self._findSpeci(station, header, bestHeaderTime, date)

                if speciHeaderTime and (speciHeaderTime < bestHeaderTime):
                    surround = 30*'=' 
                    #print 'Speci found has been rejected (%s < %s)' % (speciHeaderTime, bestHeaderTime)
                    speciLine, speciHeaderTime, speciFile, speciFileTime = None, 0, None, 0
                    #print "%s END SPECI INFOS %s\n" % (surround, surround)

            else:
                speciLine, speciHeaderTime, speciFile, speciFileTime = None, 0, None, 0

            results.append((station, theLine, bestHeaderTime, theFile, bestFileTime, speciLine, speciHeaderTime, speciFile, speciFileTime))

        return results
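
The comment at the top of _findSA describes the SPECI lookup: take the SA header that matched (ex: SACN31 CWAO), replace the A with a P (SPCN31 CWAO) and keep the SPECI only if it is not older than the SA. A minimal sketch of that header swap and the rejection test used above; both helpers are hypothetical names for illustration:

def saHeaderToSpeciHeader(header):
    # 'SACN31 CWAO' -> 'SPCN31 CWAO': only the second letter changes.
    return header[0] + 'P' + header[2:]

def keepSpeci(speciHeaderTime, saHeaderTime):
    # Mirrors the rejection branch above: a SPECI whose header time is
    # older than the SA's is dropped.
    return bool(speciHeaderTime) and speciHeaderTime >= saHeaderTime

print(saHeaderToSpeciHeader('SACN31 CWAO'))  # SPCN31 CWAO
print(keepSpeci(141210, 141200))             # True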
Example #9
class MessageManager:

    """
    A typical message:

    <SOH> ABC0003 14033608<CR><LF>                               <-- Heading line
    GG CYYCYFYX EGLLZRZX<CR><LF>                                 <-- Destination address line
    140335 CYEGYFYX<CR><LF>                                      <-- Origin address line

    <STX>040227 NOTAMN CYYC CALGARY INTL<CR><LF>                 <-- Start of text signal (<STX>)
    CYYC ILS 16 AND 34 U/S 0409141530<CR><LF>                    <-- Message text
    TIL 0409141800<CR><LF>

    <VT><ETX>                                                    <-- End of message signal

    """

    def __init__(self, logger=None, sourlient=None, reloadMode=False):
        
        AFTNPaths.normalPaths(sourlient.name)
        PXPaths.normalPaths()

        self.logger = logger         # Logger object
        self.sourlient = sourlient   # Sourlient object

        self.name = sourlient.name                       # Transceiver's name
        self.stationID = sourlient.stationID             # Value read from config. file
        self.otherStationID = sourlient.otherStationID   # Provider (MHS) Station ID
        self.address = sourlient.address                 # 8-letter group identifying the message originator (CYHQUSER)
        self.otherAddress = sourlient.otherAddress       # 8-letter group identifying the provider's address (CYHQMHSN)
        self.routingTable = sourlient.routingTable       # Routing table name
        self.subscriber = sourlient.subscriber           # Boolean indicating if this is a subscriber or a provider

        self.bullManager = bulletinManager(PXPaths.RXQ + self.name,
                                      self.logger,
                                      PXPaths.RXQ + self.name,
                                      9999,
                                      '\n',
                                      self.sourlient.extension,
                                      self.routingTable, 
                                      None,
                                      self.sourlient,
                                      True) 

        self.drp = self.bullManager.drp
        self.sp = StationParser(PXPaths.STATION_TABLE, logger)
        self.sp.parse()
        self.priorities = {'1':'FF', '2':'FF', '3':'GG', '4':'GG', '5':'GG'}

        if not reloadMode:
            self.afterInit()

    def afterInit(self):
        self.messageIn = None  # Last AFTN message received
        self.messageOut = None # Last AFTN message sent
        self.fromDisk = True   # Changed to False for Service Message created on the fly
        self.filenameToSend = None  # Filename of the file we want to send or just sent
        self.filenameToErase = None # Path + filename of the file we want to erase (ack just received) 

        self.type = None       # Message type. Value must be in ['AFTN', 'SVC', 'RF', 'RQ', 'PRI_DESTADD_TEXT', None]
        self.header = None     # Message WMO Header
        self.priority = None   # Priority indicator (SS, DD, FF, GG or KK)
        self.destAddress = []  # 8-letter group, max. 21 addresses
        self.CSNJustReset = False 
        try:
            self.CSN = self.state.CSN
            self.logger.info("CSN (%s) has been taken from AFTN State" % self.CSN)
        except: 
            self.CSN = '0000'      # Channel sequence number, 4 digits (ex: 0003)
        self.filingTime = None # 6-digits DDHHMM (ex:140335) indicating date and time of filing the message for transmission.
        self.dateTime = None   # 8-digits DDHHMMSS (ex:14033608)
        self.readBuffer = ''   # Buffer where we put stuff read from the socket
        
        # Queueing Service Messages when waiting for an ack before sending
        self.serviceQueue = []

        # Big message support (sending)
        self.partsToSend = []                       # Text parts of the broken message
        self.numberOfParts = len(self.partsToSend)  # Number of parts in which a long message has been divided.
        self.nextPart = 0                           # Always 0 for messages that are not bigger than the defined max.

        # Big message support (receiving)
        self.receivedParts = []
        self.notCompletelyReceived = False
        self.generalPartsRegex = re.compile(r"//END PART \d\d//")
        self.lastPartRegex = re.compile(r"//END PART \d\d/\d\d//")

        # Ack support (ack we receive because of the message we have sent)
        try:
            self.lastAckReceived = self.state.lastAckReceived  
            self.logger.info("lastAckReceived (%s) has been taken from AFTN State" % self.lastAckReceived)

            self.waitingForAck = self.state.waitingForAck 
            self.logger.info("waitingForAck (%s) has been taken from AFTN State" % self.waitingForAck)
        except:
            self.lastAckReceived = None   # None or the transmitID
            self.waitingForAck = None     # None or the transmitID 

        self.sendingInfos = (0, None) # Number of times a message has been sent and the sending time.
        self.maxAckTime =  self.sourlient.maxAckTime  # Maximum time (in seconds) we wait for an ack, before resending.
        self.maxSending = 1   # Maximum number of sendings of a message
        self.ackUsed =  self.sourlient.ackUsed  # We can use ack or not (testing purposes only)
        self.totAck = 0       # Count the number of ack (testing purpose only)

        # CSN verification (receiving)
        try:
            self.waitedTID = self.state.waitedTID
        except:
            self.waitedTID = self.otherStationID + '0001'  # Initially (when the program starts) we are not sure what TID is expected
            

        # Functionality testing switches
        self.resendSameMessage = True

        # Read Buffer management
        self.unusedBuffer = ''        # Part of the buffer that was not used

        # Persistent state
        min = 60
        now = time.mktime(time.gmtime())

        if self.subscriber:
            try:
                lastUpdate = os.stat(AFTNPaths.STATE)[8]
                diff = now - lastUpdate
                if diff < 60 * min:
                    self.state = self.unarchiveObject(AFTNPaths.STATE)
                    self.updateFromState(self.state)
                else: 
                    self.logger.warning("Archive state not read because too old (%i minutes)" % int(diff/60))
                    raise
            except:
                self.state = StateAFTN()
                self.state.fill(self)
                self.archiveObject(AFTNPaths.STATE, self.state)
        else:
            try:
                self.state = self.unarchiveObject(AFTNPaths.STATE)
                self.updateFromState(self.state)
            except:
                self.state = StateAFTN()
                self.state.fill(self)
                self.archiveObject(AFTNPaths.STATE, self.state)

    def resetCSN(self):
        hhmm = dateLib.getTodayFormatted('%H%M')
        hh = hhmm[:2]
        mm = hhmm[2:]

        if hh == '00' and mm <= '05' and not self.CSNJustReset:
            self.CSN = '0000'
            self.CSNJustReset = True
            self.logger.info('CSN has been reset to 0001')
        elif mm > '05':
            self.CSNJustReset = False
        
    def updateFromState(self, state):
        self.CSN = state.CSN
        self.waitedTID = state.waitedTID

        self.lastAckReceived = state.lastAckReceived
        self.waitingForAck = state.waitingForAck
    
    def ingest(self, bulletin):
        self.bullManager.writeBulletinToDisk(bulletin)

    def completeHeader(self, message):
        import dateLib
        wmoMessage = ''
        theHeader = ''
        allLines = []
        # We remove blank lines
        for  line in message.textLines:
            if line:
                allLines.append(line)
        
        # We don't have enough lines
        if len(allLines) < 2:
            return ['\n'.join(message.textLines) + '\n']

        messageType = allLines[0][:2]
        station = allLines[1].split()[0]

        headers = self.sp.headers.get(station, [])
        for header in headers:
            if header[:2] == messageType:
                theHeader = header

        if theHeader:
            BBB = ''
            firstLine = allLines[0].split()
            timestamp = firstLine[1]
            if len(firstLine) >= 3:
                BBB = firstLine[2]

            self.logger.debug("Type: %s, Station: %s, Headers: %s, TheHeader: %s, Timestamp: %s, BBB = %s" % (messageType, station, headers, theHeader, timestamp, BBB))
            allLines[0] = theHeader + ' ' + timestamp
            if BBB:
                allLines[0] += ' ' + BBB
            allLines.insert(1, 'AAXX ' + timestamp[:4] + '4')
        else:
            return ['\n'.join(message.textLines) + '\n']
            
        return ['\n'.join(allLines) + '\n']

    def addHeaderToMessage(self, message, textLines=None):
        """
        When no WMO header is present in the text part of an AFTN Message, we will create one 
        for each destination address in the message.
        ex: if self.drp.aftnMap['CWAOWXYZ'] == 'SACN32', the header will be 'SACN32 CWAO YYGGgg'
        where YY= day of the month, GG=hours and gg=minutes
        This method is only used at reception.

        textLines is not None for big messages (text is not in the message, but in a supplementary
        variable).
        """
        import dateLib
        wmoMessages = []
        addresses = message.destAddress
        default = self.drp.aftnMap.get('DEFAULT')
        timestamp = dateLib.getYYGGgg()

        destLine = message.createDestinationAddressLine().strip() 
        originLine = message.createOriginAddressLine().strip() 

        destOriginLines = [destLine, originLine]

        self.logger.debug("Addresses: %s" % addresses)

        for address in addresses:
            header = self.drp.aftnMap.get(address, default) + " " + address[0:4] + " " + timestamp
            headerBlock = [header] + destOriginLines

            #self.logger.info("Header in addHeader: %s" % header)
            if textLines:
                wmoMessages.append('\n'.join(headerBlock + textLines) + '\n')
            else:
                wmoMessages.append('\n'.join(headerBlock + message.textLines) + '\n')
        return wmoMessages

    def doSpecialOrders(self, path):
        # Stop, restart, reload, deconnect, connect could be put here?
        reader = DiskReader(path)
        reader.read()
        dataFromFiles = reader.getFilenamesAndContent()
        for index in range(len(dataFromFiles)): 
            words = dataFromFiles[index][0].strip().split() 
            self.logger.info("Special Order: %s" % (dataFromFiles[index][0].strip()))

            if words[0] == 'outCSN':
                if words[1] == '+':
                    self.nextCSN()
                    self.logger.info("CSN = %s" % self.CSN)
                elif words[1] == '-':
                    # This case is only done for testing purpose. It is not complete and not correct when CSN 
                    # value is 0 or 1
                    self.nextCSN(str(int(self.CSN) - 2))
                    self.logger.info("CSN = %s" % self.CSN)
                elif words[1] == 'print':
                    self.logger.info("CSN = %s" % self.CSN)
                else:
                    # We suppose it's a number, we don't verify!!
                    self.nextCSN(words[1])
                    self.logger.info("CSN = %s" % self.CSN)

            elif words[0] == 'inCSN':
                if words[1] == '+':
                    self.calcWaitedTID(self.waitedTID)
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                elif words[1] == '-':
                    # This case is only done for testing purpose. It is not complete and not correct when waited TID
                    # value is 0 or 1
                    self.calcWaitedTID(self.otherStationID + "%04d" % (int(self.waitedTID[3:]) - 2))
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                elif words[1] == 'print':
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                else:
                    # We suppose it's a number, we don't verify!!
                    self.calcWaitedTID(self.otherStationID + "%04d" % int(words[1]))
                    self.logger.info("Waited TID = %s" % self.waitedTID)

            elif words[0] == 'ackWaited':
                if words[1] == 'print':
                    self.logger.info("Waiting for ack: %s" % self.getWaitingForAck())
                else:
                    self.setWaitingForAck(words[1])
                    self.incrementSendingInfos()
            elif words[0] == 'ackNotWaited':
                self.setWaitingForAck(None)
                self.resetSendingInfos()
                self.updatePartsToSend()
            elif words[0] == 'ackUsed':
                self.ackUsed = words[1] == 'True' or words[1] == 'true'
            elif words[0] == 'printState':
                self.logger.info(self.state.infos()) 
            else:
                pass

            try:
                os.unlink(dataFromFiles[index][1])
                self.logger.debug("%s has been erased", os.path.basename(dataFromFiles[index][1]))
            except OSError, e:
                (type, value, tb) = sys.exc_info()
                self.logger.error("Unable to unlink %s ! Type: %s, Value: %s" % (dataFromFiles[index][1], type, value))
Example #10
class MessageManager:
    """
    A typical message:

    <SOH> ABC0003 14033608<CR><LF>                               <-- Heading line
    GG CYYCYFYX EGLLZRZX<CR><LF>                                 <-- Destination address line
    140335 CYEGYFYX<CR><LF>                                      <-- Origin address line

    <STX>040227 NOTAMN CYYC CALGARY INTL<CR><LF>                 <-- Start of text signal (<STX>)
    CYYC ILS 16 AND 34 U/S 0409141530<CR><LF>                    <-- Message text
    TIL 0409141800<CR><LF>

    <VT><ETX>                                                    <-- End of message signal

    """
    def __init__(self, logger=None, sourlient=None, reloadMode=False):

        AFTNPaths.normalPaths(sourlient.name)
        PXPaths.normalPaths()

        self.logger = logger  # Logger object
        self.sourlient = sourlient  # Sourlient object

        self.name = sourlient.name  # Transceiver's name
        self.stationID = sourlient.stationID  # Value read from config. file
        self.otherStationID = sourlient.otherStationID  # Provider (MHS) Station ID
        self.address = sourlient.address  # 8-letter group identifying the message originator (CYHQUSER)
        self.otherAddress = sourlient.otherAddress  # 8-letter group identifying the provider's address (CYHQMHSN)
        self.routingTable = sourlient.routingTable  # Routing table name
        self.subscriber = sourlient.subscriber  # Boolean indicating if this is a subscriber or a provider

        self.bullManager = bulletinManager(PXPaths.RXQ + self.name,
                                           self.logger,
                                           PXPaths.RXQ + self.name, 9999, '\n',
                                           self.sourlient.extension,
                                           self.routingTable, None,
                                           self.sourlient, True)

        self.drp = self.bullManager.drp
        self.sp = StationParser(PXPaths.STATION_TABLE, logger)
        self.sp.parse()
        self.priorities = {
            '1': 'FF',
            '2': 'FF',
            '3': 'GG',
            '4': 'GG',
            '5': 'GG'
        }

        if not reloadMode:
            self.afterInit()

    def afterInit(self):
        self.messageIn = None  # Last AFTN message received
        self.messageOut = None  # Last AFTN message sent
        self.fromDisk = True  # Changed to False for Service Message created on the fly
        self.filenameToSend = None  # Filename of the file we want to send or just sent
        self.filenameToErase = None  # Path + filename of the file we want to erase (ack just received)

        self.type = None  # Message type. Value must be in ['AFTN', 'SVC', 'RF', 'RQ', 'PRI_DESTADD_TEXT', None]
        self.header = None  # Message WMO Header
        self.priority = None  # Priority indicator (SS, DD, FF, GG or KK)
        self.destAddress = []  # 8-letter group, max. 21 addresses
        self.CSNJustReset = False
        try:
            self.CSN = self.state.CSN
            self.logger.info("CSN (%s) has been taken from AFTN State" %
                             self.CSN)
        except:
            self.CSN = '0000'  # Channel sequence number, 4 digits (ex: 0003)
        self.filingTime = None  # 6-digits DDHHMM (ex:140335) indicating date and time of filing the message for transmission.
        self.dateTime = None  # 8-digits DDHHMMSS (ex:14033608)
        self.readBuffer = ''  # Buffer where we put stuff read from the socket

        # Queueing Service Messages when waiting for an ack before sending
        self.serviceQueue = []

        # Big message support (sending)
        self.partsToSend = []  # Text parts of the broken message
        self.numberOfParts = len(
            self.partsToSend
        )  # Number of parts in which a long message has been divided.
        self.nextPart = 0  # Always 0 for messages that are not bigger than the defined max.

        # Big message support (receiving)
        self.receivedParts = []
        self.notCompletelyReceived = False
        self.generalPartsRegex = re.compile(r"//END PART \d\d//")
        self.lastPartRegex = re.compile(r"//END PART \d\d/\d\d//")

        # Ack support (ack we receive because of the message we have sent)
        try:
            self.lastAckReceived = self.state.lastAckReceived
            self.logger.info(
                "lastAckReceived (%s) has been taken from AFTN State" %
                self.lastAckReceived)

            self.waitingForAck = self.state.waitingForAck
            self.logger.info(
                "waitingForAck (%s) has been taken from AFTN State" %
                self.waitingForAck)
        except:
            self.lastAckReceived = None  # None or the transmitID
            self.waitingForAck = None  # None or the transmitID

        self.sendingInfos = (
            0, None
        )  # Number of times a message has been sent and the sending time.
        self.maxAckTime = self.sourlient.maxAckTime  # Maximum time (in seconds) we wait for an ack, before resending.
        self.maxSending = 1  # Maximum number of sendings of a message
        self.ackUsed = self.sourlient.ackUsed  # We can use ack or not (testing purposes only)
        self.totAck = 0  # Count the number of ack (testing purpose only)

        # CSN verification (receiving)
        try:
            self.waitedTID = self.state.waitedTID
        except:
            self.waitedTID = self.otherStationID + '0001'  # Initially (when the program starts) we are not sure what TID is expected

        # Functionality testing switches
        self.resendSameMessage = True

        # Read Buffer management
        self.unusedBuffer = ''  # Part of the buffer that was not used

        # Persistent state
        min = 60
        now = time.mktime(time.gmtime())

        if self.subscriber:
            try:
                lastUpdate = os.stat(AFTNPaths.STATE)[8]
                diff = now - lastUpdate
                if diff < 60 * min:
                    self.state = self.unarchiveObject(AFTNPaths.STATE)
                    self.updateFromState(self.state)
                else:
                    self.logger.warning(
                        "Archive state not read because too old (%i minutes)" %
                        int(diff / 60))
                    raise
            except:
                self.state = StateAFTN()
                self.state.fill(self)
                self.archiveObject(AFTNPaths.STATE, self.state)
        else:
            try:
                self.state = self.unarchiveObject(AFTNPaths.STATE)
                self.updateFromState(self.state)
            except:
                self.state = StateAFTN()
                self.state.fill(self)
                self.archiveObject(AFTNPaths.STATE, self.state)

    def resetCSN(self):
        hhmm = dateLib.getTodayFormatted('%H%M')
        hh = hhmm[:2]
        mm = hhmm[2:]

        if hh == '00' and mm <= '05' and not self.CSNJustReset:
            self.CSN = '0000'
            self.CSNJustReset = True
            self.logger.info('CSN has been reset to 0001')
        elif mm > '05':
            self.CSNJustReset = False

    def updateFromState(self, state):
        self.CSN = state.CSN
        self.waitedTID = state.waitedTID

        self.lastAckReceived = state.lastAckReceived
        self.waitingForAck = state.waitingForAck

    def ingest(self, bulletin):
        self.bullManager.writeBulletinToDisk(bulletin)

    def completeHeader(self, message):
        import dateLib
        wmoMessage = ''
        theHeader = ''
        allLines = []
        # We remove blank lines
        for line in message.textLines:
            if line:
                allLines.append(line)

        # We don't have enough lines
        if len(allLines) < 2:
            return ['\n'.join(message.textLines) + '\n']

        messageType = allLines[0][:2]
        station = allLines[1].split()[0]

        headers = self.sp.headers.get(station, [])
        for header in headers:
            if header[:2] == messageType:
                theHeader = header

        if theHeader:
            BBB = ''
            firstLine = allLines[0].split()
            timestamp = firstLine[1]
            if len(firstLine) >= 3:
                BBB = firstLine[2]

            self.logger.debug(
                "Type: %s, Station: %s, Headers: %s, TheHeader: %s, Timestamp: %s, BBB = %s"
                % (messageType, station, headers, theHeader, timestamp, BBB))
            allLines[0] = theHeader + ' ' + timestamp
            if BBB:
                allLines[0] += ' ' + BBB
            allLines.insert(1, 'AAXX ' + timestamp[:4] + '4')
        else:
            return ['\n'.join(message.textLines) + '\n']

        return ['\n'.join(allLines) + '\n']

    def addHeaderToMessage(self, message, textLines=None):
        """
        When no WMO header is present in the text part of an AFTN Message, we will create one 
        for each destination address in the message.
        ex: if self.drp.aftnMap['CWAOWXYZ'] == 'SACN32', the header will be 'SACN32 CWAO YYGGgg'
        where YY= day of the month, GG=hours and gg=minutes
        This method is only used at reception.

        textLines is not None for big messages (text is not in the message, but in a supplementary
        variable).
        """
        import dateLib
        wmoMessages = []
        addresses = message.destAddress
        default = self.drp.aftnMap.get('DEFAULT')
        timestamp = dateLib.getYYGGgg()

        destLine = message.createDestinationAddressLine().strip()
        originLine = message.createOriginAddressLine().strip()

        destOriginLines = [destLine, originLine]

        self.logger.debug("Addresses: %s" % addresses)

        for address in addresses:
            header = self.drp.aftnMap.get(
                address, default) + " " + address[0:4] + " " + timestamp
            headerBlock = [header] + destOriginLines

            #self.logger.info("Header in addHeader: %s" % header)
            if textLines:
                wmoMessages.append('\n'.join(headerBlock + textLines) + '\n')
            else:
                wmoMessages.append('\n'.join(headerBlock + message.textLines) +
                                   '\n')
        return wmoMessages

    def doSpecialOrders(self, path):
        # Stop, restart, reload, deconnect, connect could be put here?
        reader = DiskReader(path)
        reader.read()
        dataFromFiles = reader.getFilenamesAndContent()
        for index in range(len(dataFromFiles)):
            words = dataFromFiles[index][0].strip().split()
            self.logger.info("Special Order: %s" %
                             (dataFromFiles[index][0].strip()))

            if words[0] == 'outCSN':
                if words[1] == '+':
                    self.nextCSN()
                    self.logger.info("CSN = %s" % self.CSN)
                elif words[1] == '-':
                    # This case is only done for testing purpose. It is not complete and not correct when CSN
                    # value is 0 or 1
                    self.nextCSN(str(int(self.CSN) - 2))
                    self.logger.info("CSN = %s" % self.CSN)
                elif words[1] == 'print':
                    self.logger.info("CSN = %s" % self.CSN)
                else:
                    # We suppose it's a number, we don't verify!!
                    self.nextCSN(words[1])
                    self.logger.info("CSN = %s" % self.CSN)

            elif words[0] == 'inCSN':
                if words[1] == '+':
                    self.calcWaitedTID(self.waitedTID)
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                elif words[1] == '-':
                    # This case is only done for testing purpose. It is not complete and not correct when waited TID
                    # value is 0 or 1
                    self.calcWaitedTID(self.otherStationID + "%04d" %
                                       (int(self.waitedTID[3:]) - 2))
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                elif words[1] == 'print':
                    self.logger.info("Waited TID = %s" % self.waitedTID)
                else:
                    # We suppose it's a number, we don't verify!!
                    self.calcWaitedTID(self.otherStationID +
                                       "%04d" % int(words[1]))
                    self.logger.info("Waited TID = %s" % self.waitedTID)

            elif words[0] == 'ackWaited':
                if words[1] == 'print':
                    self.logger.info("Waiting for ack: %s" %
                                     self.getWaitingForAck())
                else:
                    self.setWaitingForAck(words[1])
                    self.incrementSendingInfos()
            elif words[0] == 'ackNotWaited':
                self.setWaitingForAck(None)
                self.resetSendingInfos()
                self.updatePartsToSend()
            elif words[0] == 'ackUsed':
                self.ackUsed = words[1] == 'True' or words[1] == 'true'
            elif words[0] == 'printState':
                self.logger.info(self.state.infos())
            else:
                pass

            try:
                os.unlink(dataFromFiles[index][1])
                self.logger.debug("%s has been erased",
                                  os.path.basename(dataFromFiles[index][1]))
            except OSError, e:
                (type, value, tb) = sys.exc_info()
                self.logger.error("Unable to unlink %s ! Type: %s, Value: %s" %
                                  (dataFromFiles[index][1], type, value))
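
addHeaderToMessage above builds one WMO header per destination address from the routing table's aftnMap, in the form 'SACN32 CWAO YYGGgg'. A standalone sketch of that construction; makeWMOHeader is a hypothetical helper, and the hard-coded map and timestamp stand in for self.drp.aftnMap and dateLib.getYYGGgg():

def makeWMOHeader(address, aftnMap, yyGGgg, default='SACN32'):
    # ex: aftnMap['CWAOWXYZ'] == 'SACN32' -> 'SACN32 CWAO 140335'
    # The first four letters of the AFTN address give the location part.
    return aftnMap.get(address, default) + ' ' + address[0:4] + ' ' + yyGGgg

print(makeWMOHeader('CWAOWXYZ', {'CWAOWXYZ': 'SACN32'}, '140335'))  # SACN32 CWAO 140335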
Example #11
    def _appendToFile(self):
        headers = self.stations.keys()
        headers.sort()

        for header in headers:
            #print "%s: %s" % (header, self.stations[header])
            stations = ''
            for station in self.stations[header]:
                if stations:
                    stations += ' ' + station
                else:
                    stations = station

            coll = ''

            if self.stationsColl:
                if self.stationsColl.has_key(header):
                    coll = 'COLL'

            line = "%s:%s:%s:\n" % (header, coll, stations)
            self.file.write(line)

if __name__ == '__main__':
    from StationParser import StationParser
    sp = StationParser('/apps/px/etc/stations.conf')
    sp.parse()

    sfc = StationFileCreator(stations=sp.stations, stationsColl=sp.stationsColl)
    #sfc.appendToFile()
    #sfc.closeFile()
Example #12
    def _findSA(self, stations, date):
        # Partial header request (Type + station(s))
        # ex: SA CYOW CYUL
        # Note: Once we find the better match, we take the header we found (ex: SACN31 CWAO) and we replace the
        # A by a P (ex: SPCN31 CWAO) and try to find a speci for the station. The speci must be between the full
        # hour of the SA and the request time.
        from StationParser import StationParser
        from SAParser import SAParser

        results = [
        ]  # ex: [('CYOW', SA_LINE, SA_HEADER_TIME, SA_FILE, SA_FILE_TIME, SP_LINE, SP_HEADER_TIME, SP_FILE, SP_FILE_TIME), ('CYUL', ...)]

        sp = StationParser(PXPaths.ETC + 'stations_SA.conf')
        sp.parse()

        for station in stations:
            threeCharHeaders = []

            if len(station) == 3:
                #print ("%s => we will search for %s first, if we obtain no results, we will search for %s" % (station, 'C' + station, station))
                threeCharHeaders = sp.headers.get(station, [])
                station = 'C' + station
                headers = sp.headers.get(station, [])

            elif station[0] == 'C':
                #print("%s is a canadian station" % station)
                headers = sp.headers.get(station, [])

            elif station[0] == 'K':
                #print("%s is an american station" % station)
                headers = sp.headers.get(station, [])

            else:
                #print("%s is an international station" % station)
                headers = sp.headers.get(station, [])

            filesToParse = self._getFilesToParse(PXPaths.DB + date + '/SA/',
                                                 headers,
                                                 DBSearcher.EXCLUDED_SOURCES)
            theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(
                SAParser(''), filesToParse, station)

            if not theLine and threeCharHeaders:
                # If not successful at finding the 4 chars station when the original request was for a 3 chars station
                # we try the 3 chars case
                #print 'We are searching for the 3 chars station'
                station = station[1:]
                filesToParse = self._getFilesToParse(
                    PXPaths.DB + date + '/SA/', threeCharHeaders,
                    DBSearcher.EXCLUDED_SOURCES)
                theLine, bestHeaderTime, theFile, bestFileTime = self._findMoreRecentStation(
                    SAParser(''), filesToParse, station)

            if theLine:
                theLine += '='
                parts = os.path.basename(theFile).split('_')
                header = parts[0] + ' ' + parts[1]
                speciLine, speciHeaderTime, speciFile, speciFileTime = self._findSpeci(
                    station, header, bestHeaderTime, date)

                if speciHeaderTime and (speciHeaderTime < bestHeaderTime):
                    surround = 30 * '='
                    #print 'Speci found has been rejected (%s < %s)' % (speciHeaderTime, bestHeaderTime)
                    speciLine, speciHeaderTime, speciFile, speciFileTime = None, 0, None, 0
                    #print "%s END SPECI INFOS %s\n" % (surround, surround)

            else:
                speciLine, speciHeaderTime, speciFile, speciFileTime = None, 0, None, 0

            results.append(
                (station, theLine, bestHeaderTime, theFile, bestFileTime,
                 speciLine, speciHeaderTime, speciFile, speciFileTime))

        return results