Example #1
0
    def rebuild(self, raw_packet):
        """Disassemble a raw IP packet into header fields and payload.

        Fills ver/ihl/tos/tot_len/id/flags/offset/ttl/proto/check/src/dst
        and self.data, then verifies the header checksum.

        Raises ChecksumError('IP') if the header checksum does not verify.
        """
        [ver_ihl,
         self.tos,
         self.tot_len,
         self.id,
         flag_offset,
         self.ttl,
         self.proto] = struct.unpack('!BBHHHBB', raw_packet[0:10])
        # The checksum is unpacked without '!' (native byte order) so the
        # verification below runs on the same representation checksum() uses.
        [self.check] = struct.unpack('H', raw_packet[10:12])
        [src_ip, dst_ip] = struct.unpack('!4s4s', raw_packet[12:20])

        self.ver = (ver_ihl & 0xf0) >> 4
        self.ihl = ver_ihl & 0x0f

        # BUGFIX: flags and fragment offset share one 16-bit field:
        # bit 14 = DF (0x4000), bit 13 = MF (0x2000), low 13 bits = offset.
        # The previous masks (0x40, 0x20, 0x1f) selected bits of the low byte
        # only, so DF/MF always came out 0 and the offset was truncated.
        # This matches how build() packs the field:
        # (((flag_df << 1) + flag_mf) << 13) + offset.
        self.flag_df = (flag_offset & 0x4000) >> 14
        self.flag_mf = (flag_offset & 0x2000) >> 13
        self.offset = flag_offset & 0x1fff

        self.src = socket.inet_ntoa(src_ip)
        self.dst = socket.inet_ntoa(dst_ip)

        # Payload sits between the end of the header and tot_len.
        self.data = raw_packet[self.ihl * 4:self.tot_len]

        # Check the checksum: a valid header (checksum field included) sums
        # to zero.
        header = raw_packet[:self.ihl * 4]

        if checksum(header) != 0:
            raise ChecksumError('IP')
Example #2
0
    def rebuild(self, raw_packet):
        """Disassemble a raw TCP segment into header fields and payload.

        Reads self.src_ip / self.dst_ip to assemble the pseudo header and
        verifies the TCP checksum; raises ChecksumError('TCP') on mismatch.
        """
        # disassemble tcp header
        [
            self.src_port, self.dst_port, self.seq_no, self.ack_no, offset_res,
            flags, self.win_size
        ] = struct.unpack('!HHLLBBH', raw_packet[0:16])
        # checksum is unpacked without '!' (native byte order) so the
        # verification below runs on the same representation checksum() uses
        [self.check] = struct.unpack('H', raw_packet[16:18])
        [self.urgent] = struct.unpack('!H', raw_packet[18:20])

        # get header length: data offset in the upper 4 bits, counted in
        # 32-bit words
        self.doff = offset_res >> 4

        # get flags, one bit each, LSB first
        self.fin = flags & 0x01
        self.syn = (flags & 0x02) >> 1
        self.rst = (flags & 0x04) >> 2
        self.psh = (flags & 0x08) >> 3
        self.ack = (flags & 0x10) >> 4
        self.urg = (flags & 0x20) >> 5

        # get data: payload starts right after the header
        self.data = raw_packet[self.doff * 4:]

        # assemble pseudo header to calculate checksum
        pseudo_header = struct.pack(
            '!4s4sBBH',
            socket.inet_aton(self.src_ip),
            socket.inet_aton(self.dst_ip),
            0,
            socket.IPPROTO_TCP,  # Protocol,
            self.doff * 4 + len(self.data))
        # a correct segment (checksum field included) checksums to zero
        if checksum(pseudo_header + raw_packet) != 0:
            raise ChecksumError('TCP')
Example #3
0
    def rebuild(self, raw_packet):
        """Parse a raw TCP segment into header fields and payload.

        Reads self.src_ip / self.dst_ip to build the pseudo header and
        verifies the TCP checksum; raises ChecksumError('TCP') on mismatch.
        """
        (self.src_port, self.dst_port, self.seq_no, self.ack_no,
         offset_res, flags, self.win_size) = struct.unpack(
            '!HHLLBBH', raw_packet[0:16])
        # the checksum stays in native byte order on purpose, matching the
        # representation checksum() works with
        self.check = struct.unpack('H', raw_packet[16:18])[0]
        self.urgent = struct.unpack('!H', raw_packet[18:20])[0]

        # data offset lives in the upper 4 bits, counted in 32-bit words
        self.doff = offset_res >> 4

        # unpack the six flag bits, LSB first
        for bit, attr in enumerate(('fin', 'syn', 'rst', 'psh', 'ack', 'urg')):
            setattr(self, attr, (flags >> bit) & 1)

        # everything past the header is payload
        self.data = raw_packet[self.doff * 4:]

        # pseudo header (addresses, protocol, segment length) for the checksum
        pseudo_header = struct.pack(
            '!4s4sBBH',
            socket.inet_aton(self.src_ip),
            socket.inet_aton(self.dst_ip),
            0,
            socket.IPPROTO_TCP,  # Protocol,
            self.doff * 4 + len(self.data))
        # a correct segment (checksum field included) checksums to zero
        if checksum(pseudo_header + raw_packet) != 0:
            raise ChecksumError('TCP')
Example #4
0
def translate_gps_to_hex():
    """Build a sample NMEA GPRMC sentence and log it plus its hex encoding."""
    import binascii
    from utility import checksum

    body = 'GPRMC,080000,A,4800.97,N,12166.03,W,32.1,045.0,160813,,*'
    # full sentence = '$' + body + checksum suffix
    sentence = '$%s%s' % (body, checksum(body)['checksum'])
    log.trace(sentence)
    log.trace(binascii.hexlify(sentence))
Example #5
0
    def build(self):
        """Assemble this TCP segment (header + payload) into bytes.

        The checksum is computed over pseudo header + header (with a zeroed
        checksum field) + data, then packed back without '!' (native byte
        order), matching the representation checksum() returns.
        """
        # data offset in the upper 4 bits; reserved bits are zero
        offset_res = (self.doff << 4) + 0
        # zero the checksum field for the checksum computation
        self.check = 0

        # collapse the individual flag attributes into one byte, LSB first
        flags = self.fin + \
                (self.syn << 1) + \
                (self.rst << 2) + \
                (self.psh << 3) + \
                (self.ack << 4) + \
                (self.urg << 5)

        # assemble tcp header without checksum
        tcp_header = struct.pack('!HHLLBBHHH',
                                 self.src_port,
                                 self.dst_port,
                                 self.seq_no,
                                 self.ack_no,
                                 offset_res,
                                 flags,
                                 self.win_size,
                                 self.check,
                                 self.urgent)

        # assemble pseudo header to calculate checksum
        pseudo_header = struct.pack('!4s4sBBH',
                                    socket.inet_aton(self.src_ip),
                                    socket.inet_aton(self.dst_ip),
                                    0,
                                    socket.IPPROTO_TCP,  # Protocol,
                                    self.doff * 4 + len(self.data))

        self.check = checksum(pseudo_header + tcp_header + self.data)

        # finally assemble tcp header; note the checksum is packed without
        # '!' (native byte order) on purpose
        tcp_header = struct.pack('!HHLLBBH',
                                 self.src_port,
                                 self.dst_port,
                                 self.seq_no,
                                 self.ack_no,
                                 offset_res,
                                 flags,
                                 self.win_size) + \
                     struct.pack('H', self.check) + \
                     struct.pack('!H', self.urgent)

        return tcp_header + self.data
Example #6
0
    def build(self):
        """Serialize this TCP segment (header + payload) to bytes.

        Computes the checksum over pseudo header + header (with a zeroed
        checksum field) + data, then splices it back in using native byte
        order, matching the representation checksum() returns.
        """
        offset_res = (self.doff << 4) + 0
        self.check = 0

        # collapse the individual flag attributes into one byte, LSB first
        flags = 0
        for bit, value in enumerate((self.fin, self.syn, self.rst,
                                     self.psh, self.ack, self.urg)):
            flags += value << bit

        header_fields = (self.src_port, self.dst_port, self.seq_no,
                         self.ack_no, offset_res, flags, self.win_size)

        # header with a zero checksum, used only as the checksum input
        tcp_header = struct.pack('!HHLLBBHHH',
                                 *(header_fields + (self.check, self.urgent)))

        # pseudo header: addresses, protocol and segment length
        pseudo_header = struct.pack('!4s4sBBH',
                                    socket.inet_aton(self.src_ip),
                                    socket.inet_aton(self.dst_ip),
                                    0,
                                    socket.IPPROTO_TCP,  # Protocol,
                                    self.doff * 4 + len(self.data))

        self.check = checksum(pseudo_header + tcp_header + self.data)

        # rebuild the header, this time with the real checksum; the checksum
        # is packed without '!' (native byte order) on purpose
        tcp_header = (struct.pack('!HHLLBBH', *header_fields)
                      + struct.pack('H', self.check)
                      + struct.pack('!H', self.urgent))

        return tcp_header + self.data
Example #7
0
    def generate_nmea_string(self, time, date, lat, long, speed):
        """ Generate a NMEA string for GPS simulation with given data.
        'time' should be a list of three values, e.g., [hour,minute,second] in 24-hour format
        'date' should be a list of three values, e.g., [day,month,year] in numerical (2-digit)
            format
        Returns a dict with 'string' (the assembled $GPRMC sentence) and,
        on success, 'successful': True.
        """

        result = {'string': ''}

        try:
            # zero-pad each time component to two digits
            # (renamed 'min' -> 'minute': the old name shadowed the builtin)
            hour = str(time[0]) if time[0] > 9 else '0%s' % time[0]
            minute = str(time[1]) if time[1] > 9 else '0%s' % time[1]
            sec = str(time[2]) if time[2] > 9 else '0%s' % time[2]
            # zero-pad each date component to two digits
            day = str(date[0]) if date[0] > 9 else '0%s' % date[0]
            month = str(date[1]) if date[1] > 9 else '0%s' % date[1]
            year = str(date[2]) if date[2] > 9 else '0%s' % date[2]
            fixtime = '%s%s%s' % (hour, minute, sec)
            # hemisphere suffix from the sign of the coordinate
            fixlat = '%s,N' % lat if lat > 0 else '%s,S' % abs(lat)
            fixlong = '%s,E' % long if long > 0 else '%s,W' % abs(long)
            angle = '045.0'
            fixdate = '%s%s%s' % (day, month, year)
            #self.log.trace("Fix Date: %s." % fixdate)
            magvar = ','
            base_string = 'GPRMC,%(time)s,A,%(lat)s,%(long)s,%(speed)s,%(angle)s,%(date)s,%(magvar)s*'\
                          % {'time':fixtime,'lat':fixlat,'long':fixlong,'speed':speed,'angle':angle,
                             'date':fixdate,'magvar':magvar}
            chk = checksum(base_string)['checksum']

            result['string'] = '$' + base_string + chk + '\n'

            result['successful'] = True
        except BaseException as e:
            # 'except X as e' replaces the Python-2-only 'except X, e' syntax
            self.handle_exception(e, operation="generate nmea string")
        # BUGFIX: the assembled result was built but never returned, so
        # callers always received None
        return result
Example #8
0
    def build(self):
        """Serialize this IP packet (header + payload) to bytes.

        Picks a fresh random identification value, computes the header
        checksum over the header, and splices the result back in using
        native byte order, matching the representation checksum() returns.
        """
        self.id = randint(0, 65535)
        self.tot_len = self.ihl * 4 + len(self.data)
        src_ip = socket.inet_aton(self.src)
        dst_ip = socket.inet_aton(self.dst)

        # version in the upper nibble, header length in the lower
        ver_ihl = (self.ver << 4) + self.ihl
        # flags (DF, MF) in bits 14/13, fragment offset in the low 13 bits
        flag_offset = (((self.flag_df << 1) + self.flag_mf) << 13) + self.offset

        # header used as the checksum input
        ip_header = struct.pack('!BBHHHBBH4s4s', ver_ihl, self.tos,
                                self.tot_len, self.id, flag_offset,
                                self.ttl, self.proto, self.check,
                                src_ip, dst_ip)

        self.check = checksum(ip_header)

        # final header: the checksum is packed without '!' (native byte
        # order) on purpose
        ip_header_new = (struct.pack('!BBHHHBB', ver_ihl, self.tos,
                                     self.tot_len, self.id, flag_offset,
                                     self.ttl, self.proto)
                         + struct.pack('H', self.check)
                         + struct.pack('!4s4s', src_ip, dst_ip))

        return ip_header_new + self.data
Example #9
0
def updateDatasetVersion(dset, dsetVersion, pathlist, session, handler, cfHandler, configOptions, aggregateDimensionName=None, offline=False, progressCallback=None, stopEvent=None, extraFields=None, replace=False, forceRescan=False, **context):
    """Update (or replace) the files of *dsetVersion* from *pathlist*.

    For each (path, (size, mtime)) in pathlist, reuses the existing
    FileVersion when the file is unchanged, otherwise creates a new one, and
    rescans file contents where necessary.

    Returns (createNewDatasetVersion, newFileVersionObjs): whether a new
    dataset version should be created (a file changed and the current
    version is the latest), and the list of FileVersion objects.
    """
    if replace:
        info("Replacing files in dataset: %s, version %d"%(dset.name, dsetVersion.version))
    else:
        info("Updating files in dataset: %s, version %d"%(dset.name, dsetVersion.version))

    haveLatestDsetVersion = (dsetVersion.version == dset.getVersion())

    # Get the list of FileVersion objects for this version, keyed by
    # location; todelete tracks entries not seen in pathlist
    locdict = {}
    todelete = {}
    for fobj in dsetVersion.getFileVersions():
        loc = fobj.location
        locdict[loc] = todelete[loc] = fobj

    varlocate = configOptions['variable_locate']
    checksumClient = configOptions['checksumClient']
    checksumType = configOptions['checksumType']

    # Get the base dictionary for the entire dataset
    basedict = dset.getBaseDictionary()

    # For each item in the pathlist:
    seq = 0
    fileModified = False                # Any file has been modified (added, replaced, or deleted)
    newFileVersionObjs = []
    nfiles = len(pathlist)
    for path, sizet in pathlist:

        # Rescan this file if it has been added, or replaced
        rescanFile = haveLatestDsetVersion

        size, mtime = sizet
        csum = None
        csumtype = checksumType
        techNotes = None
        techNotesTitle = None
        datasetTechNotes = None
        datasetTechNotesTitle = None
        if extraFields is not None:
            csum = extraFieldsGet(extraFields, (dset.name, path, 'checksum'), dsetVersion)
            csumtype = extraFieldsGet(extraFields, (dset.name, path, 'checksum_type'), dsetVersion)
            techNotes = extraFields.get((dset.name, -1, path, 'tech_notes'), None)
            techNotesTitle = extraFields.get((dset.name, -1, path, 'tech_notes_title'), None)
            datasetTechNotes = extraFields.get((dset.name, -1, path, 'dataset_tech_notes'), None)
            datasetTechNotesTitle = extraFields.get((dset.name, -1, path, 'dataset_tech_notes_title'), None)
        # Compute a checksum only when one was not supplied and the file is
        # locally accessible
        if csum is None and not offline and checksumClient is not None:
            csum = checksum(path, checksumClient)
            csumtype = checksumType

        # Cache the dataset tech notes info for later use
        if datasetTechNotes is not None:
            dset.dataset_tech_notes = datasetTechNotes
            dset.dataset_tech_notes_title = datasetTechNotesTitle

        # Check if 'from_file' was specified for this file; only honor it
        # when the basenames match
        fromfile = None
        if extraFields is not None:
            fromfile = extraFieldsGet(extraFields, (dset.name, path, 'from_file'), dsetVersion)
        if fromfile is None:
            oldpath = path
        else:
            frombase = os.path.basename(fromfile)
            tobase = os.path.basename(path)
            if frombase!=tobase:
                info("Basenames are different for files: %s and %s. Ignoring 'from_file' option."%(path, fromfile))
                oldpath = path
            else:
                oldpath = fromfile

        # If the item is in the current dataset version, get the file version obj and add to the list
        # (FIX: dict.has_key() is deprecated and removed in Python 3; use 'in')
        if oldpath in locdict:
            del todelete[oldpath]
            fileVersionObj = locdict[oldpath]
            fileObj = fileVersionObj.parent

            # If the file matches the existing file version, no-op, ...
            if os.path.exists(oldpath) and compareFiles(fileVersionObj, handler, path, size, offline, checksum=csum):
                if not forceRescan:
                    info("File %s exists, skipping"%path)
                newFileVersionObjs.append(fileVersionObj)
                rescanFile = False

            # ... else create a new version of the file
            else:
                if oldpath!=path:
                    info("Replacing file %s"%oldpath)
                newFileVersionObj = FileVersionFactory(fileObj, path, session, size, mod_time=mtime, checksum=csum, checksum_type=csumtype, tech_notes=techNotes, tech_notes_title=techNotesTitle)
                newFileVersionObjs.append(newFileVersionObj)
                fileObj.deleteChildren(session)
                fileModified = True

        # Else create a new file / file version object and add to the list ...
        else:
            fileObj = FileFactory(dset, path, basedict, session)
            newFileVersionObj = FileVersionFactory(fileObj, path, session, size, mod_time=mtime, checksum=csum, checksum_type=csumtype, tech_notes=techNotes, tech_notes_title=techNotesTitle)
            newFileVersionObjs.append(newFileVersionObj)
            fileModified = True

        # ... and rescan if necessary
        if rescanFile or forceRescan:
            if not offline:
                info("Scanning %s"%path)
                f = handler.openPath(path)
                extractFromFile(dset, f, fileObj, session, cfHandler, aggdimName=aggregateDimensionName, varlocate=varlocate, **context)
                f.close()
            else:
                info("File %s is offline"%path)

        # Callback progress; roll back the session before re-raising so the
        # transaction is not left open
        seq += 1
        try:
            issueCallback(progressCallback, seq, nfiles, 0, 1, stopEvent=stopEvent)
        except:
            session.rollback()
            session.close()
            raise

    # If updating, add the file version objects ...
    if not replace:
        for fileVersionObj in todelete.values():
            newFileVersionObjs.append(fileVersionObj)

    # ... else if rescanning delete the file object children
    elif haveLatestDsetVersion:
        for fileVersionObj in todelete.values():
            fileObj = fileVersionObj.parent
            fileObj.deleteChildren(session)
            fileModified = True

    # Create a new dataset version if:
    # - a file has been added, replaced, or deleted, and
    # - the current version is the latest
    createNewDatasetVersion = haveLatestDsetVersion and fileModified

    return createNewDatasetVersion, newFileVersionObjs
Example #10
0
def createDataset(dset, pathlist, session, handler, cfHandler, configOptions, aggregateDimensionName=None, offline=False, progressCallback=None, stopEvent=None, extraFields=None, masterGateway=None, **context):
    """Populate *dset* with File/FileVersion objects for every path in *pathlist*.

    For each (path, (size, mtime)) pair a File and an initial FileVersion
    (version 1) are created and appended to the dataset; unless offline,
    each file is opened and its contents extracted into the dataset.

    Returns (True, fobjlist) where fobjlist holds the created FileVersion
    objects.

    NOTE(review): masterGateway is accepted but unused in this body —
    presumably consumed by callers or a sibling function; confirm.
    """

    fobjlist = []                       # File objects in the dataset
    nfiles = len(pathlist)

    basedict = {}                       # file.base => 1
    varlocate = configOptions['variable_locate']
    checksumClient = configOptions['checksumClient']
    checksumType = configOptions['checksumType']
    seq = 0
    for path, sizet in pathlist:
        size, mtime = sizet

        # Checksum and tech-notes metadata may be supplied via extraFields;
        # otherwise compute the checksum when the file is locally accessible
        csum = None
        csumtype = checksumType
        techNotes = None
        techNotesTitle = None
        datasetTechNotes = None
        datasetTechNotesTitle = None
        if extraFields is not None:
            csum = extraFields.get((dset.name, -1, path, 'checksum'), None)
            csumtype = extraFields.get((dset.name, -1, path, 'checksum_type'), None)
            techNotes = extraFields.get((dset.name, -1, path, 'tech_notes'), None)
            techNotesTitle = extraFields.get((dset.name, -1, path, 'tech_notes_title'), None)
            datasetTechNotes = extraFields.get((dset.name, -1, path, 'dataset_tech_notes'), None)
            datasetTechNotesTitle = extraFields.get((dset.name, -1, path, 'dataset_tech_notes_title'), None)
        if csum is None and not offline and checksumClient is not None:
            csum = checksum(path, checksumClient)
            csumtype = checksumType

        # Cache the dataset tech notes info for later use
        if datasetTechNotes is not None:
            dset.dataset_tech_notes = datasetTechNotes
            dset.dataset_tech_notes_title = datasetTechNotesTitle

        # Create a file and version (basedict ensures unique file bases)
        base = generateFileBase(path, basedict, dset.name)
        file = File(base, 'netCDF')
        basedict[base] = 1
        fileVersion = FileVersion(1, path, size, mod_time=mtime, checksum=csum, checksum_type=csumtype, tech_notes=techNotes, tech_notes_title=techNotesTitle)
        file.versions.append(fileVersion)
        fobjlist.append(fileVersion)
        seq += 1

        dset.files.append(file)

        # Extract the dataset contents
        if not offline:
            info("Scanning %s"%path)
            f = handler.openPath(path)
            extractFromFile(dset, f, file, session, cfHandler, aggdimName=aggregateDimensionName, varlocate=varlocate, **context)
            f.close()
        else:
            info("File %s is offline"%path)

        # Callback progress; roll back the session before re-raising so the
        # transaction is not left open
        try:
            issueCallback(progressCallback, seq, nfiles, 0, 1, stopEvent=stopEvent)
        except:
            session.rollback()
            session.close()
            raise

    return True, fobjlist