def zmqPublish(opts, q):
    """Publish every line pulled from q on the 'traclink:' topic."""
    publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    publisher.start()
    for record in q:
        outgoing = 'traclink:' + record
        logging.debug('publishing: %s', outgoing)
        publisher.pubStream.send(outgoing)
def zmqPublish(opts, q):
    """Publish every line pulled from q, tagged with EVA number and track name."""
    publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    publisher.start()
    # topic prefix is fixed for the lifetime of the loop, so build it once
    prefix = 'gpsposition:%s:%s:' % (opts.evaNumber, opts.trackName)
    for record in q:
        outgoing = prefix + record
        logging.debug('publishing: %s', outgoing)
        publisher.pubStream.send(outgoing)
def main():
    """Parse options, then periodically publish a prefixed test message."""
    import optparse
    parser = optparse.OptionParser('usage: %prog')
    ZmqPublisher.addOptions(parser, 'zmqPublish')
    parser.add_option(
        '-p', '--prefix', default='',
        help='Prefix to prepend to incoming lines before publishing them (DO include trailing "." if you want it)')
    opts, args = parser.parse_args()
    if args:
        parser.error('expected no args')
    logging.basicConfig(level=logging.DEBUG)

    # set up networking
    p = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    p.start()

    # start publishing an arbitrary message that central should forward.
    # NOTE(review): PeriodicCallback's period is in milliseconds, so 0.1
    # means 10 kHz -- confirm this was not meant to be 100 (compare the
    # 1000 ms period used by the attachment-test main()).
    pubTimer = ioloop.PeriodicCallback(lambda: pubMessage(opts.prefix, p), 0.1)
    pubTimer.start()
    zmqLoop()
def zmqPublish(opts, q):
    """Publish each line from q under the configured data topic, updating
    subsystem status for the EVA as each message goes out."""
    publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    publisher.start()
    # topic/EVA/track prefix does not change per message; compute it up front
    prefix = '%s:%s:%s:' % (opts.dataTopic, opts.evaNumber, opts.trackName)
    for record in q:
        outgoing = prefix + record
        logging.debug('publishing: %s', outgoing)
        updateStatus(opts.evaNumber)
        publisher.pubStream.send(outgoing)
def main():
    """Read lines from stdin and publish them via zmq until interrupted."""
    import optparse
    parser = optparse.OptionParser('usage: testLineSource.py testMessages.txt | %prog')
    ZmqPublisher.addOptions(parser, 'testPublisher')
    opts, args = parser.parse_args()
    if args:
        parser.error('expected no args')
    logging.basicConfig(level=logging.DEBUG)

    # set up networking
    pub = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    pub.start()

    # hand stdin to the ioloop; stdinHandler drains it as data arrives
    loop = ioloop.IOLoop.instance()
    loop.add_handler(sys.stdin.fileno(),
                     lambda fd, events: stdinHandler(pub),
                     ioloop.IOLoop.READ)
    zmqLoop()
def main():
    """Publish a test message once per second so central can forward it."""
    import optparse
    parser = optparse.OptionParser('usage: %prog')
    ZmqPublisher.addOptions(parser, 'testPublishAttachments')
    opts, args = parser.parse_args()
    if args:
        parser.error('expected no args')
    logging.basicConfig(level=logging.DEBUG)

    # set up networking
    publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    publisher.start()

    # start publishing an arbitrary message that central should forward
    # (period is in milliseconds: once per second)
    timer = ioloop.PeriodicCallback(lambda: pubMessage(publisher), 1000)
    timer.start()
    zmqLoop()
def main():
    """Read lines from stdin and publish them via zmq until interrupted."""
    import optparse
    parser = optparse.OptionParser(
        'usage: testLineSource.py testMessages.txt | %prog')
    ZmqPublisher.addOptions(parser, 'testPublisher')
    opts, args = parser.parse_args()
    if args:
        parser.error('expected no args')
    logging.basicConfig(level=logging.DEBUG)

    # set up networking
    pub = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    pub.start()

    # let the ioloop wake us whenever stdin has data to publish
    ioloop.IOLoop.instance().add_handler(
        sys.stdin.fileno(),
        lambda fd, events: stdinHandler(pub),
        ioloop.IOLoop.READ)
    zmqLoop()
def main():
    """Parse options, then periodically publish a prefixed test message."""
    import optparse
    parser = optparse.OptionParser('usage: %prog')
    ZmqPublisher.addOptions(parser, 'zmqPublish')
    parser.add_option('-p', '--prefix',
                      default='',
                      help='Prefix to prepend to incoming lines before publishing them (DO include trailing "." if you want it)')
    opts, args = parser.parse_args()
    if args:
        parser.error('expected no args')
    logging.basicConfig(level=logging.DEBUG)

    # set up networking
    publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
    publisher.start()

    # start publishing an arbitrary message that central should forward.
    # NOTE(review): PeriodicCallback's period is in milliseconds; 0.1 ms is
    # extremely fast -- confirm this was not meant to be 100.
    timer = ioloop.PeriodicCallback(lambda: pubMessage(opts.prefix, publisher), 0.1)
    timer.start()
    zmqLoop()
class ZmqPlayback(object):
    """Replays messages from a recorded zmq log file through a publisher.

    If opts.topic is non-empty, only records whose message starts with one
    of the given topic prefixes are re-published; otherwise all records go
    out.  Progress dots are printed to stdout every 100 published messages.
    """

    def __init__(self, logPath, opts):
        # logPath: path to the recorded log; opened lazily in playLog().
        self.logPath = logPath
        self.logFile = None
        self.log = None  # LogParser over logFile, set in playLog()
        self.opts = opts
        self.publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
        self.publishTimer = None
        print 'topics:', self.opts.topic

    def start(self):
        # Start the publisher, then replay after a short delay.
        self.publisher.start()
        # the delay gives a chance to connect to central before publishing
        self.publishTimer = ioloop.DelayedCallback(self.playLog, 100)
        self.publishTimer.start()

    def playLog(self):
        """Stream every (topic-matching) record from the log to pubStream."""
        self.logFile = open(self.logPath, 'rb')
        self.log = LogParser(self.logFile)
        i = 0  # count of messages actually published (not records scanned)
        for rec in self.log:
            topicMatch = False
            if self.opts.topic:
                # match if the message starts with any requested topic prefix
                for topic in self.opts.topic:
                    if rec.msg.startswith(topic):
                        topicMatch = True
                        break
            else:
                # no topic filter configured: publish everything
                topicMatch = True
            if topicMatch:
                self.publisher.pubStream.send(rec.msg)
                # lightweight progress indicator: one dot per 100 messages
                if i % 100 == 0:
                    sys.stdout.write('.')
                    sys.stdout.flush()
                i += 1
        print
        print 'message count:', i
class GpsTelemetryCleanup(object):
    """Subscribes to raw 'gpsposition' zmq messages, parses the embedded
    $GPRMC sentences, and stores the resulting positions as track points
    (PastPosition / CurrentPosition) in the database.

    Track lookup results are memoized in the cache under 'gpstrack.<id>'
    to avoid a DB round trip per position fix.
    """

    def __init__(self, opts):
        self.opts = opts
        self.subscriber = ZmqSubscriber(
            **ZmqSubscriber.getOptionValues(self.opts))
        self.publisher = ZmqPublisher(
            **ZmqPublisher.getOptionValues(self.opts))

    def start(self):
        # Start networking, then register a raw handler per topic; handlers
        # are resolved by naming convention: 'handle_<topic>'.
        self.publisher.start()
        self.subscriber.start()
        topics = ['gpsposition']
        for topic in topics:
            self.subscriber.subscribeRaw(topic + ':',
                                         getattr(self, 'handle_' + topic))

    def flush(self):
        # flush bulk saves to db if needed. currently no-op.
        pass

    def handle_gpsposition(self, topic, body):
        # Wrapper that logs and swallows all exceptions so a single bad
        # record cannot kill the long-running cleanup process.
        try:
            self.handle_gpsposition0(topic, body)
        except:  # pylint: disable=W0702
            logging.warning('%s', traceback.format_exc())
            logging.warning('exception caught, continuing')

    def handle_gpsposition0(self, topic, body):
        # example: 2:$GPRMC,225030.00,A,3725.1974462,N,12203.8994696,W,,,220216,0.0,E,A*2B
        serverTimestamp = datetime.datetime.now(pytz.utc)
        if body == 'NO DATA':
            logging.info('NO DATA')
            return

        # parse record: '<resourceId>:<trackName>:<nmea sentence>'
        resourceIdStr, trackName, content = body.split(":", 2)
        resourceId = int(resourceIdStr)
        if not checkDataQuality(resourceId, content):
            logging.info('UNRECOGNIZED OR CORRUPT GPS SENTENCE: %s', content)
            return
        # unpack the comma-separated $GPRMC fields
        sentenceType, utcTime, activeVoid, lat, latHemi, lon,\
            lonHemi, speed, heading, date, declination, declinationDir,\
            modeAndChecksum = content.split(",")
        # GPRMC date is ddmmyy and time is HHMMSS.ff, both UTC
        sourceTimestamp = datetime.datetime.strptime('%s %s' % (date, utcTime),
                                                     '%d%m%y %H%M%S.%f')
        sourceTimestamp = sourceTimestamp.replace(tzinfo=pytz.utc)
        lat = parseTracLinkDM(lat, latHemi)
        lon = parseTracLinkDM(lon, lonHemi)

        # save subsystem status to cache
        myKey = "telemetryCleanup"
        status = {'lastUpdated': datetime.datetime.utcnow().isoformat()}
        cache.set(myKey, json.dumps(status))

        # calculate which track record belongs to
        cacheKey = 'gpstrack.%s' % resourceId
        pickledTrack = cache.get(cacheKey)
        if pickledTrack:
            # cache hit, great
            track = pickle.loads(pickledTrack)
        else:
            # check db for a track matching this resourceId
            try:
                basaltResource = BasaltResource.objects.get(
                    resourceId=resourceId)
            except ObjectDoesNotExist:
                logging.warning('%s', traceback.format_exc())
                raise KeyError(
                    'Received GPS position for the EV with id %s. Please ensure there is a vehicle with that id in the BasaltResource table.'
                    % resourceId)

            # Check for track name. We use explicit name if specified, otherwise
            # we check for an active flight and finally use the resourceId
            if len(trackName):
                logging.info("Using track name from listener: %s" % trackName)
            if len(trackName) == 0:
                # I.e. we were not given a name for track already
                try:
                    activeFlight = BasaltActiveFlight.objects.get(
                        flight__vehicle__basaltresource=basaltResource)
                    trackName = activeFlight.flight.name
                    logging.info(
                        "Using track name from BasaltActiveFlight: %s" %
                        trackName)
                except ObjectDoesNotExist:
                    trackName = basaltResource.name
                    logging.info("Using track name from EV arg: %s" % trackName)
            tracks = BasaltTrack.objects.filter(name=trackName)
            assert len(tracks) in (0, 1)
            if tracks:
                # we already have a valid track, use that
                track = tracks[0]
            else:
                # must start a new track
                track = BasaltTrack(name=trackName,
                                    resource=basaltResource,
                                    iconStyle=DEFAULT_ICON_STYLE,
                                    lineStyle=DEFAULT_LINE_STYLE,
                                    dataType=RAW_DATA_TYPE)
                track.save()
            # set cache for next time
            pickledTrack = pickle.dumps(track, pickle.HIGHEST_PROTOCOL)
            cache.set(cacheKey, pickledTrack, TRACK_CACHE_TIMEOUT)

        ######################################################################
        # asset position
        ######################################################################

        # create a NewAssetPosition row
        params = {
            'track': track,
            'timestamp': sourceTimestamp,
            'serverTimestamp': serverTimestamp,
            'latitude': lat,
            'longitude': lon,
            # may not have heading, but we'll try...
            'heading': float(heading) if len(heading) else None,
            'altitude': None,
        }
        pos = PastPosition(**params)
        pos.save()  # note: could queue for bulk save instead
        cpos = CurrentPosition(**params)
        cpos.saveCurrent()
        # re-broadcast the cleaned-up current position to downstream listeners
        self.publisher.sendDjango(cpos)
class FilePublisher(object):
    """Watches configured file sources and publishes new files over zmq,
    on request, for a limited polling window.

    A remote FileReceiver sends a '<subtopic> request' message carrying
    timeout/pollPeriod (and optional image-processing) settings; this class
    then polls its sources at that period until the timeout elapses.
    """

    def __init__(self, opts):
        # Build one source object per watch spec of each supported kind.
        self.sources = ([PatternFileSource(x) for x in opts.watchFilePattern] +
                        [DirectoryFileSource(x) for x in opts.watchDirectory] +
                        [SymlinkFileSource(x) for x in opts.watchSymlink])
        self.subtopic = opts.subtopic
        self.pollTimer = None        # PeriodicCallback while a request is active
        self.stopPollingTime = None  # wall-clock deadline for the current request
        self.imageProcessor = None   # set per-request in handleRequest()
        self.tmpDir = tempfile.mkdtemp(prefix='filePublisher')
        # expand '{subtopic}' placeholder in the module name, if present
        opts.moduleName = opts.moduleName.format(subtopic=opts.subtopic)
        self.publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
        self.subscriber = ZmqSubscriber(**ZmqSubscriber.getOptionValues(opts))

    def getTopic(self, msgType):
        # msgType is one of 'request', 'response', 'file'
        return 'geocamUtil.filePublisher.%s.%s' % (self.subtopic, msgType)

    def start(self):
        self.publisher.start()
        self.subscriber.start()
        self.subscriber.subscribeJson(self.getTopic('request'),
                                      self.handleRequest)

    def handleRequest(self, topic, requestDict):
        """Begin (or restart) polling per the receiver's request settings.

        NOTE: requestTimeout/pollPeriod/timestampSpacing are first assigned
        here, so pollHandler0 must not run before a request arrives.
        """
        logging.debug('handleRequest %s', json.dumps(requestDict))
        self.requestTimeout = requestDict['timeout']
        self.pollPeriod = requestDict['pollPeriod']
        self.timestampSpacing = requestDict.get('timestampSpacing')
        # any image option present -> build a per-request ImageProcessor
        if 'imageResize' in requestDict or 'imageCrop' in requestDict or 'imageFormat' in requestDict:
            self.imageProcessor = ImageProcessor(resize=requestDict.get('imageResize'),
                                                 crop=requestDict.get('imageCrop'),
                                                 fmt=requestDict.get('imageFormat'),
                                                 tmpDir=self.tmpDir)
        else:
            self.imageProcessor = None
        self.stopPollingTime = time.time() + self.requestTimeout
        self.publisher.sendRaw(self.getTopic('response'), 'ok')
        # restart the poll timer with the (possibly new) period
        if self.pollTimer:
            self.pollTimer.stop()
            self.pollTimer = None
        self.pollTimer = ioloop.PeriodicCallback(self.pollHandler,
                                                 self.pollPeriod * 1000)
        self.pollTimer.start()

    def pollHandler(self):
        # Wrapper that logs and swallows exceptions so one bad poll cycle
        # does not stop the periodic timer.
        try:
            self.pollHandler0()
        except:  # pylint: disable=W0702
            logging.warning('%s', traceback.format_exc())

    def pollHandler0(self):
        """One poll cycle: stop the timer if the request timed out, then
        publish any new file found in each source.

        NOTE(review): sources are still checked once more after the timer
        is stopped -- presumably a deliberate final sweep; confirm.
        """
        logging.debug('pollHandler')
        if time.time() > self.stopPollingTime:
            logging.info('request timed out, stopping polling')
            self.pollTimer.stop()
            self.pollTimer = None
        for source in self.sources:
            newFileInfo = source.checkForNewFileAndRemember(self.timestampSpacing)
            if newFileInfo:
                self.publishFile(newFileInfo)

    def publishFile(self, fileInfo):
        """Publish one (path, mtime) pair, image-processing it first if an
        ImageProcessor is configured and the file looks like an image."""
        path, mtime = fileInfo
        if self.imageProcessor and self.imageProcessor.isImage(path):
            processedPath = self.imageProcessor.processImage(path)
            logging.debug('sending %s', processedPath)
            self.publisher.sendJson(self.getTopic('file'),
                                    {'file': getFileDict(processedPath, mtime)})
            # the processed copy is temporary; remove it once sent
            os.unlink(processedPath)
        else:
            logging.debug('sending %s', path)
            self.publisher.sendJson(self.getTopic('file'),
                                    {'file': getFileDict(path, mtime)})
class TracLinkTelemetryCleanup(object):
    """Subscribes to raw 'traclink' zmq messages (acoustic beacon fixes),
    parses them, and stores both the tracked object's position and the
    ship's position as NewAssetPosition rows.

    Beacon-to-track lookups are memoized in the cache under
    'traclink.track.<targetId>'.
    """

    def __init__(self, opts):
        self.opts = opts
        self.subscriber = ZmqSubscriber(**ZmqSubscriber.getOptionValues(self.opts))
        self.publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(self.opts))

    def start(self):
        # Register a raw handler per topic, resolved by naming convention
        # ('handle_<topic>').
        self.publisher.start()
        self.subscriber.start()
        topics = ['traclink']
        for topic in topics:
            self.subscriber.subscribeRaw(topic + ':',
                                         getattr(self, 'handle_' + topic))

    def flush(self):
        # flush bulk saves to db if needed. currently no-op.
        pass

    def handle_traclink(self, topic, body):
        # Wrapper that logs and swallows all exceptions so one bad record
        # does not kill the cleanup daemon.
        try:
            self.handle_traclink0(topic, body)
        except:  # pylint: disable=W0702
            logging.warning('%s', traceback.format_exc())
            logging.warning('exception caught, continuing')

    def handle_traclink0(self, topic, body):
        # example: 13,09/19/2013,05:17:39, 2831.3070, -8038.8460, 2831.3068, -8038.8459,205.2, 0.9
        serverTimestamp = datetime.datetime.now(pytz.utc)
        if body == 'NO DATA':
            logging.info('NO DATA')
            return

        # parse record: id, date, time, ship lat/lon, target lat/lon, ship heading, depth
        targetId, d, t, shipLat, shipLon, lat, lon, shipHeading, depth = body.split(',')
        targetId = int(targetId)
        # NOTE(review): '%m/%d/%y' expects a 2-digit year, but the example
        # above shows a 4-digit year (09/19/2013) -- confirm which format
        # the feed actually sends.
        sourceTimestamp = datetime.datetime.strptime('%s %s' % (d, t),
                                                     '%m/%d/%y %H:%M:%S')
        lat = parseTracLinkDM(lat)
        lon = parseTracLinkDM(lon)
        shipLat = parseTracLinkDM(shipLat)
        shipLon = parseTracLinkDM(shipLon)
        shipHeading = float(shipHeading)
        depth = float(depth)

        # calculate which track record belongs to
        cacheKey = 'traclink.track.%s' % targetId
        pickledTrack = cache.get(cacheKey)
        if pickledTrack:
            # cache hit, great
            track = pickle.loads(pickledTrack)
        else:
            # check db for a track matching this targetId
            try:
                beacon = Beacon.objects.get(targetId=targetId)
            except ObjectDoesNotExist:
                logging.warning('%s', traceback.format_exc())
                raise KeyError('Received TracLink position for the beacon with targetId %s. Please ensure there is a beacon with that targetId in the plrpExplorer Beacon table.'
                               % targetId)
            # the beacon must belong to a currently-active flight
            try:
                activeFlight = ActiveFlight.objects.get(flight__beacon=beacon)
            except ObjectDoesNotExist:
                raise KeyError('Received TracLink position for the beacon with targetId %s (named "%s"). Please ensure there is an active flight using that beacon in the plrpExplorer NewFlight table.'
                               % (targetId, beacon.name))
            flight = activeFlight.flight
            tracks = Track.objects.filter(vehicle=flight.vehicle)
            assert len(tracks) in (0, 1)
            if tracks:
                # we already have a valid track, use that
                track = tracks[0]
            else:
                # must start a new track
                track = Track(name=flight.name,
                              resource=flight,
                              iconStyle=DEFAULT_ICON_STYLE,
                              lineStyle=DEFAULT_LINE_STYLE,
                              dataType=RAW_DATA_TYPE)
                track.save()
            # set cache for next time
            pickledTrack = pickle.dumps(track, pickle.HIGHEST_PROTOCOL)
            cache.set(cacheKey, pickledTrack,
                      settings.PLRP_TRACK_CACHE_TIMEOUT_SECONDS)

        ######################################################################
        # asset position
        ######################################################################

        # create a NewAssetPosition row
        # (note: 'timestamp' is the server receive time, not sourceTimestamp)
        params = {
            'track': track,
            'timestamp': serverTimestamp,
            'latitude': lat,
            'longitude': lon,
            'heading': None,  # traclink doesn't provide heading for tracked object
            'depthMeters': depth,
            'sourceTimestamp': sourceTimestamp,
            'serverTimestamp': serverTimestamp,
        }
        pos = NewAssetPosition(**params)
        pos.save()  # note: could queue for bulk save instead
        cpos = NewAssetCurrentPosition(**params)
        cpos.saveCurrent()
        self.publisher.sendDjango(cpos)

        # add fields to create a NewAssetPositionTracLink row
        params.update({
            'summary': pos,
            'targetId': targetId,
            'shipLatitude': shipLat,
            'shipLongitude': shipLon,
            'shipHeading': shipHeading,
        })
        posTracLink = NewAssetPositionTracLink(**params)
        posTracLink.save()  # note: could queue for bulk save instead

        ######################################################################
        # boat position
        ######################################################################

        # the ship's own position goes on a dedicated boat track
        params = {
            'track': BOAT_TRACK,
            'timestamp': serverTimestamp,
            'latitude': shipLat,
            'longitude': shipLon,
            'heading': shipHeading,
            'depthMeters': 0.0,  # the boat is on the surface by definition
            'sourceTimestamp': sourceTimestamp,
            'serverTimestamp': serverTimestamp,
        }
        boatPos = NewAssetPosition(**params)
        boatPos.save()
        boatCPos = NewAssetCurrentPosition(**params)
        boatCPos.saveCurrent()
        self.publisher.sendDjango(boatCPos)
class FileReceiver(object):
    """Requests files from a remote FilePublisher over zmq and writes each
    received file into the configured output directory.

    The request (timeout, poll period, optional image processing settings)
    is re-sent periodically at half the timeout so the publisher keeps
    polling as long as this receiver is alive.
    """

    def __init__(self, opts):
        # Request payload sent to the publisher; optional fields are only
        # included when the corresponding option was given.
        self.request = {
            'timeout': opts.timeout,
            'pollPeriod': opts.pollPeriod,
        }
        if opts.timestampSpacing:
            self.request['timestampSpacing'] = opts.timestampSpacing
        if opts.imageResize:
            self.request['imageResize'] = parseImageResize(opts.imageResize)
        if opts.imageCrop:
            self.request['imageCrop'] = parseImageCrop(opts.imageCrop)
        if opts.imageFormat:
            self.request['imageFormat'] = opts.imageFormat
        self.outputDirectory = opts.output
        self.subtopic = opts.subtopic
        self.noRequest = opts.noRequest
        # expand '{subtopic}' placeholder in the module name, if present
        opts.moduleName = opts.moduleName.format(subtopic=opts.subtopic)
        self.publisher = ZmqPublisher(**ZmqPublisher.getOptionValues(opts))
        self.subscriber = ZmqSubscriber(**ZmqSubscriber.getOptionValues(opts))
        # re-request at half the timeout so the publisher never times out
        # while we are still interested
        self.requestPeriod = 0.5 * opts.timeout
        self.requestTimer = None  # set in start() unless noRequest

    def getTopic(self, msgType):
        # msgType is one of 'request', 'response', 'file'
        return 'geocamUtil.filePublisher.%s.%s' % (self.subtopic, msgType)

    def start(self):
        self.publisher.start()
        self.subscriber.start()
        self.subscriber.subscribeJson(self.getTopic('file'), self.handleFile)
        self.subscriber.subscribeRaw(self.getTopic('response'),
                                     self.handleResponse)
        if not self.noRequest:
            self.sendRequest()
            # keep a reference on self so the timer can't be garbage collected
            self.requestTimer = ioloop.PeriodicCallback(self.sendRequest,
                                                        self.requestPeriod * 1000)
            self.requestTimer.start()

    def sendRequest(self):
        logging.debug('sendRequest')
        self.publisher.sendJson(self.getTopic('request'), self.request)

    def handleResponse(self, topic, msg):
        logging.debug('received response: %s', repr(msg))
        # nothing to do

    def handleFile(self, topic, msg):
        # Wrapper that logs and swallows exceptions so one bad file message
        # does not kill the receiver.
        try:
            self.handleFile0(topic, msg)
        except:  # pylint: disable=W0702
            logging.warning('%s', traceback.format_exc())

    def handleFile0(self, topic, msg):
        """Decode one file message ({'file': {'filename', 'contents'}}) and
        write it to outputDirectory.

        'contents' is '<format>:<base64 data>'; the format tag is ignored.
        """
        f = msg['file']
        outputPath = os.path.join(self.outputDirectory, f['filename'])
        _fmt, data = f['contents'].split(':', 1)
        contents = base64.b64decode(data)
        # Fix: the old file(outputPath, 'w').write(contents) leaked the file
        # handle and wrote decoded binary data in text mode (newline
        # translation could corrupt it on some platforms). Use a context
        # manager and binary mode instead.
        with open(outputPath, 'wb') as outFile:
            outFile.write(contents)
        logging.debug('wrote %s bytes to %s', len(contents), outputPath)
class GpsTelemetryCleanup(object):
    """Subscribes to raw 'gpsposition' and 'compass' zmq messages, merges
    them, and stores cleaned-up positions as track points.

    Compass readings are cached (per resource) and folded into subsequent
    GPS fixes as the heading, if recent enough.  Track lookups are memoized
    in the cache under 'gpstrack.<resourceId>'.
    """

    def __init__(self, opts):
        self.opts = opts
        self.subscriber = ZmqSubscriber(
            **ZmqSubscriber.getOptionValues(self.opts))
        self.publisher = ZmqPublisher(
            **ZmqPublisher.getOptionValues(self.opts))

    def start(self):
        # Register a raw handler per topic, resolved by naming convention
        # ('handle_<topic>').
        self.publisher.start()
        self.subscriber.start()
        topics = ['gpsposition', 'compass']
        for topic in topics:
            self.subscriber.subscribeRaw(topic + ':',
                                         getattr(self, 'handle_' + topic))

    def flush(self):
        # flush bulk saves to db if needed. currently no-op.
        pass

    def handle_gpsposition(self, topic, body):
        # Wrapper that logs and swallows all exceptions so one bad record
        # does not kill the cleanup daemon.
        try:
            self.handle_gpsposition0(topic, body)
        except:  # pylint: disable=W0702
            logging.warning('%s', traceback.format_exc())
            logging.warning('exception caught, continuing')

    def handle_compass(self, topic, body):
        # Same swallow-and-log wrapper for compass records.
        try:
            self.handle_compass0(topic, body)
        except:  # pylint: disable=W0702
            logging.warning('%s', traceback.format_exc())
            logging.warning('exception caught, continuing')

    def adjustHeading(self, compassRecord):
        """Apply the configured declination correction in place, wrapping
        back into [0, 360]."""
        cc = settings.COMPASS_CORRECTION
        compassRecord['compass'] = compassRecord['compass'] + cc
        if compassRecord['compass'] > 360:
            compassRecord['compass'] -= 360

    def parseCompassData(self, compassSentence):
        """Parse one compass sentence into a dict of float fields.

        Raises AttributeError (via .group on None) if the sentence does not
        match the expected pattern.
        """
        # Sample compass NMEA sentence: $R92.3P-0.3C359.8X219.4Y-472.8Z19.7T35.4D270.1A87.7*6F
        # Pattern: '$' then nine (single-letter label, signed decimal) pairs.
        compassReParsed = re.match(
            "\$(?P<rollLbl>[A-Z])(?P<roll>-*[0-9\.]+)(?P<pitchLbl>[A-Z])(?P<pitch>-*[0-9\.]+)(?P<compassLbl>[A-Z])(?P<compass>-*[0-9\.]+)(?P<xLbl>[A-Z])(?P<x>-*[0-9\.]+)(?P<yLbl>[A-Z])(?P<y>-*[0-9\.]+)(?P<zLbl>[A-Z])(?P<z>-*[0-9\.]+)(?P<tempLbl>[A-Z])(?P<temp>-*[0-9\.]+)(?P<drillDLbl>[A-Z])(?P<drillD>-*[0-9\.]+)(?P<drillALbl>[A-Z])(?P<drillA>-*[0-9\.]+)",
            compassSentence)
        compassRecord = {
            "roll": float(compassReParsed.group('roll')),
            "pitch": float(compassReParsed.group('pitch')),
            "compass": float(compassReParsed.group('compass')),
            "x": float(compassReParsed.group('x')),
            "y": float(compassReParsed.group('y')),
            "z": float(compassReParsed.group('z')),
            "temp": float(compassReParsed.group('temp')),
            "drillD": float(compassReParsed.group('drillD')),
            "drillA": float(compassReParsed.group('drillA'))
        }
        return compassRecord

    def handle_compass0(self, topic, body):
        # example: 2:$GPRMC,225030.00,A,3725.1974462,N,12203.8994696,W,,,220216,0.0,E,A*2B
        serverTimestamp = datetime.datetime.now(pytz.utc)
        if body == 'NO DATA':
            logging.info('NO DATA')
            return

        # parse record: '<resourceId>:<label>:<compass sentence>'
        resourceIdStr, compassStr, content = body.split(":", 2)
        resourceId = int(resourceIdStr)
        if not checkDataQuality(resourceId, content):
            logging.info('UNRECOGNIZED OR CORRUPT COMPASS SENTENCE: %s',
                         content)
            return
        compassRecord = self.parseCompassData(content)
        self.adjustHeading(compassRecord)
        sourceTimestamp = serverTimestamp  # Compass has no independent clock

        # save subsystem status to cache
        myKey = "compassCleanupEV%s" % resourceIdStr
        status = {
            'name': myKey,
            'displayName': 'Compass Cleanup EV%s' % str(resourceIdStr),
            'statusColor': '#00ff00',
            'lastUpdated': datetime.datetime.utcnow().isoformat(),
            'elapsedTime': ''
        }
        cache.set(myKey, json.dumps(status))

        # save latest compass reading in memcache for GPS use
        cacheKey = 'compass.%s' % resourceId
        cacheRecordDict = {
            "timestamp": sourceTimestamp,
            "compassRecord": compassRecord
        }
        cache.set(cacheKey,
                  json.dumps(cacheRecordDict, cls=DatetimeJsonEncoder))

    def handle_gpsposition0(self, topic, body):
        # example: 2:$GPRMC,225030.00,A,3725.1974462,N,12203.8994696,W,,,220216,0.0,E,A*2B
        serverTimestamp = datetime.datetime.now(pytz.utc)
        if body == 'NO DATA':
            logging.info('NO DATA')
            return

        # parse record: '<resourceId>:<trackName>:<nmea sentence>'
        resourceIdStr, trackName, content = body.split(":", 2)
        resourceId = int(resourceIdStr)
        if not checkDataQuality(resourceId, content):
            logging.info('UNRECOGNIZED OR CORRUPT GPS SENTENCE: %s', content)
            return
        sentenceType, utcTime, activeVoid, lat, latHemi, lon,\
            lonHemi, speed, heading, date, declination, declinationDir,\
            modeAndChecksum = content.split(",")
        if OVERRIDE_GPS_DATE:
            # substitute today's (server) date for the GPS-reported date,
            # keeping the GPS time of day
            serverTime = datetime.datetime.now(pytz.utc)
            overrideDate = serverTime.strftime("%d%m%y")
            sourceTimestamp = datetime.datetime.strptime(
                '%s %s' % (overrideDate, utcTime), '%d%m%y %H%M%S.%f')
        else:
            sourceTimestamp = datetime.datetime.strptime(
                '%s %s' % (date, utcTime), '%d%m%y %H%M%S.%f')
        sourceTimestamp = sourceTimestamp.replace(tzinfo=pytz.utc)
        lat = parseTracLinkDM(lat, latHemi)
        lon = parseTracLinkDM(lon, lonHemi)

        # Get compass heading from compass record
        # TODO this clobbers heading read from GPS every time. but this is for basalt. do we care?
        heading = None
        compassCacheKey = 'compass.%s' % resourceId
        compassInfoString = cache.get(compassCacheKey)
        try:
            if compassInfoString:
                compassInfo = json.loads(compassInfoString)
                compassRecord = compassInfo["compassRecord"]
                # sanity check the timestamp in the compass record:
                # only trust the heading if the reading is recent
                compassTimeString = compassInfo['timestamp']
                compassTimestamp = dateparser(compassTimeString)
                tdelta = serverTimestamp - compassTimestamp
                if tdelta.total_seconds() <= MAX_COMPASS_TIME_SECONDS:
                    heading = float(compassRecord["compass"])
        except:
            # best-effort: a bad compass cache entry must not drop the fix
            traceback.print_exc()

        # save subsystem status to cache
        myKey = "telemetryCleanup"
        status = {
            'name': myKey,
            'displayName': 'Telemetry Cleanup',
            'statusColor': OKAY_COLOR,
            'lastUpdated': datetime.datetime.utcnow().isoformat(),
            'elapsedTime': ''
        }
        cache.set(myKey, json.dumps(status))

        # calculate which track record belongs to
        cacheKey = 'gpstrack.%s' % resourceId
        pickledTrack = cache.get(cacheKey)
        if pickledTrack:
            # cache hit, great
            track = pickle.loads(pickledTrack)
        else:
            # check db for a track matching this resourceId
            try:
                basaltVehicle = BasaltVehicle.objects.get(
                    resourceId=resourceId)
            except ObjectDoesNotExist:
                logging.warning('%s', traceback.format_exc())
                raise KeyError(
                    'Received GPS position for the EV with resourceId %s. Please ensure there is a vehicle with that id in the BasaltVehicle table.'
                    % resourceId)

            # Check for track name. We use explicit name if specified, otherwise
            # we check for an active flight and finally use the resourceId
            if len(trackName):
                logging.info("Using track name from listener: %s" % trackName)
            if len(trackName) == 0:
                # I.e. we were not given a name for track already
                try:
                    activeFlight = BasaltActiveFlight.objects.get(
                        flight__vehicle=basaltVehicle)
                    trackName = activeFlight.flight.name
                    logging.info(
                        "Using track name from BasaltActiveFlight: %s" %
                        trackName)
                except ObjectDoesNotExist:
                    trackName = basaltVehicle.name
                    logging.info("Using track name from EV arg: %s" % trackName)
            tracks = BasaltTrack.objects.filter(name=trackName)
            assert len(tracks) in (0, 1)
            if tracks:
                # we already have a valid track, use that
                track = tracks[0]
            else:
                # must start a new track
                track = BasaltTrack(name=trackName,
                                    vehicle=basaltVehicle,
                                    iconStyle=DEFAULT_ICON_STYLE,
                                    lineStyle=DEFAULT_LINE_STYLE,
                                    dataType=RAW_DATA_TYPE)
                track.save()
            # set cache for next time
            pickledTrack = pickle.dumps(track, pickle.HIGHEST_PROTOCOL)
            cache.set(cacheKey, pickledTrack, TRACK_CACHE_TIMEOUT)

        ######################################################################
        # asset position
        ######################################################################

        # create a NewAssetPosition row
        params = {
            'track': track,
            'timestamp': sourceTimestamp,
            'serverTimestamp': serverTimestamp,
            'latitude': lat,
            'longitude': lon,
            'heading': heading,
            'altitude': None,
        }
        pos = PastPosition(**params)
        pos.save()  # note: could queue for bulk save instead
        cpos = CurrentPosition(**params)
        cpos.saveCurrent()
        pos.broadcast()
        self.publisher.sendDjango(cpos)