def group(self, user_id=None, group_id=None, traits=None, context=None,
          timestamp=None, anonymous_id=None, integrations=None):
    if user_id is None:
        raise TypeError(u'User ID expected for group() calls, got None.')
    if group_id is None:
        raise TypeError(u'Group ID expected for group() calls, got None.')
    require('group_id', group_id, ID_TYPES)
    # Normalize the default so callers may omit traits entirely; require()
    # would otherwise fail on None.
    traits = traits or {}
    require('traits', traits, dict)

    data = {
        'write_key': self.write_key,
        'kwargs': {
            'integrations': integrations,
            'anonymousId': anonymous_id,
            'timestamp': timestamp,
            'groupId': group_id,
            'context': context,
            'userId': user_id,
            'traits': traits,
            'type': 'group',
        }
    }

    return self._enqueue(data)

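# Example call (a sketch; assumes a configured client instance named
# `analytics` -- the instance name and IDs are illustrative, not part of
# this module):
#
#   analytics.group(
#       user_id='user_123',
#       group_id='org_456',
#       traits={'name': 'Acme Inc.', 'plan': 'enterprise'},
#   )
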
def track(self, user_id=None, event=None, properties=None, context=None,
          timestamp=None, anonymous_id=None, integrations=None,
          ga_client_id=None):
    # Normalize the default so callers may omit properties entirely;
    # require() would otherwise fail on None.
    properties = properties or {}
    require('properties', properties, dict)
    require('event', event, string_types)

    # Attach the clientId for our server-side events, so we can correlate
    # them to front-end events in Google Analytics. Merge it into any
    # caller-supplied context rather than overwriting it.
    if ga_client_id is not None:
        context = dict(context or {})
        context['Google Analytics'] = {
            'clientId': ga_client_id,
        }

    data = {
        'write_key': self.write_key,
        'kwargs': {
            'anonymousId': anonymous_id,
            'context': context,
            'event': event,
            'integrations': integrations,
            'properties': properties,
            'timestamp': timestamp,
            'type': 'track',
            # str(None) would yield the literal string 'None' and defeat the
            # user_id/anonymous_id check in _enqueue, so only coerce real
            # values.
            'userId': str(user_id) if user_id is not None else None,
        }
    }

    return self._enqueue(data)

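# Example call (a sketch; the client instance and values are illustrative).
# Passing ga_client_id adds a 'Google Analytics' entry to the event context
# so this server-side event can be joined to the browser session:
#
#   analytics.track(
#       user_id='user_123',
#       event='Order Completed',
#       properties={'revenue': 42.0},
#       ga_client_id='GA1.2.1234567890.1234567890',
#   )
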
def identify(self, user_id=None, traits=None, context=None, timestamp=None,
             anonymous_id=None, integrations=None):
    if user_id is None:
        raise TypeError(u'User ID expected for identify() calls, got None.')
    # Normalize the default so callers may omit traits entirely; require()
    # would otherwise fail on None.
    traits = traits or {}
    require('traits', traits, dict)

    data = {
        'write_key': self.write_key,
        'kwargs': {
            'anonymousId': anonymous_id,
            'context': context,
            'integrations': integrations,
            'timestamp': timestamp,
            'traits': traits,
            'type': 'identify',
            'userId': user_id,
        },
    }

    return self._enqueue(data)

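# Example call (a sketch; the client instance and values are illustrative):
#
#   analytics.identify(
#       user_id='user_123',
#       traits={'email': 'jane@example.com', 'plan': 'free'},
#   )
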
def __init__(self, write_key=None, debug=False, max_queue_size=10000,
             send=True, on_error=None):
    # We need a different queue type to have a client that works properly
    # across separate processes. This client is meant to be shared and used
    # within a celery environment (although it should work otherwise). This
    # will hopefully fix
    # https://github.com/segmentio/analytics-python/issues/51, although it
    # is uncertain what exactly is causing it.
    require('write_key', write_key, str)

    self.queue = JoinableQueue(max_queue_size)
    self.consumer = Consumer(self.queue, write_key, on_error=on_error)
    self.write_key = write_key
    self.on_error = on_error
    self.debug = debug
    self.send = send

    if debug:
        self.log.setLevel(logging.DEBUG)

    # If we've disabled sending, just don't start the consumer.
    if send:
        self.consumer.start()

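# `require` and ID_TYPES are used throughout this module but defined
# elsewhere in the package; a minimal sketch of their shape, inferred from
# how they are called here (an assumption, not the canonical definitions):
#
#   import numbers
#   from six import string_types
#
#   ID_TYPES = (numbers.Number, string_types)
#
#   def require(name, field, data_type):
#       """Raise if `field` is not an instance of `data_type`."""
#       if not isinstance(field, data_type):
#           raise AssertionError(
#               '{0} must have {1}, got: {2}'.format(name, data_type, field))
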
def _enqueue(self, data):
    """Enqueue event calls.

    Perform some common validation on the tracking data.
    """
    if data['kwargs']['context'] is None:
        data['kwargs']['context'] = {}
    if data['kwargs']['integrations'] is None:
        data['kwargs']['integrations'] = {}

    require('context', data['kwargs']['context'], dict)
    require('integrations', data['kwargs']['integrations'], dict)
    require('type', data['kwargs']['type'], string_types)

    user_or_anon = data['kwargs']['userId'] or data['kwargs']['anonymousId']
    require('user_id or anonymous_id', user_or_anon, ID_TYPES)

    # Record the timestamp for when the event occurred, not the eventual
    # time at which it gets delivered in the background.
    data['kwargs']['timestamp'] = datetime.datetime.utcnow().replace(
        tzinfo=tzutc()).isoformat()
    data['kwargs']['messageId'] = str(uuid.uuid4())

    # walk like a duck, quack like a duck
    data['kwargs']['context']['library'] = {
        'name': 'analytics-python',
        'version': '1.0.3',
    }

    pickled_data = pickle.dumps(data)
    self.redis_conn.rpush(EVENT_QUEUE_NAME, pickled_data)

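# The worker that drains this Redis list is not shown in this module; a
# minimal sketch of the consuming side (hypothetical -- `handle_event` and
# the connection setup are illustrative, not part of this codebase):
#
#   while True:
#       _key, raw = redis_conn.blpop(EVENT_QUEUE_NAME)
#       event = pickle.loads(raw)
#       handle_event(event)  # e.g. batch events and POST them to the API
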
def alias(self, previous_id=None, user_id=None, context=None, timestamp=None,
          integrations=None):
    require('previous_id', previous_id, ID_TYPES)
    require('user_id', user_id, ID_TYPES)

    data = {
        'write_key': self.write_key,
        'kwargs': {
            'context': context,
            'integrations': integrations,
            'previousId': previous_id,
            'timestamp': timestamp,
            'type': 'alias',
            'userId': user_id,
        },
    }

    return self._enqueue(data)

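# Example call (a sketch; values are illustrative). alias() ties an
# anonymous pre-signup identity to the user ID assigned at signup:
#
#   analytics.alias(
#       previous_id='anon_9f2c1e',
#       user_id='user_123',
#   )
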