def handleOrder(user, product_id):
    """Place an order for ``product_id`` on behalf of ``user``.

    Looks the product up through the user's profile; when found, refreshes the
    user's Segment traits, tracks the purchase event, and persists an Order.

    Returns True when the order was created, False when the user's profile has
    no such product.
    """
    product = user.profile.has_product_id(product_id)
    if not product:
        return False
    # (Removed a commented-out Subscription query that referenced an undefined
    # `request` variable — dead code that could never have run here.)
    # Keep the user's traits fresh before tracking the purchase.
    analytics.identify(user.id, {
        'email': user.email,
        'name': user.first_name,
    })
    analytics.track(user.id, 'Placed an order', {
        'product_id': product.id,
        'product_name': product.name,
    })
    order = Order(user=user, product=product)
    order.save()
    return True
def test_async_full_identify(self):
    """An async identify with traits, context and timestamp is counted
    immediately and delivered after the background flush."""
    analytics.default_client.flush_at = 1
    # BUG FIX: `analytics.default_client.async = True` is a SyntaxError on
    # Python >= 3.7 ('async' is a reserved keyword); assign it reflectively.
    setattr(analytics.default_client, 'async', True)
    last_identifies = analytics.stats.identifies
    last_successful = analytics.stats.successful
    traits = {"Subscription Plan": "Free", "Friends": 30}
    context = {
        "ip": "12.31.42.111",
        "location": {"countryCode": "US", "region": "CA"},
        "userAgent": ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) " +
                      "AppleWebKit/534.53.11 (KHTML, like Gecko) Version/5.1.3 " +
                      "Safari/534.53.10"),
        "language": "en-us"
    }
    analytics.identify('*****@*****.**', traits, context=context,
                       timestamp=datetime.now())
    self.assertEqual(analytics.stats.identifies, last_identifies + 1)
    # Give the async worker time to deliver the message.
    sleep(2)
    self.assertEqual(analytics.stats.successful, last_successful + 1)
def test_async_basic_identify(self):
    """A basic async identify triggers exactly one flush (flush_at == 1) and
    a redundant manual flush is a no-op while the worker is active."""
    # flush after every message
    analytics.default_client.flush_at = 1
    # BUG FIX: `.async = True` is a SyntaxError on Python >= 3.7 ('async' is
    # a reserved keyword); assign the attribute reflectively instead.
    setattr(analytics.default_client, 'async', True)
    last_identifies = analytics.stats.identifies
    last_successful = analytics.stats.successful
    last_flushes = analytics.stats.flushes
    analytics.identify('*****@*****.**', {
        "Subscription Plan": "Free",
        "Friends": 30
    })
    self.assertEqual(analytics.stats.identifies, last_identifies + 1)
    # this should flush because we set the flush_at to 1
    self.assertEqual(analytics.stats.flushes, last_flushes + 1)
    # this should do nothing, as the async thread is currently active
    analytics.flush()
    # we should see no more flushes here
    self.assertEqual(analytics.stats.flushes, last_flushes + 1)
    sleep(1)
    self.assertEqual(analytics.stats.successful, last_successful + 1)
async def save_id():
    """Fetch the bot's own identity, cache it in module globals, and report it
    to Sentry and (when enabled) Segment.

    Refactor: the two branches differed only in whether a username was
    included, so the payloads are now built once and the username appended
    conditionally — behavior is unchanged.
    """
    global user_id, user_bot
    me = await bot.get_me()
    user_id = me.id
    user_bot = me.bot
    sentry_user = {
        "id": user_id,
        "name": me.first_name,
        "ip_address": "{{auto}}",
        "bot": f"{user_bot}"
    }
    traits = {
        'name': me.first_name,
        'bot': f"{user_bot}"
    }
    if me.username is not None:
        sentry_user["username"] = me.username
        traits['username'] = me.username
    sentry_sdk.set_user(sentry_user)
    if allow_analytics:
        analytics.identify(user_id, traits)
    # For bot accounts, expose the username instead of the boolean flag.
    if user_bot:
        user_bot = me.username
    logs.info(f"{lang('save_id')} {me.first_name}({user_id})")
def _track_user_login(user, request):
    """Sends a tracking event for a successful login."""
    if not (hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY):
        return
    tracking_context = tracker.get_tracker().resolve_context()
    traits = {
        'email': request.POST['email'],
        'username': user.username,
    }
    # Disable MailChimp because we don't want to update the user's email
    # and username in MailChimp on every page load. We only need to capture
    # this data on registration/activation.
    integrations = {'MailChimp': False}
    analytics.identify(user.id, traits, integrations)
    event_properties = {
        'category': "conversion",
        'label': request.POST.get('course_id'),
        'provider': None,
    }
    segment_context = {
        'ip': tracking_context.get('ip'),
        'Google Analytics': {'clientId': tracking_context.get('client_id')},
    }
    analytics.track(user.id, "edx.bi.user.account.authenticated",
                    event_properties, context=segment_context)
def invite_contact_list(request):
    """Get a list of contact items {'name': Shahin, 'email': [email protected]}
    and trigger invitation email from inviter user.

    Responds with ``{"done": true}`` on success or ``{"error": ...}`` when the
    posted payload is malformed.
    """
    contacts = request.POST.get('contacts', None)
    result = {}
    try:
        contacts = json.loads(contacts)
        inviter = request.user
        inviter_id = request.user.id.user_id
        inviter_name = inviter.first_name + inviter.last_name
        for item in contacts:
            email = item['email']
            name = item['name']
            user_id = get_or_create_user_id(email).user_id
            analytics.identify(str(user_id), {
                "email": email,
                "firstName": name,
            })
            data = {
                'name': name,
                'inviter': str(inviter),
                'inviter_id': inviter_id,
                'inviter_name': inviter_name,
            }
            analytics.track(user_id, 'invited', data)
        result['done'] = True
    except (TypeError, ValueError, KeyError, AttributeError):
        # Bad JSON, missing 'email'/'name' keys, or a malformed payload.
        # (Was a bare `except`, which also swallowed SystemExit and
        # KeyboardInterrupt.)
        result['error'] = 'Invalid data'
    return HttpResponse(json.dumps(result))
def register():
    """Handle the signup form: validate, create the user, log them in, and
    record the registration with Segment; re-render with errors otherwise."""
    form = SignupForm()
    if request.method == 'POST' and form.validate():
        email_taken = User.objects(email=form.email.data)
        username_taken = User.objects(username=form.username.data)
        if email_taken:
            form.errors['email'] = ['Email is already taken.']
        elif username_taken:
            form.errors['username'] = ['Username is already taken.']
        else:
            new_user = User(email=form.email.data, username=form.username.data)
            new_user.set_password(form.password.data)
            new_user.save()
            login_user(new_user)
            uid = str(new_user.id)
            analytics.identify(uid, {'email': str(new_user.email)})
            analytics.track(uid, 'Registered (srv)')
            return redirect('/data')
    # Surface every validation error to the user.
    for field_name, messages in form.errors.items():
        for message in messages:
            flash(u"Error in %s - %s" % (
                getattr(form, field_name).label.text, message))
    return render_template('users/user.html', form=form)
def register_item(alias, api_key, myredis, mydao):
    """Register *alias* under *api_key*, creating the underlying item when it
    does not exist yet.

    Raises InvalidApiKeyException, ItemAlreadyRegisteredToThisKey, or
    ApiLimitExceededException.
    """
    if not is_valid_key(api_key):
        raise InvalidApiKeyException
    if is_registered(alias, api_key):
        raise ItemAlreadyRegisteredToThisKey
    namespace, nid = alias
    tiid = item.get_tiid_by_alias(namespace, nid, mydao)
    if not tiid:
        if is_over_quota(api_key):
            analytics.track("CORE", "Raised Exception", {
                "exception class": "ApiLimitExceededException",
                "api_key": api_key,
            })
            raise ApiLimitExceededException
        # Under quota: create the item and record the creation.
        tiid = item.create_item(namespace, nid, myredis, mydao)
        analytics.identify(api_key, {"name": api_key, "api_user": True})
        analytics.track(api_key, "Created item because of registration", {
            "tiid": tiid,
            "namespace": namespace,
            "nid": nid,
            "api_key": api_key,
        })
    api_user = get_api_user(api_key)
    registered_item = save_registered_item(alias, api_user) if api_user else None
    return {"tiid": tiid, "registered_item": registered_item}
def identify_user(user):
    """
    user, class User DB object
    Expects to be run within a session
    Note already checking system mode uptop with
    "if settings.DIFFGRAM_SYSTEM_MODE == "sandbox"

    This is just for passing info about a user to the analytics system;
    failures are logged and never propagated to the caller.
    """
    try:
        analytics.identify(
            user.member_id, {
                'email': user.email,
                'first_name': user.first_name,
                'last_name': user.last_name,
                'how_hear_about_us': user.how_hear_about_us,
                'signup_role': user.signup_role,
                'signup_demo': user.signup_demo,
                'city': user.city,
                'company': user.company_name
            })
    except Exception:
        # Was a bare `except` (also swallowed SystemExit/KeyboardInterrupt)
        # followed by a redundant `pass`.
        print("Analytics error")
def identify(user_id, properties, context=None):
    """Wrapper for emitting Segment identify event."""
    # No-op unless a Segment key is configured (missing or falsy attribute).
    segment_key = getattr(settings, 'LMS_SEGMENT_KEY', None)
    if not segment_key:
        return
    segment_context = dict(context) if context else {}
    analytics.identify(user_id, properties, segment_context)
def register_item(alias, api_key, myredis, mydao):
    """Register *alias* to *api_key*; lazily create the item on first sight.

    Raises InvalidApiKeyException, ItemAlreadyRegisteredToThisKey, or
    ApiLimitExceededException when the key is over its creation quota.
    """
    if not is_valid_key(api_key):
        raise InvalidApiKeyException
    if is_registered(alias, api_key):
        raise ItemAlreadyRegisteredToThisKey
    ns, nid = alias
    tiid = item.get_tiid_by_alias(ns, nid, mydao)
    if not tiid:
        if is_over_quota(api_key):
            analytics.track("CORE", "Raised Exception", {
                "exception class": "ApiLimitExceededException",
                "api_key": api_key,
            })
            raise ApiLimitExceededException
        tiid = item.create_item(ns, nid, myredis, mydao)
        # Record both the key and the creation event.
        analytics.identify(api_key, {"name": api_key, "api_user": True})
        analytics.track(
            api_key,
            "Created item because of registration",
            {
                "tiid": tiid,
                "namespace": ns,
                "nid": nid,
                "api_key": api_key,
            })
    api_user = get_api_user(api_key)
    registered_item = save_registered_item(alias, api_user) if api_user else None
    return {"tiid": tiid, "registered_item": registered_item}
def log_analytics(request, event, properties):
    """Record *event* with Segment, keyed by a per-session pseudonymous uid.

    Silently skipped in DEBUG, when no SEGMENT_IO_KEY is configured, or when
    the request comes from the pingdom monitoring bot.
    """
    try:
        import analytics
        from ipware.ip import get_ip  # was a redundant `as get_ip` alias
        if settings.DEBUG:
            return
        if not hasattr(settings, "SEGMENT_IO_KEY"):
            logger.warning("Cannot send analytics. No Segment IO Key has been set")
            return
        if "pingdom" in request.META.get("HTTP_USER_AGENT", ""):
            logger.warning("Not recording analytics. Ignored pingdom bot")
            return
        api_key = settings.SEGMENT_IO_KEY
        ip = get_ip(request)
        # A stable random pseudonym per session, generated on first sight.
        name = names.get_full_name()
        uid = request.session.get("uid", name)
        request.session["uid"] = uid
        analytics.init(api_key)
        analytics.identify(uid, {"$name": uid}, {"$ip": ip})
        analytics.track(uid, event=event, properties=properties)
    except Exception:
        # BUG FIX: `except Exception, e` was Python-2-only syntax and `e` was
        # unused; logger.exception already records the traceback.
        logger.exception("Error handling analytics")
def track_command(user_id, command_name):
    """Report a Sublime Text command run to Segment for *user_id*."""
    analytics.write_key = ANALYTICS_WRITE_KEY
    analytics.identify(user_id)
    properties = {"category": "ST3", "label": command_name}
    analytics.track(user_id, "Run Command", properties)
def unsubscribe_newsletter(request):
    """Remove ``?email=`` from the newsletter list and mirror the opt-out to
    Segment; always renders the unsubscribed page (best effort)."""
    email = request.GET.get('email', None)
    if not email:
        return HttpResponseBadRequest("")
    email = email.lower()
    try:
        subscriber = NewsletterSubscriber.objects.get(email=email)
        subscriber.delete()
        useremail = get_or_create_user_id(email)
        analytics.identify(useremail.user_id, traits={
            'newsletter': False,
            'unsubscribed': True,
        })
        analytics.track(useremail.user_id, 'unsubscribed_newsletter', {
            'email': email,
        })
    except Exception:
        # Deliberately best-effort: show the "unsubscribed" page even when the
        # address was never subscribed. (Was a bare `except`, which also
        # caught SystemExit/KeyboardInterrupt; the unused `subscriber = None`
        # initializer was dropped.)
        pass
    return render_to_response("unsubscribed.html", {},
                              content_type="application/json")
def track_activate(user_id):
    """Report plugin activation to Segment, labelled with the host platform."""
    analytics.write_key = ANALYTICS_WRITE_KEY
    analytics.identify(user_id)
    analytics.track(
        user_id,
        "Activate",
        {"category": "ST3", "label": sublime.platform()},
    )
def send_to_segment(context):
    """Publish one model-regression result row to Segment, keyed by the
    GitHub Actions run id; *context* entries override the base properties."""
    env = os.environ
    job_id = env["GITHUB_RUN_ID"]
    analytics.identify(job_id, {
        "name": "model-regression-tests",
        "created_at": datetime.datetime.now(),
    })
    properties = {
        "dataset": env["DATASET_NAME"],
        "dataset_repository_branch": env["DATASET_REPOSITORY_BRANCH"],
        "workflow": env["GITHUB_WORKFLOW"],
        "config": env["CONFIG"],
        "pr_url": env["PR_URL"],
        "accelerator_type": env["ACCELERATOR_TYPE"],
        "test_run_time": env["TEST_RUN_TIME"],
        "train_run_time": env["TRAIN_RUN_TIME"],
        "total_run_time": env["TOTAL_RUN_TIME"],
        "github_run_id": env["GITHUB_RUN_ID"],
        "github_sha": env["GITHUB_SHA"],
        "github_event": env["GITHUB_EVENT_NAME"],
    }
    properties.update(context)
    analytics.track(job_id, "results", properties)
def run(self, invitation_id):
    """Notify segment.io that an invitation was received, registering the
    invitee as a lead when they have no admin account yet."""
    logger = self.get_logger()
    try:
        invitation = Invitation.objects.select_related('inviter').get(id=invitation_id)
    except Invitation.DoesNotExist:
        logger.warning('Cannot notify segment.io about Invitation[id=%d] because it cannot be found.',
                       invitation_id)
        return
    logger.info('Notify segment.io about %s.', invitation)
    email = invitation.email
    if invitation.admin_id is None:
        # No admin account yet — identify the invitee as an inactive lead.
        analytics.identify(email,
                           traits={'email': email,
                                   'Lifecycle stage': 'lead',
                                   'source': 'Invitation'},
                           context={'active': False},
                           timestamp=self.timestamp)
    invitation_url = settings.INVITATION_SITE_URL + '?invitation_key=%s' % invitation.key
    # if invitation was created by admin, we will include it in invitation event
    inviter = invitation.inviter.email if invitation.inviter is not None else None
    analytics.track(invitation.admin_id or email, 'Invitation received', {
        'invitationUrl': invitation_url,
        'inviter': inviter,
        'instance': invitation.instance.name
    }, timestamp=self.timestamp)
def register():
    """Sign a new user up, guarding against duplicate email/username, and
    record the registration with Segment."""
    form = SignupForm()
    if request.method == 'POST' and form.validate():
        existing_by_email = User.objects(email=form.email.data)
        existing_by_username = User.objects(username=form.username.data)
        if existing_by_email:
            form.errors['email'] = ['Email is already taken.']
        elif existing_by_username:
            form.errors['username'] = ['Username is already taken.']
        else:
            user = User(email=form.email.data, username=form.username.data)
            user.set_password(form.password.data)
            user.save()
            login_user(user)
            analytics.identify(str(user.id), {'email': str(user.email)})
            analytics.track(str(user.id), 'Registered (srv)')
            return redirect('/data')
    # Flash one message per validation error before re-rendering the form.
    for field, errors in form.errors.items():
        for error in errors:
            flash(u"Error in %s - %s" % (getattr(form, field).label.text, error))
    return render_template('users/user.html', form=form)
def lambda_handler(event, context):
    """Segment webhook handler: on an OrderCompleted event, fetch product
    recommendations for the user and push them back to Segment as a trait."""
    logger.debug("Segment event: " + json.dumps(event))  # Remove this if using in production.
    # Segment will invoke your function once per event type you have configured
    # in the Personalize destination in Segment.
    try:
        if 'userId' in event and event['name'] == 'OrderCompleted':
            user_id = event['userId']
            logger.debug('Looking up product recommendations for user ' + user_id)
            url = f'{recommendations_service_url}/recommendations?userID={user_id}&fullyQualifyImageUrls=1&numResults=4'
            response = requests.get(url)
            # None when the recommendation service did not answer OK.
            recommendations = None
            if response.ok:
                recommendations = response.json()
            logger.debug(recommendations)
            # Send the user recommendations to Segment
            analytics.identify(
                user_id,
                {'personalized_recommendations': recommendations})
    except ValueError:
        # BUG FIX: the bound names `ve`/`ke` were never used; use
        # logger.exception so the traceback is recorded too.
        logger.exception("Invalid JSON format received, check your event sources.")
    except KeyError:
        logger.exception("Invalid configuration for Personalize, most likely.")
def identify(email, name, attributes, orgs=None):  # pragma: no cover
    """
    Creates and identifies a new user to our analytics backends. It is ok to call this
    with an existing user, their name and attributes will just be updated.

    NOTE(review): this mutates the caller's *attributes* dict (pops
    first_name/last_name/email for intercom) — confirm callers don't reuse it.
    """
    # BUG FIX: the default was a mutable `orgs=[]`, shared across calls.
    if orgs is None:
        orgs = []
    # no op if we aren't prod
    if not settings.IS_PROD:
        return
    # post to segment if configured
    if _segment:
        segment_analytics.identify(email, attributes)
    # post to intercom if configured
    if _intercom:
        try:
            # rip out duplicate fields for intercom
            for key in ("first_name", "last_name", "email"):
                attributes.pop(key, None)
            intercom_user = _intercom.users.create(email=email, name=name,
                                                   custom_attributes=attributes)
            intercom_user.companies = [
                dict(
                    company_id=org.id,
                    name=org.name,
                    created_at=datetime_to_json_date(org.created_on),
                    custom_attributes=dict(brand=org.brand, org_id=org.id),
                )
                for org in orgs
            ]
            _intercom.users.save(intercom_user)
        except Exception:
            # Was a bare `except`, which also caught SystemExit/KeyboardInterrupt.
            logger.error("error posting to intercom", exc_info=True)
def checkout(request, invoiceDate, redirectAddress, bank_transaction, failredirectAddress):
    """Start a Mellat bank payment: persist a BankPayment row, call the bank's
    bpPayRequest SOAP endpoint, and redirect the user to the gateway (or to an
    error page when the bank returns a non-zero status)."""
    invoiceNumber = AmirId(request.session['user_mail'])
    amount = bank_transaction.amount
    request.session["invoice_id"] = invoiceNumber
    now = datetime.now()
    # Persist the pending payment before contacting the bank.
    pay = BankPayment()
    pay.invoice_number = invoiceNumber
    pay.amount = amount
    pay.session_key = request.session.session_key
    pay.user_mail = request.session["user_mail"]
    pay.gateway = "mellat"
    pay.user_id = request.session.get("user_id", None)
    pay.save()
    context = {
        'terminalId': TERMINAL_ID,
        'userName': USERNAME,
        'userPassword': PASSWORD,
        'orderId': invoiceNumber,
        'amount': amount,
        'localDate': now.strftime("%Y%m%d"),
        # NOTE(review): "%H%I%S" mixes 24-hour (%H) and 12-hour (%I) hours —
        # %M (minutes) was presumably intended; confirm before changing.
        'localTime': now.strftime("%H%I%S"),
        'additionalData': pay.user_mail,
        'callBackUrl': redirectAddress,
        'payerId': "0"
    }
    print context
    client = suds.client.Client(ENDPOINT)
    # The bank answers "status[,refId]" as a comma-separated string.
    result = client.service.bpPayRequest(**context).split(",")
    if result[0] != "0" and not os.environ.get('DEVELOPMENT', False):
        # Non-zero status: report catastrophic codes to analytics, record the
        # failure, and send the user to the error page.
        if int(result[0]) in CATASTROPHIC_ERRORS:
            analytics.identify("Pivot_Error", traits={'email': "REPORT_EMAIL"})
            analytics.track("Pivot_Error", "f5_error", {
                "error": result[0],
                "reference": invoiceNumber,
            })
            analytics.flush()
        print result
        request.session['transaction'].res_code = int(result[0])
        pay.ref_id = result[0]
        pay.save()
        return payment.views.create_redirect_page(request, {
            "mellat": True,
            "error": True
        })
    print bank_transaction
    # Success: record the bank's reference id and redirect to the gateway.
    context["refId"] = result[1]
    pay.ref_id = context["refId"]
    pay.save()
    bank_transaction.ref_id = context["refId"]
    request.session['transaction'] = bank_transaction
    context["mellat"] = True
    return payment.views.create_redirect_page(request, context)
def run(self, *args, **kwargs):
    """Identify a user to Segment from task kwargs: uuid, email, name."""
    traits = {
        'email': kwargs.get('email'),
        'name': kwargs.get('name'),
    }
    analytics.identify(kwargs.get('uuid'), traits)
def run_uservoice():
    """Fetch UserVoice ticket stats and forward them to Segment as a single
    event under the fixed user id "uservoice"; returns the stats dict."""
    analytics.identify(user_id="uservoice")
    ticket_dict = Uservoice.get_ticket_stats()
    print ticket_dict
    analytics.track(user_id="uservoice", event='UserVoice ticket stats',
                    properties=ticket_dict)
    return(ticket_dict)
def login(self, api_key=None):
    """Log in to the Planet client, report the session to analytics/Sentry,
    persist credentials when requested, then switch panels."""
    if self.logged_in():
        return
    # Do login, push any error to message bar
    try:
        # Don't switch panels just yet
        self.p_client.blockSignals(True)
        self.p_client.log_in(self.leUser.text(), self.lePass.text(), api_key=api_key)
        self.p_client.blockSignals(False)
    except LoginException as e:
        self.show_message('Login failed!', show_more=str(e.__cause__), level=Qgis.Warning)
        # Stay on login panel if error
        return
    # Login OK
    self.api_key = self.p_client.api_key()
    user = self.p_client.user()
    if is_segments_write_key_valid():
        traits = {
            "email": user["email"],
            "apiKey": user["api_key"],
            "organizationId": user["organization_id"],
            "programId": user["program_id"],
            "qgisVersion": Qgis.QGIS_VERSION,
            "pluginVersion": plugins.all()["planet_explorer"]['version_installed'],
        }
        analytics.identify(user["email"], traits)
        analytics.track(user["email"], "Log in to Explorer")
    if is_sentry_dsn_valid():
        with sentry_sdk.configure_scope() as scope:
            scope.user = {"email": user["email"]}
    # Store settings
    if self.chkBxSaveCreds.isChecked():
        self._store_auth_creds()
    # For debugging
    specs = (
        f'logged_in={self.logged_in()}\n\n'
        f'api_key = {self.p_client.api_key()}\n\n'
        f'user: {self.p_client.user()}\n\n'
    )
    log.debug(f'Login successful:\n{specs}')
    # Now switch panels
    self.p_client.loginChanged.emit(self.p_client.has_api_key())
def send_user_state_analytics(user, request):
    """Push each device's user status to Segment as a per-product 'state'
    trait on the user's profile."""
    state_dict = {}
    # Numeric user_status codes from the device API, mapped to labels.
    state = {'1': 'active', '2': 'limited', '3': 'blocked'}
    for (product, device_id) in get_device_details(user).items():
        device_data = get_device_status(request, device_id)
        try:
            state_dict[product] = state[str(device_data['user_status'])]
        except Exception as e:
            # Best effort: a device with a missing/unknown status is skipped.
            print e
    analytics.identify(user.id.user_id, {'state': state_dict})
def login_log(sender, **kwargs):
    """Django login-signal handler: sync the user's traits to Segment and
    track the 'Logged In' event. Expects the user in kwargs['user']."""
    print "user logged in"
    user = kwargs['user']
    analytics.identify(user.id, {
        'email': user.email,
        'firstName': user.first_name,
        'lastName': user.last_name,
        'username': user.get_username()
    })
    analytics.track(user.id, 'Logged In')
def run(self, admin_id, uid, email, token):
    """Track the sign-up activation event and (re)identify the admin with
    their activation URL."""
    activation_url = settings.GUI_ACTIVATION_URL % {'uid': uid, 'token': token}
    event_data = {'activationUrl': activation_url, 'authBackend': 'password'}
    analytics.track(admin_id, 'Sign Up Activation', event_data,
                    timestamp=self.timestamp)
    traits = {'email': email, 'activationUrl': activation_url}
    analytics.identify(admin_id, traits=traits, context={'active': False},
                       timestamp=self.timestamp)
def run(self, admin_id, email, created_at, distinct_id=None, backend='password', activation_url=None):
    """Alias any anonymous ids to the new admin, mark them a customer, and
    track the 'Sign up' event."""
    if distinct_id is not None:
        analytics.alias(distinct_id, email)
    analytics.alias(email, admin_id)
    traits = {
        'email': email,
        'Lifecycle stage': 'customer',
        'created_at': created_at,
    }
    analytics.identify(admin_id, traits=traits, timestamp=self.timestamp)
    payload = {'authBackend': backend}
    if activation_url is not None:
        payload['activationUrl'] = activation_url
    analytics.track(admin_id, 'Sign up', payload, timestamp=self.timestamp)
def login_log(sender, **kwargs):
    """Django login-signal handler (duplicate of the variant above): sync the
    user's traits to Segment and track the 'Logged In' event."""
    print "user logged in"
    user = kwargs['user']
    analytics.identify(
        user.id, {
            'email': user.email,
            'firstName': user.first_name,
            'lastName': user.last_name,
            'username': user.get_username()
        })
    analytics.track(user.id, 'Logged In')
def login():
    """Flask login view: authenticate via LoginForm, then record the login
    with Segment and redirect to /data."""
    form = LoginForm()
    if request.method == 'POST' and form.validate():
        # NOTE(review): .first() can return None; login_user(None) would then
        # fail — confirm form validation guarantees the user exists.
        user = User.objects(email=form.email.data).first()
        login_user(user)
        analytics.identify(str(user.id), {'email': str(user.email)})
        analytics.track(str(user.id), 'Logged in (srv)')
        return redirect('/data')
    else:
        print form.errors
    return render_template('users/user.html', form=form)
def register_api(request):
    """API registration endpoint: validate the signup form and device
    identifiers, create user + device records (rolling back on failure),
    identify the user to Segment, and send the registration email."""
    form = UserCreationForm(request.POST, None)
    if not form.is_valid():
        res = {
            "messages": [(unicode(form.fields[field].label), message)
                         for field, message in form.errors.items()],
            "error": True
        }
        return JsonResponse(res)
    # Device fingerprint fields; idfv/aid/uuid are all required.
    idfv = request.POST.get('idfv', None)
    aid = request.POST.get('aid', None)
    uuid = request.POST.get('uuid', None)
    dev = request.POST.get('dev', None)
    hash_req = request.POST.get('data', None)
    if not (idfv and aid and uuid) or not validate_basic_device(
            idfv, aid, uuid, dev, hash_req):
        return JsonResponse({"error": True})
    form.instance.id = get_or_create_user_id(request.POST.get("username"))
    user = form.save()
    full_name = user.first_name.strip()
    name_list = full_name.split()
    analytics.identify(
        user.id.user_id, {
            "email": user.username,
            "fullName": full_name,
            # Skip the leading honorific when present so firstName is the
            # actual given name.
            "firstName": name_list[1] if name_list[0] == 'ﺱیﺩ' else name_list[0],
        })
    try:
        device = get_or_create_basic_device(user, idfv, aid, uuid)
    except:
        # Roll the freshly created user back if the device cannot be created.
        user.delete()
        return JsonResponse({"error": True})
    try:
        user_info_object = appdb.users.insert({
            'user_id': str(user.id.user_id),
            'device': str(device.pk),
            "campaigns": get_basic_campaign()
        })
    except:
        # Roll back both records when the document store insert fails.
        user.delete()
        device.delete()
        return JsonResponse({"error": True})
    _registration_email_send(request, user)
    return JsonResponse({"done": True})
def _track_user_registration(user, profile, params, third_party_provider):
    """Track the user's registration."""
    if not (hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY):
        return
    tracking_context = tracker.get_tracker().resolve_context()
    traits = {
        'email': user.email,
        'username': user.username,
        'name': profile.name,
        # Mailchimp requires the age & yearOfBirth to be integers, we send a sane integer default if falsey.
        'age': profile.age or -1,
        'yearOfBirth': profile.year_of_birth or datetime.datetime.now(UTC).year,
        'education': profile.level_of_education_display,
        'address': profile.mailing_address,
        'gender': profile.gender_display,
        'country': text_type(profile.country),
    }
    identity_args = [user.id, traits]
    if hasattr(settings, 'MAILCHIMP_NEW_USER_LIST_ID'):
        identity_args.append(
            {"MailChimp": {"listId": settings.MAILCHIMP_NEW_USER_LIST_ID}})
    analytics.identify(*identity_args)
    provider_name = third_party_provider.name if third_party_provider else None
    analytics.track(
        user.id,
        "edx.bi.user.account.registered",
        {
            'category': 'conversion',
            'label': params.get('course_id'),
            'provider': provider_name,
        },
        context={
            'ip': tracking_context.get('ip'),
            'Google Analytics': {'clientId': tracking_context.get('client_id')},
        })
def send_user_latestplan_analytics(user, request):
    """Report the latest plan label for each of the user's devices to Segment
    as the 'latestPlan' trait (best effort per device)."""
    latestPlan = {}
    for device in user.device_set.all():
        if not device.device_id:
            continue
        import apps.panel.f5adapter as f5
        tup = f5.device_details(request, device.id)
        try:
            latestPlan[device.device_id] = get_plan_label(
                tup[2]['plan_label'])
        except Exception:
            # Best effort: devices without a readable plan label are skipped.
            # (Was a bare `except`, which also swallowed KeyboardInterrupt.)
            pass
    analytics.identify(user.id.user_id, {'latestPlan': latestPlan})
def send_userdownloadlink_analytics(user):
    """Fetch each device's app-download link and store them on the user's
    Segment profile under 'userDownloadUrl' (best effort per device)."""
    userDownloadUrl = {}
    for (product, device_id) in get_device_details(user).items():
        udid = Device.objects.get(pk=device_id).device_id
        try:
            # SECURITY: verify=False disables TLS certificate validation —
            # kept for behavior parity, but should be revisited.
            r = requests.get('https://USER_APP_DOWNLOAD_PREFIX?udid=%s' % udid,
                             verify=False)
            js = r.json()
            userDownloadUrl[product] = js['link']
        except Exception:
            # Best effort per device. (Was a bare `except`, which also
            # swallowed SystemExit/KeyboardInterrupt.)
            pass
    analytics.identify(user.id.user_id, {'userDownloadUrl': userDownloadUrl})
def receive_device_details(request):
    """MDM enrollment callback: parse the posted plist, bind the reported UDID
    to the device behind the one-time challenge token, fire analytics, and
    redirect the client to the configured success URL."""
    print repr(request)
    body = request.body
    # The payload wraps the plist in extra bytes; slice out <?xml … </plist>.
    body = body[body.find('<?xml version="1.0"'):body.find('</plist>') + 8]
    pl = readPlistFromString(body)
    deviceRecordId = pl['CHALLENGE']
    if not deviceRecordId:
        raise PermissionDenied()
    challenge_obj = get_object_or_404(DeviceChallengeToken,
                                      token=deviceRecordId, is_used=False)
    deviceId = pl['UDID']
    device = challenge_obj.device
    device.device_id = deviceId
    device.device_version = pl['VERSION']
    device.device_product = pl['PRODUCT']
    device.save()
    print 'DEVICE', device, deviceId
    # The challenge token is single-use.
    challenge_obj.is_used = True
    challenge_obj.save()
    res = app.send_task('appsign.udid_tasks.register_udid',
                        args=[pl['UDID'], device.user.first_name],
                        queue='udid')
    print pl
    analytics.identify(device.user.id.user_id,
                       traits={"email": device.user.username})
    analytics.track(device.user.id.user_id, 'send_udid', {"udid": pl['UDID']})
    send_user_state_analytics(device.user, request)
    send_user_udid_analytics(device.user)
    send_user_latestplan_analytics(device.user, request)
    # See http://stackoverflow.com/questions/5781314/getting-a-device-udid-from-mobileconfig
    # Shold be like http://example.com/directory
    url = request.build_absolute_uri(
        reverse('receiveDeviceDetailsSuccess', kwargs={'device_id': deviceId}))
    #should I pass params?
    url += "?params=done"
    # NOTE(review): the computed url above is immediately discarded here — the
    # redirect always goes to settings.REPO_SUCCESS; confirm this is intended.
    url = settings.REPO_SUCCESS
    # return redirect('receiveDeviceDetailsSuccess' , params)
    print "heading to %s" % url
    return HttpResponsePermanentRedirect(url)
def create_profile(request):
    """Create a chat profile from POSTed fields (with best-effort GeoIP
    enrichment), record it with Segment, and redirect to the chat page.
    Redirects home when no name was posted."""
    profile_id = None
    if request.POST.get('name'):
        # you did POST - need to create your profile and load page
        profilesDao = ProfilesDao()
        client_ip = get_client_ip(request)
        # Get geoip info — best effort with a 1s budget so the page never
        # hangs on the lookup service.
        geoip_info_response = None
        try:
            geoip_info_response = requests.get(
                'http://freegeoip.net/json/%s' % client_ip, timeout=1.0)
        except Exception:
            # Was a bare `except`; Exception keeps Ctrl-C/SystemExit working.
            pass
        if geoip_info_response and geoip_info_response.status_code == 200:
            geoip_info = geoip_info_response.json()
        else:
            # TODO: Log a bad response code somewhere
            geoip_info = None
        profile_id = profilesDao.create_new_profile(
            name=request.POST['name'],
            email=request.POST['email'],
            country=request.POST['country'],
            city=request.POST['city'],
            interests=request.POST['interests'],
            event_slug=request.event.slug,
            ip=client_ip,
            geoip_info=geoip_info)
        analytics.identify(request.POST['email'], {
            'name': request.POST['name'],
            'city': request.POST['city'],
            'interests': request.POST['interests'],
            'eventSlug': request.event.slug,
            'ip': get_client_ip(request)
        })
        analytics.track(request.POST['email'], 'Joined event', {
            'eventSlug': request.event.slug,
            'country': request.POST['country'],
            'ip': get_client_ip(request)
        })
        request.session['profile_id'] = profile_id
        return shortcuts.redirect('/chat')
    else:
        return shortcuts.redirect('/')
def identify(self, traits, timestamp=None, context=None):
    """Send a segment.io identify call for this wrapper's user id.

    :param traits: data to be associated with the user
    :param timestamp: a datetime.datetime timestamp
    :param context: optional dictionary specified by segment.io
    """
    context = context or {}
    log.info('Identified %s with traits: %s' % (self.user_id, traits))
    analytics.identify(user_id=self.user_id,
                       traits=traits,
                       context=context,
                       timestamp=timestamp)
def __init__(self):
    """Generate this user's properties and sign it up to Segment."""
    self.name = fake.name()
    self.id = str(uuid.uuid4())
    # The tens digit of the user's age
    self.age_bracket = random.randint(2, 7)
    # Max number of consecutive days that user will stay before leaving
    # (attribute name kept as-is for compatibility with existing readers)
    self.perferred_stay = random.choice([1, 2, 5, 7])
    # A multiplier to enthusiasm losses to bad weather
    self.weather_tolerance = random.random() * self.age_bracket
    self.satisfaction = random.random()
    self.reservation_length = self.perferred_stay
    analytics.identify(self.id, {'name': self.name})
def run_couch():
    """Pull per-quasigenre collection counts from CouchDB, push them to
    Segment as one event, and return the raw view rows."""
    analytics.identify(user_id="stats")
    rows = Couchdb.get_view("collections_per_genre/collections_per_genre", True)
    products_per_quasigenre = {row["key"]: row["value"] for row in rows}
    # The ":" key holds the grand total; expose it under a clearer name.
    products_per_quasigenre["total"] = products_per_quasigenre.pop(":")
    logger.info("products_per_quasigenre:" + str(products_per_quasigenre))
    analytics.track(user_id="stats", event='Profiles per quasigenre',
                    properties=products_per_quasigenre)
    return rows
def index(request):
    """Asset list view with 25-per-page pagination and Segment page tracking."""
    analytics.identify(request.user.id, {'name': request.user.username})
    analytics.page('index')
    paginator = Paginator(Asset.objects.all(), 25)  # Show 25 contacts per page
    page = request.GET.get('page')
    try:
        assets = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        assets = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        assets = paginator.page(paginator.num_pages)
    return render(request, 'index.html', {'assets': assets})
def pools(request):
    """Car-pool search view: identifies the current user to Segment, then
    filters Pool rows by the posted origen/destino and tracks the search."""
    context = RequestContext(request)
    resultados = []
    usuario = request.user
    analytics.identify(user_id=usuario.pk, traits={
        'email': usuario.email,
        'firstName': usuario.first_name,
        'lastName': usuario.last_name,
        'createdAt': usuario.date_joined.isoformat()
    })
    if request.method == 'POST':
        search_form = SearchForm(request.POST)
        if search_form.is_valid():
            try:
                resultados = Pool.objects.filter(
                    origen=search_form.cleaned_data['origen'],
                    destino=search_form.cleaned_data['destino'])
                analytics.track(usuario.pk, 'Busca ruta', {
                    'origen': search_form.cleaned_data['origen'],
                    'destino': search_form.cleaned_data['destino']
                })
            except Exception as e:
                # Debug output only; a failed search falls through to an
                # empty result list.
                print type(e)
                print e.args
                print e
        else:
            print search_form.errors
    colonias = Colonia.objects.all()
    return render_to_response('pools.html',
                              {'resultados': resultados,
                               'colonias': colonias,
                               'pageType': 'Listado Pools'},
                              context)
def identify(user, brand, org):
    """
    Creates and identifies a new user to our analytics backends. It is ok to call this
    with an existing user, their name and attributes will just be updated.
    """
    # no op if we aren't prod
    if not settings.IS_PROD:
        return
    attributes = {
        "email": user.username,
        "first_name": user.first_name,
        "segment": randint(1, 10),
        "last_name": user.last_name,
        "brand": brand,
    }
    user_name = f"{user.first_name} {user.last_name}"
    if org:
        attributes["org"] = org.name
        attributes["paid"] = org.account_value()
    # post to segment if configured
    if _segment:  # pragma: no cover
        segment_analytics.identify(user.username, attributes)
    # post to intercom if configured
    if _intercom:
        try:
            # intercom gets name/email natively — drop the duplicate fields
            for duplicate in ("first_name", "last_name", "email"):
                attributes.pop(duplicate, None)
            intercom_user = _intercom.users.create(
                email=user.username, name=user_name, custom_attributes=attributes
            )
            intercom_user.companies = [
                dict(
                    company_id=org.id,
                    name=org.name,
                    created_at=json.encode_datetime(org.created_on),
                    custom_attributes=dict(brand=org.brand, org_id=org.id),
                )
            ]
            _intercom.users.save(intercom_user)
        except Exception:
            logger.error("error posting to intercom", exc_info=True)
def widget_analytics():
    """Collect widget-embed beacon params from the query string, attribute
    them to an api-key, fire Segment identify/track, and answer JSONP."""
    d = {}
    for k, v in request.args.iteritems():
        d[k] = v
    # Derive hostname/domain from the embedding page's url when present.
    try:
        d["hostname"] = d['url'].split("/")[2]
        d["domain"] = ".".join(d['hostname'].split(".")[-2:])  # like "impactstory.org"
    except KeyError:
        #nevermind then
        pass
    try:
        api_key = d["api-key"]
    except KeyError:
        api_key = "unknown"
    logger.info(u"got widget analytics data: {data}".format(data=d))
    try:
        # later look stuff up here from db, based on api-key; send along w identify() call...
        analytics.identify(user_id=api_key)
    except IndexError:
        logger.debug(u"IndexError when doing analytics.identify in widget_analytics")
    try:
        analytics.track(
            user_id=api_key,
            event="Served a page with embedded widget",
            properties=d
        )
    except IndexError:
        logger.debug(u"IndexError when doing analytics.track in widget_analytics")
    try:
        # NOTE(review): 'async' is a reserved keyword on Python >= 3.7 — this
        # call (like .iteritems above) makes the function Python-2-only.
        analytics.flush(async=False)  # make sure all the data gets sent to segment.io
    except IndexError:
        # sometimes the data was already flushed and we get an error popping from an empty queue
        logger.debug(u"IndexError when doing analytics.flush in widget_analytics")
    return make_response(request.args.get("callback", "") + '({"status": "success"})', 200)
def test_alias(self):
    """Alias an anonymous session id to a user id and verify one alias plus
    all five calls are delivered synchronously."""
    session_id = str(randint(1000000, 99999999))
    user_id = 'bob+' + session_id + '@gmail.com'
    analytics.default_client.flush_at = 1
    # BUG FIX: `.async = False` is a SyntaxError on Python >= 3.7 ('async' is
    # a reserved keyword); assign the attribute reflectively instead.
    setattr(analytics.default_client, 'async', False)
    last_aliases = analytics.stats.aliases
    last_successful = analytics.stats.successful
    analytics.identify(session_id, traits={'AnonymousTrait': 'Who am I?'})
    analytics.track(session_id, 'Anonymous Event')
    # alias the user
    analytics.alias(session_id, user_id)
    analytics.identify(user_id, traits={'IdentifiedTrait': 'A Hunk'})
    analytics.track(user_id, 'Identified Event')
    self.assertEqual(analytics.stats.aliases, last_aliases + 1)
    self.assertEqual(analytics.stats.successful, last_successful + 5)
def identify():
    """Flask endpoint: forward a JSON identify payload to the Segment client.

    Returns the standard identify response, or a JSON error body when the
    payload is missing/malformed.
    """
    try:
        content = request.get_json(silent=True)
        # The positional order matches analytics.identify(user_id, traits,
        # context, timestamp, anonymous_id, integrations).
        analytics.identify(
            content.get('userId'),
            content.get('traits'),
            content.get('context'),
            format_timestamp(content.get('timestamp')),
            content.get('anonymousId'),
            content.get('integrations'),
        )
        return format_response('identify')
    except Exception as e:
        # (The previous `res = analytics.identify(...)` bound a value that
        # was never used; the call's result is now discarded explicitly.)
        return json.dumps({'error': str(e)})
def identify(self, updatedrows, writekey):
    """Send one segment.io identify() call per updated row.

    Each row must carry an 'id' key (used as the userId); every other
    key/value pair is forwarded as a trait.

    Raises:
        Exception: if a row has no 'id' key or carries no traits.
    """
    analytics.write_key = writekey
    # change to cloudwatch
    logging.basicConfig(filename='identify.log', level=logging.INFO,
                        format='%(asctime)s %(message)s')
    for row in updatedrows:
        traits = {}
        userid = ''
        for key, value in row.items():
            if key == 'id':
                userid = value
            else:
                traits[key] = value
        # BUG FIX: the original concatenated str + dict ('...' + row),
        # which raises TypeError and masks the intended error message.
        if not userid:
            raise Exception('Missing id in row {0!r}'.format(row))
        if not traits:
            raise Exception('No traits for row {0!r}'.format(row))
        analytics.identify(userid, traits)
        logging.info('%s userId:%s traits:%s', writekey, userid, traits)
def webhook(source):
    """Receive webhooks (errorception, papertrail, email/PagerDuty) and forward them to segment.io.

    Returns a JSON response echoing the source, always 200.
    """
    logger.info("got webhook from " + source.upper())
    if source == "errorception":
        # example whole post: {"isInline":true,"message":"Uncaught TypeError: Cannot call method 'split' of undefined","userAgent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.116 Safari/537.36","when":"before","scriptPath":null,"page":"http://impactstory.org/faq","date":"2013-07-24T03:44:01.611Z","isFirstOccurrence":false,"webUrl":"http://errorception.com/projects/51ef3db2db2bef20770003e2/errors/51ef4d2114fb556e3de3f3d2","apiUrl":"https://api.errorception.com/projects/51ef3db2db2bef20770003e2/errors/51ef4d2114fb556e3de3f3d2"}
        secret = os.getenv("ERRORCEPTION_SECRET", "")
        error_message = request.json.get("message", None)
        error_page = request.json.get("page", None)
        m = hashlib.sha1()
        # BUG FIX: message/page may be absent (None); concatenating None
        # raised TypeError before the signature could be checked.
        m.update(secret + (error_message or "") + (error_page or ""))
        x_signature = request.headers.get("X-Signature")
        if x_signature == m.hexdigest():
            analytics.identify(user_id="WEBAPP")
            analytics.track("WEBAPP", "Caused a JavaScript error", request.json)
    elif source == "papertrail":
        alert_descriptions = {
            "exception": "Threw an Exception",
            "cant_start_new_thread": "Couldn't start a new thread",
            "api_status_500": "Returned a server error from our API",
            "unspecified": "Sent a Papertrail alert"
        }
        jsonstr = json.loads(request.form['payload'])  # Load the Payload (Papertrail events)
        for event in jsonstr['events']:  # Iterate through events
            message = "(" + event["display_received_at"] + ")" + " " + event["source_name"] + " " + event["program"] + " | " + event["message"]
            logger.info("Brief version:" + message)
            logger.info("Full event:")
            logger.info(json.dumps(event, indent=4))
            if event["source_name"] in ["ti-core", "ti-webapp"]:
                app_name = event["source_name"].replace("ti-", "").upper()
                analytics.identify(user_id=app_name)
                alert_name = request.args.get("alert_name", "unspecified")
                # BUG FIX: the default above only applies when the query
                # param is missing; an unrecognized value raised KeyError.
                description = alert_descriptions.get(
                    alert_name, alert_descriptions["unspecified"])
                analytics.track(app_name, description, event)
            else:
                logger.info("Unknown event source_name, not sending")
    elif source == "email":
        # right now these are all from PAGERDUTY. Do something smarter later.
        analytics.identify(user_id="PAGERDUTY")
        analytics.track("PAGERDUTY", "Alert from PagerDuty", request.json)
    else:
        logger.info("got webhook from a place we didn't expect")
        logger.info(source + " whole post: ")
        logger.info(request.data)
    resp = make_response(json.dumps({"source": source}, indent=4), 200)
    resp.mimetype = "application/json"
    return resp
def enhance_segment_user(user):
    """Enrich a user's segment.io profile with person data from the Pipl search API.

    Looks the user up by email/name, collects demographic, job, email and
    social-profile traits, then sends one identify() call with everything
    that was found.
    """
    resp = SearchAPIRequest(email=user.email, first_name=user.first_name,
                            last_name=user.last_name, show_sources='matching').send()
    if resp.warnings:
        logger.warning("Got {} warnings for Pipl enhancement".format(len(resp.warnings)))
        for warning in resp.warnings:
            logger.warning(warning)
    if resp.person:
        # BUG FIX: 'person' was used throughout without ever being
        # assigned, so every matching lookup raised NameError.
        person = resp.person
        traits = {}
        if person.dob:
            traits['age'] = person.dob.age
        if person.educations:
            traits['degree'] = person.educations[0]._display
        if person.languages:
            traits['language'] = person.languages[0]._display
        if person.gender:
            traits['gender'] = person.gender.display
        for p in person.phones:
            if p.type == 'work_phone':
                traits['phone'] = p.display_international
        # only overwrite names we don't already have locally
        if person.names and (not user.first_name or not user.last_name):
            name = person.names[0]
            traits['firstName'] = name.first
            traits['lastName'] = name.last
        if person.jobs:
            job = person.jobs[0]
            traits['Job Title'] = job.title
            traits['Organization'] = job.organization
            traits['Industry'] = job.industry
        if person.emails:
            # match the returned emails back to the address we queried with
            query_md5 = resp.query.emails[0].address_md5
            match_email = ([e for e in person.emails if e.address_md5 == query_md5] or [None])[0]
            if match_email:
                traits['Disposable Email'] = 'Yes' if match_email.disposable else 'No'
                traits['Email Provider'] = 'Public' if match_email.email_provider else 'Work'
        domain_names = [
            ('linkedin.com', 'LinkedIn'),
            ('facebook.com', 'Facebook'),
            ('plus.google.com', 'G+'),
            ('twitter.com', 'Twitter'),
            ('pinterest.com', 'Pinterest'),
            ('reddit.com', 'Reddit'),
        ]
        sources_by_domain = resp.group_sources_by_domain()
        for domain, name in domain_names:
            sources = sources_by_domain.get(domain, [])
            # take the username/user-id from the first source only
            for src in sources:
                if src.usernames:
                    traits['{} Username'.format(name)] = src.usernames[0].content
                if src.user_ids:
                    traits['{} User ID'.format(name)] = src.user_ids[0].content.split('@')[0]
                break
            for src in sources:
                if 'avatar' not in traits and src.images:
                    traits['avatar'] = src.images[0].url
        # fall back to the person-level image when no source had one
        if 'avatar' not in traits and person.images:
            traits['avatar'] = person.images[0].url
        analytics.identify(user.email, traits)
def identify(username, attributes):
    """Forward an identify call for ``username`` with ``attributes`` to the segment.io client."""
    segment_analytics.identify(username, attributes)
# config and debugging stuff app.config["SECRET_KEY"] = os.getenv("SECRET_KEY") # set up Flask-DebugToolbar if (os.getenv("FLASK_DEBUG", False) == "True"): logger.info("Setting app.debug=True; Flask-DebugToolbar will display") app.debug = True app.config["DEBUG_TB_INTERCEPT_REDIRECTS"] = False toolbar = DebugToolbarExtension(app) # segment.io logging analytics.init(os.getenv("SEGMENTIO_PYTHON_KEY")) analytics.identify("CORE", { 'internal': True, 'name': 'IMPACTSTORY CORE'}) # set up views and database, if necessary try: from totalimpact import views except exc.ProgrammingError: logger.info("SQLAlchemy database tables not found, so creating them") db.session.rollback() db.create_all() from totalimpact import views try:
def test_identify(self):
    """Smoke test: queue a single identify message, then force it out."""
    payload = {'email': '*****@*****.**'}
    analytics.identify('userId', payload)
    analytics.flush()
import analytics
import logging
import time
import subprocess
from bson.objectid import ObjectId
from os import listdir, environ, makedirs, path
import sys
import time  # NOTE(review): duplicate of the import above; kept as-is
import graypy
from pymongo import MongoClient

import settings

# Identify this process to segment.io so download events are attributed
# to the "downloader" user.
analytics.write_key = settings.ANALYTICS_KEY
analytics.identify('downloader', traits={
    'email': settings.REPORT_EMAIL,
})


def send_download_status(app, status, extra=None):
    """Report a single app download's outcome to segment.io."""
    properties = {
        'title': app,
        'message': "downloaded" if status else "failed",
        'extra': extra,
    }
    analytics.track('downloader', 'download_status', properties)


class Downloader:
    """Holds the db handle and import directory used during downloads."""

    def __init__(self, db=None, impdir=None):
        self.db = db          # mongo handle -- TODO confirm against callers
        self.impdir = impdir  # directory downloads are imported from
        # logging.basicConfig(filename=("log/downloader-%s.log" % int(time.time())),level=logging.INFO)
        logging.basicConfig(level=logging.INFO)
# verbose stdout logging for the whole app
logging.basicConfig(
    stream=sys.stdout,
    level=logging.DEBUG,
    format='[%(process)d] %(levelname)8s %(threadName)30s %(name)s - %(message)s'
)
logger = logging.getLogger("impactstoryanalytics")

# set up application
app = Flask(__name__)

# allow slashes and end of URLs even when they're not part of views:
# http://flask.pocoo.org/mailinglist/archive/2011/2/27/re-automatic-removal-of-trailing-slashes/#043b1a0b6e841ab8e7d38bd7374cbb58
app.url_map.strict_slashes = False

# setup segment.io; flush_at=1 sends every message immediately instead of batching
analytics.init(os.getenv("SEGMENTIO_KEY"), log_level=logging.DEBUG, flush_at=1)
# pre-register the two known team members as segment.io users
analytics.identify(user_id='Heather', traits={
    "name": "Heather",
    "email": "*****@*****.**",
})
analytics.identify(user_id='Jason', traits={
    "name": "Jason",
    "email": "*****@*****.**",
})

# set up views
from impactstoryanalytics import views
from pyspark import SparkContext
import analytics

# NOTE(review): the segment.io write key is hard-coded; it should be
# loaded from the environment rather than committed to source control.
analytics.write_key = 'vYMEyDSwgdCuIpakFZBa4gbYkM6z1vuA'

inputFile = "file:/wordcount/input/Had*"
sc = SparkContext("local", "Simple App")
inputData = sc.textFile(inputFile).cache()

# classic word count: split each line on spaces, emit (word, 1), sum per word
counts = inputData.flatMap(lambda line: line.split(" ")) \
    .map(lambda word: (word, 1)) \
    .reduceByKey(lambda a, b: a + b)

# NOTE(review): 'context' is passed inside the traits dict here rather
# than as the client's separate context argument -- confirm intended.
analytics.identify('019mr8mf4r', {
    'email': '*****@*****.**',
    'name': 'Karthick Sriraman',
    'context': 'Ran Word Count on Spark',
})

counts.saveAsTextFile("file:/wordcount/output/counts")