def possibly_sharded(self):
    """Shard this account-level sync into per-event EventSyncs, at most once.

    Bails out early when the sync is already complete, when unfinished
    shards still exist, or when a concurrent process wins the race to
    claim sharding (via the atomic UPDATE on sharded_at).
    """
    if self.completed_at:
        return None
    if self.accountsyncshard_set.filter(completed_at__isnull=True).count():
        return None
    claimed_at = time()
    # Atomic claim: only the process that flips sharded_at from NULL proceeds.
    won_race = self.__class__.objects.filter(
        id=self.id, sharded_at__isnull=True).update(sharded_at=claimed_at) == 1
    if not won_race:
        return None
    self.sharded_at = claimed_at
    from webinars_web.webinars import models as wm
    with wm.EventSync.delayed as batch:
        batch_now = time()
        eligible_events = self.account.event_set.filter(
            deleted_at__isnull=True,
            current_sync__isnull=True,
            mothballed=False).select_related('account')
        for event in eligible_events:
            batch.insert(dict(event_id=event.id,
                              parent_id=self.id,
                              visible=self.visible,
                              debug=self.debug,
                              created_at=batch_now.us))
    from django.db import transaction
    transaction.commit_unless_managed()
    event_syncs = wm.EventSync.objects.filter(parent=self, event__account=self.account)
    if not self.debug:
        # Kick off one task per child sync, spread across the task queues.
        tasks = []
        for es in event_syncs:
            url = '%s%s' % (settings.APP_URL, es.kickoff_path)
            uid = 'event_sync|kickoff|%s' % es.id
            qid = es.event.account_id % settings.NUM_QUEUES
            logging.debug('TQDEBUG: qid id %s' % qid)
            tasks.append(Task(queue=settings.TASK_QUEUES[qid], url=url, method='POST', uid=uid))
        Task._enqueue(tasks)
    self.possibly_done()
def __init__(self, account, **kwargs):
    """Build an Event from either local kwargs or raw WebEx listing fields.

    mpop removes each consumed key from kwargs, so the extraction order
    below matters: explicit local values win over the raw WebEx fields.
    """
    super(Event, self).__init__()
    self.account = account
    self.title = nstrip(mpop(kwargs, 'title', 'sessionName', 'confName'))
    self._starts_at = mpop(kwargs, 'starts_at', '_starts_at')
    self._ends_at = mpop(kwargs, 'ends_at', '_ends_at')
    self._started_at = mpop(kwargs, 'started_at', '_started_at')
    self._ended_at = mpop(kwargs, 'ended_at', '_ended_at')
    if kwargs.get('timeZoneID'):
        # Fields from the normal (upcoming) WebEx listing.
        zone = timezone.WEBEX_TIMEZONE_ID_TO_PYTZ_LABEL_MAP[int(kwargs['timeZoneID'])]
        if self._starts_at is None and kwargs.get('startDate'):
            self._starts_at = time(kwargs['startDate'], zone)
        if self._ends_at is None and kwargs.get('endDate'):
            self._ends_at = time(kwargs['endDate'], zone)
    if kwargs.get('timezone'):
        # Fields from the historical WebEx listing.
        zone = timezone.WEBEX_TIMEZONE_ID_TO_PYTZ_LABEL_MAP[int(kwargs['timezone'])]
        if self._started_at is None and kwargs.get('sessionStartTime'):
            self._started_at = time(kwargs['sessionStartTime']).set_tz(zone)
        if self._ended_at is None and kwargs.get('sessionEndTime'):
            self._ended_at = time(kwargs['sessionEndTime']).set_tz(zone)
    self.description = mpop(kwargs, 'description') or None
    self.session_key = mpop(kwargs, 'session_key', 'sessionKey')
    self.visibility = mpop(
        kwargs, 'listing', 'listStatus',
        fallback=account.meetings_must_be_unlisted and 'UNLISTED' or 'PUBLIC').strip().lower()
def clean_starts_at_ndt(self):
    """Form-field validator: reject start times that are already in the past.

    The check is skipped for GTW-backed instances; the timezone comes from
    the existing instance when editing, else from the hub.
    """
    if not self.kwargs_instance or not self.kwargs_instance.account.is_gtw:
        tz = self.kwargs_instance.starts_at.tz if self.kwargs_instance else self.hub.timezone
        proposed_start = time(self.cleaned_data['starts_at_ndt'], tz)
        if time() > proposed_start:
            raise ValidationError("This is in the past!")
    return self.cleaned_data['starts_at_ndt']
def test_attendees(self):
    """Attendees parsed from the mocked JSON should match hand-built Registrants."""
    webinar = Webinar(self.organizer, key=2394, timezone='America/New_York', sessions=[])
    sess = Session(webinar, key=6043, started_at=time('2012-06-01'), attendees=[])
    john = Registrant(webinar=webinar, session=sess, key=9583,
                      first_name=u'J\u00f6hn', last_name=u'Smith',
                      email=u'*****@*****.**',
                      viewings=[(time('4/1/11 12:01'), time('4/1/11 12:22'))])
    suzy_viewings = [(time('4/1/11 12:02'), time('4/1/11 12:07')),
                     (time('4/1/11 12:12'), time('4/1/11 12:17')),
                     (time('4/1/11 12:22'), time('4/1/11 12:27'))]
    suzy = Registrant(webinar=webinar, session=sess, key=2305,
                      first_name=u'Suzy', last_name=u'Samwell',
                      email=u'*****@*****.**', viewings=suzy_viewings)
    with mocker(GetAttendees, text=self.attendees_json):
        fetched = Session(webinar, key=6043, started_at=time('2012-06-01'))
        self.assertEquals([john, suzy], fetched.attendees)
def __init__(self, **kwargs):
    """Build a Registrant from local kwargs or raw API fields (mget does not pop).

    Viewings fall back from explicit `viewings`, to the raw `attendance`
    list, to a single span synthesized from the session start + duration.
    """
    super(Registrant, self).__init__()
    self.webinar = kwargs.get('webinar')
    self.session = kwargs.get('session')
    self.key = mget(kwargs, 'key', 'registrant_key', 'registrantKey')
    self.email = nlower(mget(kwargs, 'email', 'attendeeEmail'))
    self.first_name = mget(kwargs, 'first_name', 'firstName', 'first')
    self.last_name = mget(kwargs, 'last_name', 'lastName', 'last')
    if kwargs.get('name'):
        self.name = nstrip(kwargs.get('name'))
    self.registered_at = ntime(mget(kwargs, 'registered_at', 'registrationDate'))
    self.join_url = mget(kwargs, 'join_url', 'joinUrl')
    self.status = kwargs.get('status')
    self.viewings = kwargs.get('viewings', [])
    if not self.viewings and kwargs.get('attendance'):
        self.viewings = sort([(time(d['joinTime']), time(d['leaveTime']))
                              for d in kwargs['attendance']])
    has_duration = kwargs.get('duration') or kwargs.get('attendanceTimeInSeconds')
    if not self.viewings and has_duration and self.session and self.session.key and self.session.started_at:
        duration = kwargs.get('duration') or kwargs.get('attendanceTimeInSeconds') and delta(s=kwargs['attendanceTimeInSeconds'])
        self.viewings = [(self.session.started_at, self.session.started_at + duration)]
def get_today_local_noon_dt(my_tz_name):
    """Return a sanetime for today's 12:00:00 noon in the given timezone."""
    current = time(tz=my_tz_name)
    return time(current.year, current.month, current.day, 12, 0, 0, 0,
                tz=my_tz_name)
def test_sleep(self):
    """delta.sleep() should block for at least the delta; no sleep should be fast."""
    before = time()
    after = time()
    self.assertTrue(after - before < delta(ms=10))
    before = time()
    delta(ms=10).sleep()
    after = time()
    self.assertTrue(after - before >= delta(ms=10))
def test_attendees(self):
    """Session.attendees built from mocked JSON should equal the expected pair."""
    w = Webinar(self.organizer, key=2394, timezone='America/New_York', sessions=[])
    s = Session(w, key=6043, started_at=time('2012-06-01'), attendees=[])
    expected = [
        Registrant(webinar=w, session=s, key=9583,
                   first_name=u'J\u00f6hn', last_name=u'Smith',
                   email=u'*****@*****.**',
                   viewings=[(time('4/1/11 12:01'), time('4/1/11 12:22'))]),
        Registrant(webinar=w, session=s, key=2305,
                   first_name=u'Suzy', last_name=u'Samwell',
                   email=u'*****@*****.**',
                   viewings=[(time('4/1/11 12:02'), time('4/1/11 12:07')),
                             (time('4/1/11 12:12'), time('4/1/11 12:17')),
                             (time('4/1/11 12:22'), time('4/1/11 12:27'))]),
    ]
    with mocker(GetAttendees, text=self.attendees_json):
        session = Session(w, key=6043, started_at=time('2012-06-01'))
        self.assertEquals(expected, session.attendees)
def __init__(self, start, delta=None, end=None):
    """Build a span from either (start, delta) or (start, end).

    When delta is omitted, the two endpoints are normalized so that
    `start` is the earlier one and `delta` is non-negative.
    """
    super(SaneSpan, self).__init__()
    self.start = time(start)
    if delta is not None:
        self.delta = delta
    else:
        finish = time(end or 0)
        # normalize endpoint order so the delta is non-negative
        self.start = min(self.start, finish)
        self.delta = finish - self.start
def test_hashability(self):
    """Equal instants hash alike regardless of tz; distinct micros differ."""
    utc_jan = time(JAN_MICROS, tz='UTC')
    ny_jan = time(JAN_MICROS, tz='America/New_York')
    jan_plus = time(JAN_MICROS + 1)
    bag = set([utc_jan, ny_jan, jan_plus])
    # the two tz-variants of the same instant collapse into one element
    self.assertEquals(2, len(bag))
    for t in (utc_jan, ny_jan, jan_plus):
        self.assertIn(t, bag)
def lock(self, timeout=None, poll_rate=None):
    """Try to acquire the lock, polling until a timeout elapses.

    `timeout` and `poll_rate` accept deltas or microseconds; both fall
    back to the instance defaults. Returns self.locked.
    """
    timeout = ndelta(timeout) or self.timeout or delta(0)
    poll_rate = ndelta(poll_rate) or self.poll_rate
    started_at = time()
    while not self._attempt_lock():
        if poll_rate:
            poll_rate.sleep()
        if time() > started_at + timeout:
            break  # give up; caller inspects self.locked
    return self.locked
def test_registrants(self):
    """Registrants parsed from mocked JSON should equal the expected three."""
    w = Webinar(self.organizer, key=2394, timezone='America/New_York', sessions=[])
    s = Session(w, key=6043, started_at=time('2012-06-01'), attendees=[])
    expected = [
        Registrant(webinar=w, session=s, key=9583,
                   first_name=u'J\u00f6hn', last_name=u'Smith',
                   email=u'*****@*****.**', registered_at='5/1/12 18:01',
                   join_url='http://bit.ly/482024', status='APPROVED',
                   viewings=[(time('4/1/11 12:01'), time('4/1/11 12:22'))]),
        Registrant(webinar=w, session=s, key=2305,
                   first_name=u'Suzy', last_name=u'Samwell',
                   email=u'*****@*****.**', registered_at='5/2/12 18:02',
                   join_url='http://bit.ly/8592932', status='APPROVED',
                   viewings=[(time('4/1/11 12:02'), time('4/1/11 12:07')),
                             (time('4/1/11 12:12'), time('4/1/11 12:17')),
                             (time('4/1/11 12:22'), time('4/1/11 12:27'))]),
        Registrant(webinar=w, session=s, key=4203,
                   first_name=u'Todd', last_name=u'Kells',
                   email=u'*****@*****.**', registered_at='5/3/12 18:03',
                   join_url='http://bit.ly/9293842', status='WAITING',
                   viewings=[]),
    ]
    with mocker(GetAttendees, text=self.attendees_json):
        with mocker(GetRegistrations, text=self.registrants_json):
            session = Session(w, key=6043, started_at=time('2012-06-01'))
            self.assertEquals(expected, session.registrants)
def calc_hashcode(kls, **kwargs):
    """Derive a stable 31-bit hash for an event.

    Prefers any remote/session/universal key; otherwise hashes the
    title plus start/end microsecond timestamps.
    """
    remote_key = mget(kwargs, 'remote_id', 'session_key', 'universal_key')
    if remote_key:
        material = remote_key
    else:
        title = mget(kwargs, 'title', 'subject')
        starts_us = kwargs.get('_time_starts_at', kwargs.get('starts_at', time(0)).us)
        ends_us = kwargs.get('_time_ends_at', kwargs.get('ends_at', time(0)).us)
        material = '%s|%s|%s' % (title, starts_us, ends_us)
    # fold the md5 digest into a non-negative 31-bit int
    return int(int(hashlib.md5(material).hexdigest(), 16) % 2**31)
def test_universal_key(self):
    """universal_key is '<webinar key>-<1-based session position>'."""
    w = Webinar(self.organizer, key=4918, timezone='America/New_York', sessions=[])
    sessions = [Session(w, key=2058, started_at=time('2012-06-01')),
                Session(w, key=2023, started_at=time('2012-06-02')),
                Session(w, key=2084, started_at=time('2012-06-03'))]
    for s in sessions:
        w.sessions.append(s)
    for position, s in enumerate(sessions, 1):
        self.assertEquals('4918-%d' % position, s.universal_key)
def get_fresh_last_modified_at(hub, guid):
    """Return one microsecond-step past the newest lead's lastModifiedAt.

    Falls back to the epoch when the form has no leads yet.
    """
    client = hapi.leads.LeadsClient(settings.HUBSPOT_API_KEY, hub_id=hub.id,
                                    env=settings.API_ENV)
    leads = client.get_leads(time_pivot='lastModifiedAt',
                             sort='lastModifiedAt',
                             dir='desc',
                             max=1,
                             form_guid=guid)
    if not leads:
        return time(0)
    # lastModifiedAt is in ms; convert to us and nudge forward 1ms
    return time(us=leads[0]['lastModifiedAt'] * 1000 + 1000)
def test_arithmetic(self):
    """times add/subtract ints as microseconds; differences are micro counts."""
    jan = time(JAN_MICROS)
    jan_plus_one = time(JAN_MICROS + 1)
    self.assertEquals(jan_plus_one.us, (jan + 1).us)
    self.assertEquals(jan.us, (jan_plus_one - 1).us)
    self.assertEquals(1, jan_plus_one - jan)
    self.assertEquals(-1, jan - jan_plus_one)
    # multiplication/division operate on the microsecond value
    self.assertEquals(jan.us, jan * 1)
    self.assertEquals(jan.us, jan / 1)
def test_switching_timezones(self):
    """set_tz mutates in place; with_tz returns a new object; instant unchanged."""
    original = time(JAN_MICROS)
    ny = pytz.timezone('America/New_York')
    self.assertEquals(ny, original.with_tz('America/New_York').tz)
    # same instant regardless of which method moved the zone
    self.assertEquals(original, time(original).set_tz('America/New_York'))
    self.assertEquals(original, original.with_tz('America/New_York'))
    # but the tz itself differs from the original's
    self.assertNotEquals(original.tz, time(original).set_tz('America/New_York').tz)
    self.assertNotEquals(original.tz, original.with_tz('America/New_York').tz)
    self.assertEquals(ny, time(original).set_tz('America/New_York').tz)
    self.assertEquals(ny, original.with_tz('America/New_York').tz)
    # identity: set_tz returns self, with_tz returns a fresh instance
    original_id = id(original)
    self.assertEquals(original_id, id(original.set_tz('America/New_York')))
    self.assertNotEquals(original_id, id(original.with_tz('America/New_York')))
def random(kls, account, count=None):
    """Build one (count=None) or `count` throwaway unittest Events.

    Events start 15 minutes from now and end 30 minutes from now,
    with a unique guid baked into title and description for cleanup.
    """
    events = []
    for _ in xrange(count or 1):
        guid = ''.join(str(uuid.uuid4()).split('-'))
        # truncate to whole seconds, pinned to Eastern time
        now = time(s=time().s, tz='America/New_York')
        events.append(Event(
            account,
            title=u'unittest #%s &<>\xfc\u2603 ' % guid[:16],
            description=u'#%s: An event created by unittests. If you\'re seeing this, then something went wrong. All events created by unittests are immediately cleaned up. &<>\xfc\u2603 ' % guid,
            starts_at=now + 15 * 60 * 10**6,
            ends_at=now + 30 * 60 * 10**6))
    return count is None and events[0] or events
def test_huge_crud(self): size = 3000 event = Event.random(self.account).create() registrants = dict((r.email,r) for r in Registrant.random(event, size)) start = time() expected = dict((r.email,r) for r in event.create_registrants(registrants.values())) after_create = time() actual = dict((r.email,r) for r in event.general_registrants) after_listing = time() self.assertEquals(registrants, expected) self.assertEquals(expected, actual) self.assertEquals(size, len(expected)) print "\nCREATE TIMING: %sms\nLISTING TIMING: %sms" % ((after_create-start).ms, (after_listing-after_create).ms,)
def test_max_timing(self): # against all known accounts, and all their events: from pprint import pprint max_info = (None, 0) for k in self.th._accounts_dict.keys(): pprint(self.th[k]) for e in self.th[k].events: started = time() registrants = e.registrants elapsed = (time()-started).s if elapsed > max_info[1]: max_info = ("%s %s %s %s" % (e.account.site_name, e.session_key, e.title, len(registrants)), elapsed) print max_info print max_info
def setUp(self):
    """Create a user, recipient, subscription and an active vacation, then
    collect the url lists used by the login/ownership permission tests."""
    self.userdata = {'username': '******', 'password': '******'}
    self.user = User.objects.create_user(**self.userdata)
    self.user = User.objects.get_by_natural_key('xenuuu')
    self.recipient = Recipient.objects.create(sender=self.user,
                                              sender_name='bob',
                                              sender_phone='000-000-0000',
                                              name='granny',
                                              email='*****@*****.**',
                                              timezone='America/Los_Angeles')
    # supplying all fields to avoid a call out to tumblr
    self.subscription = TumblrSubscription.objects.create(recipient=self.recipient,
                                                          short_name='bobs_monkeys',
                                                          pretty_name="Bob's Monkey Photos",
                                                          avatar='monkey.jpg')
    # a vacation spanning one week back to one week forward (i.e. active now)
    start_dt = time() - delta(hours=1 * 7 * 24)
    end_dt = time() + delta(hours=1 * 7 * 24)
    self.vacation = Vacation.objects.create(recipient=self.recipient,
                                            start_date=start_dt.datetime,
                                            end_date=end_dt.datetime)
    # urls of things that require you to be logged in to access
    self.login_required_urls = [
        reverse_lazy('subscription_create_tumblr'),
        reverse_lazy('subscription_list'),
        reverse_lazy('subscription_detail_tumblr', kwargs={'pk': self.subscription.pk}),
        reverse_lazy('subscription_delete_tumblr', kwargs={'pk': self.subscription.pk}),
        reverse_lazy('recipient_create'),
        reverse_lazy('recipient_detail', kwargs={'pk': self.recipient.pk}),
        reverse_lazy('vacation_create', kwargs={'recipient_id': self.recipient.pk}),
        reverse_lazy('vacation_cancel', kwargs={'pk': self.vacation.pk})
        # todo: delete the recipient
    ]
    # urls that additionally require owning the object (or a related one);
    # exercised by logging in as a user other than xenuuu
    self.ownership_required_urls = [
        reverse_lazy('subscription_detail_tumblr', kwargs={'pk': self.subscription.pk}),
        reverse_lazy('subscription_delete_tumblr', kwargs={'pk': self.subscription.pk}),
        reverse_lazy('recipient_detail', kwargs={'pk': self.recipient.pk}),
        reverse_lazy('vacation_create', kwargs={'recipient_id': self.recipient.pk}),
        reverse_lazy('vacation_cancel', kwargs={'pk': self.vacation.pk})
    ]
    self.login_url = reverse('auth_login')
def get_id(timezone_label):
    """Resolve a pytz label to a WebEx timezone id, memoized in
    PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP.

    Matching works by rendering the current naive datetime in the target
    zone and in each candidate zone and comparing the resulting instants.
    """
    if timezone_label in PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP:
        return PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP[timezone_label]
    probe_dt = time(time().us).naive_datetime
    target = time(probe_dt, tz=timezone_label)
    for tuple_ in TIMEZONE_DATA:
        for pytz_label in tuple_[2:]:
            if target == time(probe_dt, tz=pytz_label):
                # NOTE(review): webex_timezone_id is never bound in this
                # function — presumably it should come from tuple_ (e.g.
                # tuple_[0]); as written this raises NameError on a match.
                # Confirm against TIMEZONE_DATA's layout.
                PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP[timezone_label] = webex_timezone_id
                break
    return PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP.get(timezone_label)
def new_or_edit(request, account_id=None, setup=False):
    """Django view: create or edit an Account for the current hub.

    GET renders an empty (or instance-bound) AccountForm. POST handles
    cancel, the GTW OAuth redirect (account_type '2'), resurrection of a
    soft-deleted matching account, or a fresh save. On success the hub is
    re-synced and the user is redirected to the events page.
    """
    from webinars_web.webinars import models as wm
    hub = models.Hub.ensure(request.marketplace.hub_id)
    kwargs = {'hub': hub}
    if account_id:
        kwargs['instance'] = models.Account.objects.get(pk=account_id)
    if request.method == 'POST':  # the form has been submitted
        if request.POST.get('cancel'):
            # Redirect for cancel
            return HttpResponseRedirect('%saccounts' % request.marketplace.base_url)
        if request.POST.get('account_type', '1') == '2':
            # GTW accounts are provisioned via Citrix OAuth instead of this form.
            redirect_uri = urlquote_plus('%s/webinars/hubs/%s/accounts/new?label=%s' % (
                settings.GTW_OAUTH_REDIRECT_PROTOCOL_HOST, hub.id,
                urlquote_plus(request.POST.get('extra', ''))))
            return HttpResponseRedirect(
                'https://api.citrixonline.com/oauth/authorize?client_id=%s&redirect_uri=%s' % (
                    settings.GTW_API_KEY, redirect_uri))
        form = AccountForm(request.POST, **kwargs)  # form bound to the POST data
        if form.is_valid():
            # Robustness: default to empty so an unexpected account_type can't
            # raise NameError below.
            deleted_possibles = ()
            if form.cleaned_data.get('account_type') == 1:
                deleted_possibles = wm.Account.objects.filter(
                    hub=hub, username=form.cleaned_data['username'],
                    extra=form.cleaned_data['extra'], deleted_at__isnull=False)
            elif form.cleaned_data.get('account_type') == 2:
                deleted_possibles = wm.Account.objects.filter(
                    hub=hub, username=form.cleaned_data['username'],
                    deleted_at__isnull=False)
            if deleted_possibles:
                # Resurrect the previously soft-deleted account instead of
                # creating a duplicate.
                account = deleted_possibles[0]
                account.deleted_at = None
                account.password = form.cleaned_data['password']
            else:
                account = form.save(commit=False)
            account.exclude_old_events_from_hubspot = bool(
                form.cleaned_data.get('exclude_old_events_from_hubspot'))
            if account.exclude_old_events_from_hubspot:
                ignore_delta = int(form.cleaned_data.get('exclusion_date_delta'))
                account.exclusion_date = (time() - delta(md=ignore_delta)).us
            else:
                account.exclusion_date = None
            account.hub_id = request.marketplace.hub_id
            account.default = False
            account.prevent_unformed_lead_import = False
            account.save()
            account.hub.sync(visible=True)
            # Redirect after POST
            return HttpResponseRedirect('%sevents' % (request.marketplace.base_url))
    else:
        form = AccountForm(**kwargs)  # an unbound form
    return render_to_response(
        'accounts/%s.djml' % (setup and 'setup' or account_id and 'edit' or 'new'),
        {'form': form,
         'account_types': models.AccountType.objects.all()},
        context_instance=RequestContext(request))
def __init__(self, event, **kwargs):
    """Build a Registrant for `event` from local kwargs or raw WebEx fields.

    mpop removes each consumed key; a startTime/endTime pair becomes a
    single viewing span.
    """
    super(Registrant, self).__init__()
    self.event = event
    self.email = nlower(nstrip(mpop(kwargs, 'email', 'attendeeEmail')))
    self.attendee_id = nstrip(mpop(kwargs, 'attendee_id', 'id', 'id_'))
    self.first_name = nstrip(mpop(kwargs, 'first_name', 'firstName', 'first'))
    self.last_name = nstrip(mpop(kwargs, 'last_name', 'lastName', 'last'))
    if kwargs.get('name'):
        self.name = nstrip(kwargs.pop('name'))
    self.viewings = kwargs.pop('viewings', [])
    span_start = mpop(kwargs, 'started_at', 'startTime')
    span_stop = mpop(kwargs, 'stopped_at', 'endTime')
    if span_start and span_stop:
        self._add_viewing((time(span_start), time(span_stop)))
    self.ip_address = nstrip(mpop(kwargs, 'ip_address', 'ipAddress'))
def __unicode__(self):
    """Render a compact one-line summary of this registrant/attendee."""
    def stamp(t):
        # format a sanetime in this record's zone, or '?' when missing
        return t and "%s +%s" % (
            time(t.us, self.timezone).strftime('%m/%d/%y %I:%M%p').lower(),
            self.timezone) or '?'
    return u"%s[%s] %s (%s %s) %s [ %s ] (%s - %s) :%s =%s" % (
        self.started_at and 'A' or 'R',  # Attendee vs Registrant
        self.key,
        self.email,
        self.first_name,
        self.last_name,
        self.duration,
        self.status or '?',
        stamp(self.started_at),
        stamp(self.ended_at),
        stamp(self.registered_at),
        self.join_url)
def test_sessioned_webinars(self):
    """sessioned_webinars parsed from mocked JSON matches hand-built fixtures."""
    wa = Webinar(self.organizer, key=8471, sessions=[])
    wa.sessions.append(Session(wa, key=4942, attendant_count=11,
                               started_at=time('6/1/11 10:01'),
                               ended_at=time('6/1/11 11:01')))
    wb = Webinar(self.organizer, key=2394, sessions=[])
    for key, count, day in ((6043, 22, '7/1/11'), (5028, 33, '7/2/11'), (4023, 44, '8/1/11')):
        hour = day == '8/1/11' and '12:03' or '12:02'
        end_hour = day == '8/1/11' and '13:03' or '13:02'
        wb.sessions.append(Session(wb, key=key, attendant_count=count,
                                   started_at=time('%s %s' % (day, hour)),
                                   ended_at=time('%s %s' % (day, end_hour))))
    wc = Webinar(self.organizer, key=3948, sessions=[])
    wc.sessions.append(Session(wc, key=8427, attendant_count=55,
                               started_at=time('9/1/11 18:04'),
                               ended_at=time('9/1/11 19:04')))
    with mocker(GetSessionedWebinars, text=self.sessioned_json):
        fetched = self.organizer.sessioned_webinars
        self.assertEquals(wa, fetched[0])
        self.assertEquals(wb, fetched[1])
        self.assertEquals(wc, fetched[2])
        self.assertEquals([wa, wb, wc], self.organizer.sessioned_webinars)
def test_viewings(self):
    """Merging registrants accumulates viewings; duration spans first start to last stop."""
    event = Event.random(self.account)
    r = Registrant(event, email="*****@*****.**")
    # no viewings yet
    self.assertIsNone(r.started_at)
    self.assertIsNone(r.stopped_at)
    self.assertIsNone(r.duration_in_minutes)
    # one 20-minute viewing
    r.merge(Registrant(event, email="*****@*****.**",
                       viewings=[[time(s=60 * 10), time(s=60 * 30)]]))
    self.assertEquals(60 * 10, r.started_at.s)
    self.assertEquals(60 * 30, r.stopped_at.s)
    self.assertEquals(20, r.duration_in_minutes)
    # a later 5-minute viewing extends the stop and the total span
    r.merge(Registrant(event, email="*****@*****.**",
                       viewings=[[time(s=60 * 50), time(s=60 * 55)]]))
    self.assertEquals(60 * 10, r.started_at.s)
    self.assertEquals(60 * 55, r.stopped_at.s)
    self.assertEquals(45, r.duration_in_minutes)
def random(kls, account, count=None):
    """Return one random unittest Event (count=None) or a list of `count`.

    Each event starts in 15 minutes and ends in 30, carrying a unique
    guid in its title/description so cleanup can find it.
    """
    events = []
    for _ in xrange(count or 1):
        guid = ''.join(str(uuid.uuid4()).split('-'))
        now = time(s=time().s, tz='America/New_York')  # whole-second precision
        events.append(Event(
            account,
            title=u'unittest #%s &<>\xfc\u2603 ' % guid[:16],
            description=u'#%s: An event created by unittests. If you\'re seeing this, then something went wrong. All events created by unittests are immediately cleaned up. &<>\xfc\u2603 ' % guid,
            starts_at=now + 15 * 60 * 10**6,
            ends_at=now + 30 * 60 * 10**6))
    return count is None and events[0] or events
def __init__(self, event, **kwargs):
    """Construct a Registrant for `event`; kwargs may be local names or raw
    WebEx API field names (mpop consumes whichever is present)."""
    super(Registrant, self).__init__()
    self.event = event
    self.email = nlower(nstrip(mpop(kwargs, 'email', 'attendeeEmail')))
    self.attendee_id = nstrip(mpop(kwargs, 'attendee_id', 'id', 'id_'))
    self.first_name = nstrip(mpop(kwargs, 'first_name', 'firstName', 'first'))
    self.last_name = nstrip(mpop(kwargs, 'last_name', 'lastName', 'last'))
    if kwargs.get('name'):
        self.name = nstrip(kwargs.pop('name'))
    self.viewings = kwargs.pop('viewings', [])
    begin = mpop(kwargs, 'started_at', 'startTime')
    finish = mpop(kwargs, 'stopped_at', 'endTime')
    if begin and finish:
        # a start/end pair from the API becomes a single viewing span
        self._add_viewing((time(begin), time(finish)))
    self.ip_address = nstrip(mpop(kwargs, 'ip_address', 'ipAddress'))
def get_id(timezone_label):
    """Look up (and cache) the WebEx timezone id for a pytz label by
    comparing how candidate zones interpret the same naive datetime."""
    if timezone_label in PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP:
        return PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP[timezone_label]
    naive_now = time(time().us).naive_datetime
    wanted = time(naive_now, tz=timezone_label)
    for tuple_ in TIMEZONE_DATA:
        for pytz_label in tuple_[2:]:
            candidate = time(naive_now, tz=pytz_label)
            if wanted == candidate:
                # NOTE(review): webex_timezone_id is not defined in this scope;
                # a match would raise NameError. It likely should be read from
                # tuple_ (e.g. tuple_[0]) — verify against TIMEZONE_DATA.
                PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP[timezone_label] = webex_timezone_id
                break
    return PYTZ_LABEL_TO_WEBEX_TIMEZONE_ID_MAP.get(timezone_label)
def get_today_bucket_for_time(desired_time, timezone):
    """Map a time-of-day in a user's timezone to today's bucket (0-47).

    The mapping can shift with DST, hence anchoring to today's date.

    @param desired_time a string in HH:MM:SS
    @param timezone an Olson string (e.g. "America/Los_Angeles")
    @return int bucket_t for today's date at the given local time
    """
    # get_current_time_utc() rather than time() so tests can mock "now"
    now = get_current_time_utc()
    now.set_tz(timezone)
    today = now.strftime('%Y-%m-%d')
    # combine today's local date with the requested time of day
    local_dt = sanetime.time("%s %s" % (today, desired_time), tz=timezone)
    return sanetime_to_bucket(local_dt)
def authorize(self):
    """Perform a password-grant OAuth exchange and store the session fields.

    Raises requests.HTTPError (via raise_for_status) on a failed exchange.
    """
    response = requests.post(self.url, data={
        'grant_type': 'password',
        'client_id': self.client_id,
        'client_secret': self.client_secret,
        'username': self.username,
        'password': self.password + self.security_token})
    response.raise_for_status()
    authdata = response.json()
    self.access_token = authdata['access_token']
    self.issued_at = time(int(authdata['issued_at']))
    self.instance_url = Href(authdata['instance_url'])
    # Two further response fields we only record for now (thx akovacs):
    # 1. `id` - a url describing the authorized user (email, name, username).
    # 2. `signature` - base64 HMAC-SHA256 over the response; verifying it
    #    would guard against tampering and probably makes sense at scale,
    #    but we leave it unchecked for now.
    self.id_url = authdata['id']
    self.signature = authdata['signature']
def pre_save(self, model_instance, add):
    """Stamp the field with the current time for auto_now fields (or
    auto_now_add on insert); otherwise defer to the parent field."""
    if not (self.auto_now or (self.auto_now_add and add)):
        return super(SaneTimeField, self).pre_save(model_instance, add)
    stamped = time()
    setattr(model_instance, self.attname, stamped)
    return stamped
def test_merge(self):
    """Merging a registrant with itself must be a no-op."""
    w = Webinar(self.organizer, key=2394, timezone='America/New_York', sessions=[])
    s = Session(w, key=6043, started_at=time('2012-06-01'), attendees=[])
    original = Registrant(webinar=w, session=s, key=2305,
                          first_name=u'Suzy', last_name=u'Samwell',
                          email=u'*****@*****.**', registered_at='5/2/12 18:02',
                          join_url='http://bit.ly/8592932', status='APPROVED',
                          viewings=[(time('4/1/11 12:02'), time('4/1/11 12:07')),
                                    (time('4/1/11 12:12'), time('4/1/11 12:17')),
                                    (time('4/1/11 12:22'), time('4/1/11 12:27'))])
    copy = original.clone()
    copy.merge(copy)  # self-merge should change nothing
    self.assertEquals(original, copy)
def __init__(self, event, shard_depth, shard_slice):
    """Remote store over one event's WebEx registrants, scoped to a shard.

    Captures `now` once so the whole sync pass shares a single timestamp.
    """
    super(WebexRegistrantRemoteStore, self).__init__()
    self.event = event
    self.shard_depth = shard_depth
    self.shard_slice = shard_slice
    self.webex_event = event.webex_event
    self.now = time()
def test_delete_started_vacation(self):
    """Cancelling an already-started vacation sets its end date to now
    instead of actually deleting the record."""
    self.client.login(**self.userdata)
    begun = time(tz='UTC') - delta(hours=2 * 24)  # started two days ago
    finish = begun + delta(hours=7 * 24)
    vacation = Vacation.objects.create(recipient=self.recipient,
                                       start_date=begun.datetime,
                                       end_date=finish.datetime)
    self.client.post(reverse('vacation_cancel', kwargs={'pk': vacation.pk}))
    vacation = Vacation.objects.get(pk=vacation.pk)  # reload from db
    self.assertTrue(vacation.end_date <= time(tz='UTC').datetime)
def __init__(self, event, shard_depth, shard_slice):
    """Remote store over one event's GTW registrants, scoped to a shard.

    `now` is captured once so every operation in the pass agrees on time.
    """
    super(GTWRegistrantRemoteStore, self).__init__()
    self.event = event
    self.shard_depth = shard_depth
    self.shard_slice = shard_slice
    self.gtw_session = event.gtw_session
    self.now = time()
def __init__(self, account, shard_depth, shard_slice):
    """Remote store over one account's GTW events, scoped to a shard."""
    super(GTWEventRemoteStore, self).__init__()
    self.account = account
    self.shard_depth = shard_depth
    self.shard_slice = shard_slice
    self.organizer = self.account.gtw_organizer
    self.now = time()  # single timestamp shared by the whole pass
def test_max_timing( self): # against all known accounts, and all their events: from pprint import pprint max_info = (None, 0) for k in self.th._accounts_dict.keys(): pprint(self.th[k]) for e in self.th[k].events: started = time() registrants = e.registrants elapsed = (time() - started).s if elapsed > max_info[1]: max_info = ("%s %s %s %s" % (e.account.site_name, e.session_key, e.title, len(registrants)), elapsed) print max_info print max_info
def __init__(self, account, shard_depth, shard_slice):
    """Remote store over one account's WebEx events, scoped to a shard."""
    super(WebexEventRemoteStore, self).__init__()
    self.account = account
    self.shard_depth = shard_depth
    self.shard_slice = shard_slice
    self.webex_account = self.account.webex_account
    self.now = time()  # captured once for the whole sync pass
def test_register(self):
    """Creating a registrant against the mocked API returns the expected
    fully-populated Registrant (key + join_url assigned remotely)."""
    w = Webinar(self.organizer, key=2394, timezone='America/New_York', sessions=[])
    s = Session(w, key=6043, started_at=time('2012-06-01'), attendees=[])
    s.attendees.append(Registrant(webinar=w, session=s, key=2305,
                                  first_name=u'Suzy', last_name=u'Samwell',
                                  email=u'*****@*****.**',
                                  duration=delta(s=4931)))
    with mocker(CreateRegistrant, text=self.registered_json):
        seed = Registrant(webinar=w, session=s,
                          first_name=u'J\u00f6hn', last_name=u'Smith',
                          email=u'*****@*****.**')
        expected = Registrant(webinar=w, session=s, key=2038,
                              first_name=u'J\u00f6hn', last_name=u'Smith',
                              email=u'*****@*****.**',
                              join_url='https://bit.ly/00293423')
        self.assertEquals(expected, seed.create())
def fill(kls, account):
    """Bulk-load the account's WebEx events into the local table.

    For events a customer auto-deletes on the WebEx side after they end,
    the previously stored local times are preserved so NULLs don't start
    flowing into starts_at/ends_at. Returns the number of remote events.
    """
    from webinars_web.webinars import models as wm
    now = time()
    remote_events = account.webex_account.events
    local_by_key = dict(
        (e.remote_id, e)
        for e in wm.Event.objects.filter(account=account, deleted_at__isnull=True)
        if e.remote_id)
    with kls.delayed as batch:
        for event in remote_events:
            row = dict(
                account_id=account.id,
                title=event.title,
                description=event.description,
                _timezone=event.starts_at.tz_name,
                _time_starts_at=nint(ntime(event._starts_at)),
                _time_ends_at=nint(ntime(event._ends_at)),
                _time_started_at=nint(ntime(event._started_at)),
                _time_ended_at=nint(ntime(event._ended_at)),
                session_key=event.session_key)
            # keep previously-known times for events that already ended locally
            local_event = local_by_key.get(event.session_key)
            if local_event and local_event._ends_at and now > local_event._ends_at:
                row['_time_starts_at'] = int(local_event._time_starts_at)
                row['_time_ends_at'] = int(local_event._time_ends_at)
                row['_timezone'] = local_event._timezone
            row['hashcode'] = kls.calc_hashcode(**row)
            batch.insert(row)
    from django.db import transaction
    transaction.commit_unless_managed()
    return len(remote_events)
def metrics(request):
    """Django view: aggregate install/uninstall counts and MRR per month
    (Dec-Jun) across all external hubs and render the metrics page.

    Excludes internal and friends-and-family hubs; beta hubs contribute
    installs but no MRR. MRR accrues $50 every 30 days from install until
    now or uninstall, whichever comes first.
    """
    from webinars_web.webinars import models as wm
    now = time()
    data = {}
    data['installs'] = [0] * 7
    data['uninstalls'] = [0] * 7
    data['new_mrr'] = [0] * 7
    data['recurring_mrr'] = [0] * 7
    # NOTE(review): these lists have 7 slots but are indexed by month % 12,
    # which only stays in range for Dec (0) through Jun (6) — months
    # Jul-Nov would raise IndexError. Confirm the reporting window.
    data['months'] = ['Dec', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun']
    for hub in wm.Hub.objects.filter(internal=False):
        if hub.friends_and_family:
            continue
        install_month = hub.created_at.month
        uninstall_month = hub.uninstalled_at and hub.uninstalled_at.month
        data['installs'][install_month % 12] += 1
        if uninstall_month is not None:
            data['uninstalls'][uninstall_month % 12] += 1
        if not hub.beta:
            # accrue $50 every 30 days while the hub remains installed
            mrr_at = hub.created_at + delta(md=30)
            new = True
            while mrr_at < now and (uninstall_month is None or mrr_at < hub.uninstalled_at):
                if new:
                    data['new_mrr'][mrr_at.month % 12] += 50
                    new = False
                else:
                    data['recurring_mrr'][mrr_at.month % 12] += 50
                mrr_at = mrr_at + delta(md=30)
    data['net_installs'] = []
    data['total_mrr'] = []
    for i in range(len(data['months'])):
        data['net_installs'].append(data['installs'][i] - data['uninstalls'][i])
        data['total_mrr'].append(data['new_mrr'][i] + data['recurring_mrr'][i])
    for attr in ('installs', 'uninstalls', 'net_installs', 'new_mrr', 'recurring_mrr', 'total_mrr'):
        data['total_%s' % attr] = sum(data[attr])
    return render_to_response('hubs/metrics.djml', {'data': data},
                              context_instance=RequestContext(request))
def __init__(self, event, shard_depth, shard_slice):
    """Remote store over one event's HubSpot registrants, scoped to a shard.

    Builds the leads API client up front (20s timeout) and captures `now`
    once so the whole pass shares a single timestamp.
    """
    super(HubSpotRegistrantRemoteStore, self).__init__()
    self.event = event
    self.shard_depth = shard_depth
    self.shard_slice = shard_slice
    self.hub = event.hub
    self.leads_client = hapi_plus.leads.LeadsClient(
        event.settings.HUBSPOT_API_KEY,
        hub_id=self.hub.id,
        env=event.settings.API_ENV,
        timeout=20)
    self.now = time()
def test_registrants(self):
    """The three registrants parsed from mocked JSON equal the fixtures."""
    webinar = Webinar(self.organizer, key=2394, timezone='America/New_York', sessions=[])
    sess = Session(webinar, key=6043, started_at=time('2012-06-01'), attendees=[])
    john = Registrant(webinar=webinar, session=sess, key=9583,
                      first_name=u'J\u00f6hn', last_name=u'Smith',
                      email=u'*****@*****.**', registered_at='5/1/12 18:01',
                      join_url='http://bit.ly/482024', status='APPROVED',
                      viewings=[(time('4/1/11 12:01'), time('4/1/11 12:22'))])
    suzy = Registrant(webinar=webinar, session=sess, key=2305,
                      first_name=u'Suzy', last_name=u'Samwell',
                      email=u'*****@*****.**', registered_at='5/2/12 18:02',
                      join_url='http://bit.ly/8592932', status='APPROVED',
                      viewings=[(time('4/1/11 12:02'), time('4/1/11 12:07')),
                                (time('4/1/11 12:12'), time('4/1/11 12:17')),
                                (time('4/1/11 12:22'), time('4/1/11 12:27'))])
    todd = Registrant(webinar=webinar, session=sess, key=4203,
                      first_name=u'Todd', last_name=u'Kells',
                      email=u'*****@*****.**', registered_at='5/3/12 18:03',
                      join_url='http://bit.ly/9293842', status='WAITING',
                      viewings=[])
    with mocker(GetAttendees, text=self.attendees_json):
        with mocker(GetRegistrations, text=self.registrants_json):
            fetched = Session(webinar, key=6043, started_at=time('2012-06-01'))
            self.assertEquals([john, suzy, todd], fetched.registrants)
def test_huge_crud(self): size = 3000 event = Event.random(self.account).create() registrants = dict( (r.email, r) for r in Registrant.random(event, size)) start = time() expected = dict( (r.email, r) for r in event.create_registrants(registrants.values())) after_create = time() actual = dict((r.email, r) for r in event.general_registrants) after_listing = time() self.assertEquals(registrants, expected) self.assertEquals(expected, actual) self.assertEquals(size, len(expected)) print "\nCREATE TIMING: %sms\nLISTING TIMING: %sms" % ( (after_create - start).ms, (after_listing - after_create).ms, )
def _get_or_create_event(self, size):
    """Return a reusable fixture event holding `size` random registrants.

    Scans the account's listed events for one with the deterministic title
    first, so repeated test runs reuse the (expensive to build) event; only
    creates and populates a new one on a cache miss.
    """
    title = 'unittests: permanent event (%s)' % size
    for e in self.account.listed_events:
        if e.title == title:
            return e
    # Miss: build a fresh event at a fixed start time and seed registrants.
    # (The original dead `event = None` initializer has been dropped — it was
    # never read: the loop either returns or we reassign below.)
    event = Event.random(self.account)
    event.title = title
    event.starts_at = time('2013-01-01 00:00:00', tz='America/New_York')
    event.create()
    event.create_registrants(Registrant.random(event, size))
    return event
def test_future_webinars(self):
    """future_webinars should parse the mocked payload into two webinars."""
    first = Webinar(self.organizer, key=1034, subject=subject(8),
                    description=description(8),
                    timezone=u"America/New_York", sessions=[])
    first.sessions.append(
        Session(first, starts_at=time('6/1/12 10:00'),
                ends_at=time('6/1/12 11:00')))
    second = Webinar(self.organizer, key=9582, subject=subject(9),
                     description=description(9),
                     timezone=u"America/New_York", sessions=[])
    # Two back-to-back daily sessions, same time slot.
    for day in ('7/1/12', '7/2/12'):
        second.sessions.append(
            Session(second, starts_at=time('%s 12:00' % day),
                    ends_at=time('%s 13:00' % day)))
    with mocker(GetFutureWebinars, text=self.future_json):
        self.assertEquals([first, second], self.organizer.future_webinars)
def test_viewings(self):
    """Merging registrants accumulates viewings into started_at/stopped_at
    and a span-based duration."""
    event = Event.random(self.account)
    reg = Registrant(event, email="*****@*****.**")
    # With no viewings, all derived fields are None.
    self.assertIsNone(reg.started_at)
    self.assertIsNone(reg.stopped_at)
    self.assertIsNone(reg.duration_in_minutes)
    minute = 60
    reg.merge(Registrant(event, email="*****@*****.**",
                         viewings=[[time(s=10 * minute),
                                    time(s=30 * minute)]]))
    self.assertEquals(10 * minute, reg.started_at.s)
    self.assertEquals(30 * minute, reg.stopped_at.s)
    self.assertEquals(20, reg.duration_in_minutes)
    reg.merge(Registrant(event, email="*****@*****.**",
                         viewings=[[time(s=50 * minute),
                                    time(s=55 * minute)]]))
    # Duration is first start to last stop (45 min), not summed watch time.
    self.assertEquals(10 * minute, reg.started_at.s)
    self.assertEquals(55 * minute, reg.stopped_at.s)
    self.assertEquals(45, reg.duration_in_minutes)
def test_sessioned_webinars(self):
    """sessioned_webinars should parse sessions with attendance counts,
    grouped under the right webinars, in payload order."""
    # (webinar key, [(session key, attendants, started_at, ended_at), ...])
    specs = [
        (8471, [(4942, 11, '6/1/11 10:01', '6/1/11 11:01')]),
        (2394, [(6043, 22, '7/1/11 12:02', '7/1/11 13:02'),
                (5028, 33, '7/2/11 12:02', '7/2/11 13:02'),
                (4023, 44, '8/1/11 12:03', '8/1/11 13:03')]),
        (3948, [(8427, 55, '9/1/11 18:04', '9/1/11 19:04')]),
    ]
    expected = []
    for webinar_key, session_specs in specs:
        w = Webinar(self.organizer, key=webinar_key, sessions=[])
        for key, count, started, ended in session_specs:
            w.sessions.append(
                Session(w, key=key, attendant_count=count,
                        started_at=time(started), ended_at=time(ended)))
        expected.append(w)
    with mocker(GetSessionedWebinars, text=self.sessioned_json):
        # Check element-wise first (better failure localization), then whole.
        for i, w in enumerate(expected):
            self.assertEquals(w, self.organizer.sessioned_webinars[i])
        self.assertEquals(expected, self.organizer.sessioned_webinars)
def generate_checkins():
    """Seed 10000 random checkins (Sept 2012, 9am-9pm) and bump counters.

    Spreads checkins across the existing users and places, incrementing the
    per-user and per-place checkin counts as it goes.
    """
    conn = Connection(MONGOURL)
    db = conn['thehotspot']
    n_users = db.users.count()
    n_places = db.places1.count()
    bump = {"$inc": {"checkins": 1}}
    for _ in xrange(10000):
        when = time(2012, 9, randint(1, 30), randint(9, 21)).datetime
        user_id = randint(1, n_users)
        place_id = randint(1, n_places)
        db.checkins.save({
            'user_id': user_id,
            'place_id': place_id,
            'datetime': when,
        })
        db.users.update({"_id": user_id}, bump)
        # All three places collections receive the same counter increment.
        for places in (db.places1, db.places2, db.places3):
            places.update({"_id": place_id}, bump)
def test_past_webinars(self):
    """past_webinars should parse the mocked payload into three webinars."""
    # (key, fixture index, timezone, [(starts_at, ends_at), ...])
    specs = [
        (8471, 1, u"America/New_York",
         [('6/1/11 10:00', '6/1/11 11:00')]),
        (2394, 2, u"America/New_York",
         [('7/1/11 12:00', '7/1/11 13:00'),
          ('7/2/11 12:00', '7/2/11 13:00')]),
        (3948, 3, u"America/Los_Angeles",
         [('9/1/11 13:00', '9/1/11 14:00')]),
    ]
    expected = []
    for key, idx, tz, session_times in specs:
        w = Webinar(self.organizer, key=key, subject=subject(idx),
                    description=description(idx), timezone=tz, sessions=[])
        for starts, ends in session_times:
            w.sessions.append(
                Session(w, starts_at=time(starts), ends_at=time(ends)))
        expected.append(w)
    with mocker(GetPastWebinars, text=self.past_json):
        self.assertEquals(expected, self.organizer.past_webinars)
def test_clone(self):
    """A cloned registrant should compare equal to its source."""
    webinar = Webinar(self.organizer, key=2394,
                      timezone='America/New_York', sessions=[])
    sess = Session(webinar, key=6043, started_at=time('2012-06-01'),
                   attendees=[])
    viewings = [(time('4/1/11 12:02'), time('4/1/11 12:07')),
                (time('4/1/11 12:12'), time('4/1/11 12:17')),
                (time('4/1/11 12:22'), time('4/1/11 12:27'))]
    original = Registrant(webinar=webinar, session=sess, key=2305,
                          first_name=u'Suzy', last_name=u'Samwell',
                          email=u'*****@*****.**',
                          registered_at='5/2/12 18:02',
                          join_url='http://bit.ly/8592932',
                          status='APPROVED', viewings=viewings)
    self.assertEquals(original, original.clone())
import matplotlib import pandas as pd import pandas.io.sql as pd_sql from scipy import sparse import itertools #from sklearn.preprocessing import normalize #import bottleneck question_dict = dict() print 'building question_dict' for i in questions.index: time_in_sec = sanetime.time(questions.ix[i][3]).seconds question_dict[questions.ix[i][0]] = time_in_sec #add (answerID, time) pair to dictionary for O(1) lookup. print 'buildling time_delta' #populate the deltas (question answered time - question asked time in seconds.) time_delta = [] for i in answers.index: question_t = question_dict[answers.ix[i][1]] #time question was asked. answered_t = sanetime.time(answers.ix[i][3]).seconds #time answered. delta = answered_t - question_t time_delta.append(delta)
def compress(self, data_list):
    """Collapse the split form values into one tz-localized sane time,
    or None when the parent compress yields nothing."""
    dt = super(SplitSaneTzTimeField, self).compress(data_list)
    if dt is None:
        return None
    return time(dt, self.tz)
def test_view_collapsing(self):
    """Merging two registrants concatenates their disjoint viewing windows."""
    event = Event.random(self.account)
    first = Registrant(event, email="*****@*****.**",
                       viewings=[[time(s=100), time(s=200)]])
    self.assertEquals([[time(s=100), time(s=200)]], first.viewings)
    second = Registrant(event, email="*****@*****.**",
                        viewings=[[time(s=300), time(s=400)]])
    self.assertEquals([[time(s=300), time(s=400)]], second.viewings)
    first.merge(second)
    # Non-overlapping windows stay separate after the merge.
    self.assertEquals([[time(s=100), time(s=200)],
                       [time(s=300), time(s=400)]],
                      first.viewings)
def __init__(self, **kwargs):
    """Build an organizer from kwargs, defaulting the history window start."""
    super(Organizer, self).__init__()
    # Accept several aliases for each credential field.
    self.oauth = mget(kwargs, 'oauth', 'oauth_token', 'access_token')
    self.key = mget(kwargs, 'key', 'organizerKey', 'organizer_key')
    self.now = time()
    # Fall back to the default lookback window when no start is supplied.
    explicit_start = kwargs.get('starts_at', None)
    self.starts_at = explicit_start or (self.now - DEFAULT_HISTORY_DELTA)
def test_webinars(self):
    """webinars should combine the past, future, and sessioned listings
    (all mocked below) into [wa, wb, wc, wd, we], with every session
    back-referencing the exact webinar instance that owns it."""
    # wa-wc: past webinars; their sessions carry both the actual
    # started_at/ended_at and the scheduled starts_at/ends_at.
    wa = Webinar(self.organizer, key=8471, subject=subject(1),
                 description=description(1),
                 timezone=u"America/New_York", sessions=[])
    wa.sessions.append(
        Session(wa, key=4942, attendant_count=11,
                started_at=time('6/1/11 10:01'),
                ended_at=time('6/1/11 11:01'),
                starts_at=time('6/1/11 10:00'),
                ends_at=time('6/1/11 11:00')))
    wb = Webinar(self.organizer, key=2394, subject=subject(2),
                 description=description(2),
                 timezone=u"America/New_York", sessions=[])
    wb.sessions.append(
        Session(wb, key=6043, attendant_count=22,
                started_at=time('7/1/11 12:02'),
                ended_at=time('7/1/11 13:02'),
                starts_at=time('7/1/11 12:00'),
                ends_at=time('7/1/11 13:00')))
    wb.sessions.append(
        Session(wb, key=5028, attendant_count=33,
                started_at=time('7/2/11 12:02'),
                ended_at=time('7/2/11 13:02'),
                starts_at=time('7/2/11 12:00'),
                ends_at=time('7/2/11 13:00')))
    # This session has no scheduled starts_at/ends_at in the fixture.
    wb.sessions.append(
        Session(wb, key=4023, attendant_count=44,
                started_at=time('8/1/11 12:03'),
                ended_at=time('8/1/11 13:03')))
    wc = Webinar(self.organizer, key=3948, subject=subject(3),
                 description=description(3),
                 timezone=u"America/Los_Angeles", sessions=[])
    wc.sessions.append(
        Session(wc, key=8427, attendant_count=55,
                started_at=time('9/1/11 18:04'),
                ended_at=time('9/1/11 19:04'),
                starts_at=time('9/1/11 13:00'),
                ends_at=time('9/1/11 14:00')))
    # wd, we: future webinars; scheduled times only, no attendance data.
    wd = Webinar(self.organizer, key=1034, subject=subject(8),
                 description=description(8),
                 timezone=u"America/New_York", sessions=[])
    wd.sessions.append(
        Session(wd, starts_at=time('6/1/12 10:00'),
                ends_at=time('6/1/12 11:00')))
    we = Webinar(self.organizer, key=9582, subject=subject(9),
                 description=description(9),
                 timezone=u"America/New_York", sessions=[])
    we.sessions.append(
        Session(we, starts_at=time('7/1/12 12:00'),
                ends_at=time('7/1/12 13:00')))
    we.sessions.append(
        Session(we, starts_at=time('7/2/12 12:00'),
                ends_at=time('7/2/12 13:00')))
    # Mock all three remote listings so .webinars can merge them.
    with mocker(GetPastWebinars, text=self.past_json):
        with mocker(GetFutureWebinars, text=self.future_json):
            with mocker(GetSessionedWebinars,
                        text=self.sessioned_json):
                self.assertEquals([wa, wb, wc, wd, we],
                                  self.organizer.webinars)
                # Each session must point back at its owning webinar
                # object itself (identity via id(), not just equality).
                for w in self.organizer.webinars:
                    for s in w.sessions:
                        self.assertEquals(id(w), id(s.webinar))