class EventAPI(handlers.JsonRequestHandler):
    '''JSON endpoints for listing, creating and updating Event entities.'''

    @authorized.role('user')
    def list(self, d):
        '''Return a page of the current user's events.'''
        # Renamed `max` -> `per_page` so the builtin isn't shadowed.
        page, per_page, offset = tools.paging_params(self.request)
        events = Event.Fetch(self.user, limit=per_page, offset=offset)
        self.set_response({
            'events': [event.json() for event in events]
        }, debug=True, success=True)

    @authorized.role('user')
    def update(self, d):
        '''
        Create or update
        '''
        # Renamed `id` -> `event_id` so the builtin isn't shadowed.
        event_id = self.request.get_range('id')
        params = tools.gets(self,
                            strings=['title', 'details', 'color'],
                            dates=['date_start', 'date_end'],
                            booleans=['ongoing'],
                            supportTextBooleans=True)
        event = self.user.get(Event, id=event_id)
        if not event:
            # No existing event -- create one, but only if a start date came in
            start = params.get('date_start')
            if start:
                event = Event.Create(self.user, params.get('date_start'))
        if event:
            event.Update(**params)
            event.put()
            self.success = True
            self.message = "Event saved"
        else:
            self.message = "Couldn't create event"
        self.set_response({
            'event': event.json() if event else None
        })

    @authorized.role('user')
    def batch_create(self, d):
        '''Create multiple events from a JSON array (best effort on bad JSON).'''
        try:
            events = json.loads(self.request.get('events'))
        except ValueError:
            self.message = "Malformed JSON"
            events = []
        dbp = []
        # Loop variable renamed from `e` so it no longer shadows the
        # exception variable above.
        for item in events:
            if 'date_start' in item and isinstance(item['date_start'], basestring):
                item['date_start'] = tools.fromISODate(item['date_start'])
            if 'date_end' in item and isinstance(item['date_end'], basestring):
                # An empty date_end string falls back to date_start
                item['date_end'] = tools.fromISODate(item['date_end']) if item.get('date_end') else item.get('date_start')
            if not item.get('date_end'):
                item['date_end'] = item.get('date_start')
            event = Event.Create(self.user, **item)
            dbp.append(event)
        if dbp:
            ndb.put_multi(dbp)
            self.success = True
            self.message = "Creating %d event(s)" % len(dbp)
        self.set_response()
def get(self):
    '''Cron: sync recent Github commit counts for all active users.

    Looks back GH_COMMIT_OVERLAP days from the requested 'date'
    (default: yesterday) and stores per-day commit counts as
    TrackingDay records.
    '''
    from services.github import GithubClient
    date_param = self.request.get('date')
    if date_param:
        anchor = tools.fromISODate(date_param)
    else:
        anchor = datetime.today() - timedelta(days=1)
    users = User.SyncActive('github')
    res = {}
    days_to_put = []
    for user in users:
        gh_client = GithubClient(user)
        if not gh_client._can_run():
            logging.debug("Github updater can't run")
            continue
        date_range = [(anchor - timedelta(days=back)).date()
                      for back in range(self.GH_COMMIT_OVERLAP)]
        logging.debug("Running SyncGithub cron for %s on %s..." % (user, date_range))
        commits_dict = gh_client.get_contributions_on_date_range(date_range)
        if commits_dict is not None:
            for day, n_commits in commits_dict.items():
                td = TrackingDay.Create(user, day)
                td.set_properties({'commits': n_commits})
                days_to_put.append(td)
    if days_to_put:
        ndb.put_multi(days_to_put)
    self.json_out(res)
def get(self):
    '''Cron job: pull recent Github contribution counts for active users.

    Covers a window of GH_COMMIT_OVERLAP days ending at the requested
    'date' (yesterday by default) and upserts TrackingDay records.
    '''
    from services.github import GithubClient
    raw_date = self.request.get('date')
    last_date = tools.fromISODate(raw_date) if raw_date else datetime.today() - timedelta(days=1)
    res = {}
    pending = []
    for user in User.SyncActive('github'):
        gh_client = GithubClient(user)
        if gh_client._can_run():
            window = [(last_date - timedelta(days=i)).date()
                      for i in range(self.GH_COMMIT_OVERLAP)]
            logging.debug("Running SyncGithub cron for %s on %s..." % (user, window))
            by_date = gh_client.get_contributions_on_date_range(window)
            if by_date is not None:
                for day, count in by_date.items():
                    record = TrackingDay.Create(user, day)
                    record.set_properties({'commits': count})
                    pending.append(record)
        else:
            logging.debug("Github updater can't run")
    if pending:
        ndb.put_multi(pending)
    self.json_out(res)
def get(self):
    '''Cron: sync a single day of Github commit counts for active users.'''
    from services.github import GithubClient
    iso = self.request.get('date')
    if iso:
        day = tools.fromISODate(iso).date()
    else:
        day = (datetime.today() - timedelta(days=1)).date()
    res = {}
    to_put = []
    for user in User.SyncActive('github'):
        gh_client = GithubClient(user)
        logging.debug("Running SyncGithub cron for %s on %s..." % (user, day))
        if gh_client._can_run():
            commits = gh_client.get_contributions_on_day(day)
            if commits is not None:
                td = TrackingDay.Create(user, day)
                td.set_properties({'commits': commits})
                to_put.append(td)
                # NOTE(review): res is overwritten on every iteration, so the
                # response only reflects the last user processed -- confirm
                # whether that is intentional.
                res = td.json()
        else:
            logging.debug("Github updater can't run")
    if to_put:
        ndb.put_multi(to_put)
    self.json_out(res, debug=True)
def submit(self, d):
    '''
    Submit today's journal (yesterday if 00:00 - 04:00)
    '''
    date = None
    _date = self.request.get('date')
    if _date:
        date = tools.fromISODate(_date)
    task_json = tools.getJson(self.request.get('tasks'))  # JSON
    params = tools.gets(self,
                        strings=['lat', 'lon', 'tags_from_text'],
                        json=['data'],
                        lists=['tags'])
    logging.debug(params)
    # Fix: jrnl must exist even when no 'data' param was sent, otherwise
    # set_response below raised a NameError.
    jrnl = None
    if params.get('data'):
        if not params.get('tags'):
            params['tags'] = []
        jrnl = MiniJournal.Create(self.user, date)
        jrnl.Update(**params)
        jrnl.parse_tags()
        jrnl.put()
        if task_json:
            # Save new tasks for tomorrow
            tasks = []
            for t in task_json:
                if t:
                    tasks.append(Task.Create(self.user, t))
            ndb.put_multi(tasks)
        self.success = True
        self.message = "Journal submitted!"
    self.set_response({'journal': jrnl.json() if jrnl else None})
def testFromISO(self):
    '''tools.fromISODate should parse YYYY-MM-DD strings into datetimes.'''
    cases = {
        '2017-01-01': datetime(2017, 1, 1),
        '2001-12-31': datetime(2001, 12, 31),
        '1985-04-04': datetime(1985, 4, 4),
    }
    for iso, expected in cases.items():
        self.assertEqual(tools.fromISODate(iso), expected)
def testFromISO(self):
    '''Round-trip a few ISO date strings through tools.fromISODate.'''
    volley = [
        ('2017-01-01', datetime(2017, 1, 1)),
        ('2001-12-31', datetime(2001, 12, 31)),
        ('1985-04-04', datetime(1985, 4, 4)),
    ]
    for iso_string, expected_dt in volley:
        self.assertEqual(tools.fromISODate(iso_string), expected_dt)
def range(self, d):
    '''Return recent days of all active habits'''
    start_iso = self.request.get('start_date')
    end_iso = self.request.get('end_date')
    habits = Habit.Active(self.user)
    habitdays = HabitDay.Range(self.user,
                               habits,
                               tools.fromISODate(start_iso),
                               until_date=tools.fromISODate(end_iso))
    self.set_response({
        'habits': [h.json() for h in habits],
        'habitdays': tools.lookupDict(habitdays,
                                      keyprop="key_id",
                                      valueTransform=lambda hd: hd.json())
    }, success=True)
def get(self, d):
    '''Fetch dashboard data (journals, habits, goals, tasks, tracking)
    for a date range. Query params toggle which collections are included.

    Fix: `tasks` and `tracking_days` were referenced in the response even
    when their fetches were disabled, raising a NameError.
    '''
    # TODO: Async fetches
    with_habits = self.request.get_range('with_habits', default=0) == 1
    with_tracking = self.request.get_range('with_tracking', default=1) == 1
    with_goals = self.request.get_range('with_goals', default=1) == 1
    with_tasks = self.request.get_range('with_tasks', default=1) == 1
    date_start = self.request.get('date_start')
    date_end = self.request.get('date_end')
    dt_start, dt_end = tools.fromISODate(date_start), tools.fromISODate(
        date_end)
    iso_dates = []
    habits = []
    today = datetime.today()
    habitdays = []
    goals = []
    tasks = []          # Fix: previously undefined when with_tasks was 0
    tracking_days = []  # Fix: previously undefined when with_tracking was 0
    journals, iso_dates = MiniJournal.Fetch(self.user, dt_start, dt_end)
    if with_habits:
        habits = Habit.Active(self.user)
        habitdays = HabitDay.Range(self.user, habits, dt_start, dt_end)
    if with_tracking:
        tracking_days = TrackingDay.Range(self.user, dt_start, dt_end)
    if with_goals:
        goals = Goal.Year(self.user, today.year)
    if with_tasks:
        tasks = Task.DueInRange(self.user, dt_start, dt_end, limit=100)
    self.set_response({
        'dates': iso_dates,
        'journals': [j.json() for j in journals if j],
        'habits': [h.json() for h in habits],
        'goals': [g.json() for g in goals],
        'tasks': [t.json() for t in tasks],
        'tracking_days': [p.json() for p in tracking_days],
        'habitdays': tools.lookupDict(habitdays,
                                      keyprop="key_id",
                                      valueTransform=lambda hd: hd.json())
    }, success=True)
def batch_create(self, d):
    '''Create multiple quotes from a JSON array.

    Fix: malformed JSON previously raised an uncaught ValueError; now
    handled the same way as EventAPI.batch_create.
    '''
    try:
        quotes = json.loads(self.request.get('quotes'))
    except ValueError:
        self.message = "Malformed JSON"
        quotes = []
    dbp = []
    for q in quotes:
        if 'dt_added' in q and isinstance(q['dt_added'], basestring):
            q['dt_added'] = tools.fromISODate(q['dt_added'])
        quote = Quote.Create(self.user, **q)
        dbp.append(quote)
    if dbp:
        ndb.put_multi(dbp)
        self.success = True
        self.message = "Putting %d" % len(dbp)
    self.set_response()
def commit(self, d):
    '''Mark done/not-done for a habit day'''
    from constants import HABIT_COMMIT_REPLIES
    habit_id = self.request.get_range('habit_id')
    day_iso = self.request.get('date')
    hd = None
    habit = self.user.get(Habit, id=habit_id)
    if habit:
        hd = HabitDay.Commit(habit, tools.fromISODate(day_iso))
        self.message = random.choice(HABIT_COMMIT_REPLIES)
        self.success = True
    self.set_response({'habitday': hd.json() if hd else None})
def list(self, d):
    '''Return the most recent `days` journals ending at before_date
    (defaults to today), fetched by key for efficiency.'''
    days = self.request.get_range('days', default=4)
    _before_date = self.request.get('before_date')
    before_date = tools.fromISODate(_before_date) if _before_date else datetime.today()
    journal_keys = []
    cursor = before_date
    for _ in range(days):
        # MiniJournal keys are the ISO date string, parented to the user
        journal_keys.append(
            ndb.Key('MiniJournal', tools.iso_date(cursor), parent=self.user.key))
        cursor -= timedelta(days=1)
    journals = ndb.get_multi(journal_keys)
    self.set_response({
        'journals': [j.json() for j in journals if j]
    }, success=True)
def update(self, d):
    '''
    Update a single TrackingDay() object with properties
    defined via JSON key(str) -> value(str)
    '''
    _date = self.request.get('date')
    date = tools.fromISODate(_date) if _date else None
    data_json = tools.getJson(self.request.get('data'))  # JSON
    td = TrackingDay.Create(self.user, date)  # Get or create
    if data_json:
        td.set_properties(data_json)
        td.put()
        self.success = True
    self.set_response({'tracking_day': td.json() if td else None})
def toggle(self, d):
    '''Mark done/not-done for a habit day'''
    from constants import HABIT_DONE_REPLIES
    habit_id = self.request.get_range('habit_id')
    day_iso = self.request.get('date')
    hd = None
    habit = Habit.get_by_id(habit_id, parent=self.user.key)
    if habit:
        marked_done, hd = HabitDay.Toggle(habit, tools.fromISODate(day_iso))
        if marked_done:
            self.message = random.choice(HABIT_DONE_REPLIES)
        self.success = True
    self.set_response({'habitday': hd.json() if hd else None})
def update(self, d):
    '''Create a quote, or update an existing one by id.

    Fix: Update()/put() previously ran even when `quote` was None
    (unknown id, or Create failed), raising AttributeError despite the
    code above explicitly preparing a "Couldn't create quote" response.
    '''
    # Renamed `id` -> `quote_id` so the builtin isn't shadowed.
    quote_id = self.request.get('id')
    params = tools.gets(
        self,
        strings=['source', 'content', 'link', 'location', 'date'],
        lists=['tags'])
    quote = None
    if quote_id:
        quote = self.user.get(Quote, id=quote_id)
    else:
        if 'date' in params:
            # NOTE(review): 'date' is only converted to dt_added on create,
            # not on update by id -- confirm that is intended.
            params['dt_added'] = tools.fromISODate(params.get('date'))
        quote = Quote.Create(self.user, **params)
    self.message = "Quote saved!" if quote else "Couldn't create quote"
    self.success = quote is not None
    if quote:
        quote.Update(**params)
        quote.put()
    self.set_response({'quote': quote.json() if quote else None})
def background_service_fetch(uid, mckeys=None, limit=20):
    '''Fetch data from all requested services and store to memcache -- may be slow.
    '''
    # uid is a stringified User id; each mckey is "<service>:<ISO date>".
    user = User.get_by_id(int(uid))
    if user and mckeys:
        http_auth = user.get_http_auth()
        if http_auth:
            # First pass: mark every requested key as LOADING so callers
            # polling memcache can distinguish "in progress" from "missing".
            to_cache = {}
            for mckey in mckeys:
                to_cache[mckey] = {
                    'items': [],
                    'status': SERVICE.LOADING,
                    'issue': None
                }
            # Set loading status
            memcache.set_multi(to_cache)
            # Second pass: fetch each service/date and overwrite its cache
            # entry with results (or an error string).
            for mckey in mckeys:
                svc, date = mckey.split(':')
                date_dt = tools.fromISODate(date)
                next_date_dt = date_dt + timedelta(days=1)
                items = []
                issue = None
                try:
                    # Fetcher classes are resolved by naming convention,
                    # e.g. services.ServiceFetcher_github.
                    fetcher_class = getattr(services, 'ServiceFetcher_%s' % svc)
                    if issubclass(fetcher_class, services.ServiceFetcher):
                        fetcher = fetcher_class(user=user,
                                                date_dt=date_dt,
                                                next_date_dt=next_date_dt,
                                                http_auth=http_auth,
                                                limit=limit)
                        items = fetcher.fetch()
                        # NOTE(review): `success` is assigned but never read.
                        success = True
                    else:
                        logging.error("Failed to get fetcher_class for %s" % svc)
                except Exception, e:
                    # Best-effort: any fetch failure is recorded in the cache
                    # entry rather than aborting the remaining keys.
                    issue = "Error fetching from %s - %s" % (svc, e)
                to_cache = {
                    'items': items,
                    'status': SERVICE.LOADED if not issue else SERVICE.ERROR,
                    'issue': issue
                }
                memcache.set(mckey, to_cache, time=MEMCACHE_EXPIRE_SECS)
                if date:
                    # Log search
                    DaySearch.Increment(user=user, date=date)