def get_group_data(group):
    """Return a dict with schedule and date keys for *group*, or None.

    Lookup order: memcache -> datastore (if not older than MAX_DB_TIME)
    -> remote fetch via bsuirparser. Every successful path caches the
    result under the same "group<group>" key for MAX_CACHING_TIME.
    """
    cache_key = "group%s" % group
    groupdata = memcache.get(cache_key)
    if groupdata is not None:
        # Cache hit.
        logging.info("Get data for %s from cache" % group)
        return groupdata

    # Not in cache: consult the datastore.
    group_schedule = GroupSchedule.query(GroupSchedule.group == group).get()  # TODO: .get_by_id(...)
    if group_schedule:
        age = (datetime.datetime.now() - group_schedule.date).total_seconds()
        if age < MAX_DB_TIME:
            # Datastore record is fresh enough: cache and return it.
            dicdata = {
                DICTDATA_SCHEDULE_KEY_NAME: group_schedule.schedule,
                DICTDATA_DATE_KEY_NAME: group_schedule.date,
            }
            memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
            logging.info("Get data for %s from db and save to cache" % group)
            return dicdata

        logging.info("Old data in DB for %s" % group)
        # Datastore record is stale: try to refresh from the site.
        schedhtml = bsuirparser.fetch(group)
        if not schedhtml:
            # Site did not respond: fall back to the stale record.
            dicdata = {
                DICTDATA_SCHEDULE_KEY_NAME: group_schedule.schedule,
                DICTDATA_DATE_KEY_NAME: group_schedule.date,
            }
            memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
            logging.info("Data in DB is too old, but site isn't respond. "
                         "Old data %s save to cache" % group)
            return dicdata

        studyweek = bsuirparser.parse(schedhtml)
        if not studyweek:
            # FIX: the original deleted the old record and stored the
            # unchecked parse result; on parse failure keep the stale
            # data instead of destroying the only copy we have.
            logging.error("Parsing %s failed" % group)
            dicdata = {
                DICTDATA_SCHEDULE_KEY_NAME: group_schedule.schedule,
                DICTDATA_DATE_KEY_NAME: group_schedule.date,
            }
            memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
            return dicdata

        # Parse succeeded: replace the stale record with the new one.
        group_schedule.key.delete()
        dbrec = GroupSchedule(group=group, schedule=studyweek)
        dbrec.put()
        dicdata = {
            DICTDATA_SCHEDULE_KEY_NAME: studyweek,
            DICTDATA_DATE_KEY_NAME: dbrec.date,
        }
        # FIX: original used the bare `group` as the memcache key here,
        # which never matches the "group%s" key used for lookups.
        memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
        logging.info("Get new data for %s and save to cache" % group)
        return dicdata

    # Not in the datastore: fetch and parse from scratch.
    schedhtml = bsuirparser.fetch(group)
    if not schedhtml:
        logging.error("Fetching %s failed" % group)
        return None
    studyweek = bsuirparser.parse(schedhtml)
    if not studyweek:
        logging.error("Parsing %s failed" % group)
        return None
    dbrec = GroupSchedule(group=group, schedule=studyweek)
    dbrec.put()
    dicdata = {
        DICTDATA_SCHEDULE_KEY_NAME: studyweek,
        DICTDATA_DATE_KEY_NAME: dbrec.date,
    }
    # FIX: use the prefixed cache key so subsequent lookups hit.
    memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
    logging.info("Get new data for %s and save to cache" % group)
    return dicdata
def get_group_data(group):
    """Return a dict with schedule and date keys for *group*, or None.

    Lookup order: memcache -> datastore (if not older than MAX_DB_TIME)
    -> remote fetch via bsuirparser. Every successful path caches the
    result under the same 'group<group>' key for MAX_CACHING_TIME.
    """
    cache_key = 'group%s' % group
    groupdata = memcache.get(cache_key)
    if groupdata is not None:
        # Cache hit.
        logging.info("Get data for %s from cache" % group)
        return groupdata

    # Not in cache: consult the datastore.
    group_schedule = GroupSchedule.query(GroupSchedule.group == group).get()  # TODO: .get_by_id(...)
    if group_schedule:
        age = (datetime.datetime.now() - group_schedule.date).total_seconds()
        if age < MAX_DB_TIME:
            # Datastore record is fresh enough: cache and return it.
            dicdata = {
                DICTDATA_SCHEDULE_KEY_NAME: group_schedule.schedule,
                DICTDATA_DATE_KEY_NAME: group_schedule.date,
            }
            memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
            logging.info("Get data for %s from db and save to cache" % group)
            return dicdata

        logging.info("Old data in DB for %s" % group)
        # Datastore record is stale: try to refresh from the site.
        schedhtml = bsuirparser.fetch(group)
        if not schedhtml:
            # Site did not respond: fall back to the stale record.
            dicdata = {
                DICTDATA_SCHEDULE_KEY_NAME: group_schedule.schedule,
                DICTDATA_DATE_KEY_NAME: group_schedule.date,
            }
            memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
            logging.info("Data in DB is too old, but site isn't respond. "
                         "Old data %s save to cache" % group)
            return dicdata

        studyweek = bsuirparser.parse(schedhtml)
        if not studyweek:
            # FIX: the original deleted the old record and stored the
            # unchecked parse result; on parse failure keep the stale
            # data instead of destroying the only copy we have.
            logging.error("Parsing %s failed" % group)
            dicdata = {
                DICTDATA_SCHEDULE_KEY_NAME: group_schedule.schedule,
                DICTDATA_DATE_KEY_NAME: group_schedule.date,
            }
            memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
            return dicdata

        # Parse succeeded: replace the stale record with the new one.
        group_schedule.key.delete()
        dbrec = GroupSchedule(group=group, schedule=studyweek)
        dbrec.put()
        dicdata = {
            DICTDATA_SCHEDULE_KEY_NAME: studyweek,
            DICTDATA_DATE_KEY_NAME: dbrec.date,
        }
        # FIX: original used the bare `group` as the memcache key here,
        # which never matches the 'group%s' key used for lookups.
        memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
        logging.info("Get new data for %s and save to cache" % group)
        return dicdata

    # Not in the datastore: fetch and parse from scratch.
    schedhtml = bsuirparser.fetch(group)
    if not schedhtml:
        logging.error("Fetching %s failed" % group)
        return None
    studyweek = bsuirparser.parse(schedhtml)
    if not studyweek:
        logging.error("Parsing %s failed" % group)
        return None
    dbrec = GroupSchedule(group=group, schedule=studyweek)
    dbrec.put()
    dicdata = {
        DICTDATA_SCHEDULE_KEY_NAME: studyweek,
        DICTDATA_DATE_KEY_NAME: dbrec.date,
    }
    # FIX: use the prefixed cache key so subsequent lookups hit.
    memcache.set(cache_key, dicdata, MAX_CACHING_TIME)
    logging.info("Get new data for %s and save to cache" % group)
    return dicdata