def get_schedule_json(url, channel, tzinfo=None):
    """Fetch the schedule JSON from `url` and return the items whose location
    matches ROOM_MAP[channel], with their start/end values converted to UTC.

    Relies on the module-level url_cache, ROOM_MAP and parse_duration, plus
    urllib2, json, dateutil's parser, pytz and datetime_tz.
    """
    if url not in url_cache:
        sys.stderr.write("Downloading %s\n" % url)
        url_json = urllib2.urlopen(url).read()
        url_data = json.loads(url_json)

    # Convert the time/date values into datetime objects
    channel_url_data = []
    for item in url_data:
        # Skip items that are not in the room mapped to this channel.
        if item['fields']['location'] != ROOM_MAP[channel]:
            continue

        # No end time, generate one from start+duration...
        endtime = (parser.parse(item['fields']['start'])
                   + parse_duration(item['fields']['duration']))
        enddt = datetime_tz.smartparse(str(endtime), tzinfo)
        endtime = enddt.astimezone(pytz.utc)

        for value in item['fields']:
            if value not in ('start', 'end'):
                continue
            dt = datetime_tz.smartparse(item['fields'][value], tzinfo)
            item['fields'][value] = dt.astimezone(pytz.utc)

        channel_url_data.append(item)
        item['fields'].update(dict(end=endtime))

    # url_cache[url] = url_data
    return channel_url_data
def get_schedule_json(url, channel, tzinfo=None):
    """Variant of the function above that parses the feed with simplejson.

    Relies on the module-level url_cache, ROOM_MAP and parse_duration, plus
    urllib2, simplejson, dateutil's parser, pytz and datetime_tz.
    """
    if url not in url_cache:
        sys.stderr.write("Downloading %s\n" % url)
        url_json = urllib2.urlopen(url).read()
        url_data = simplejson.loads(url_json)

    # Convert the time/date values into datetime objects
    channel_url_data = []
    for item in url_data:
        # Skip items that are not in the room mapped to this channel.
        if item['fields']['location'] != ROOM_MAP[channel]:
            continue

        # No end time, generate one from start+duration...
        endtime = (parser.parse(item['fields']['start'])
                   + parse_duration(item['fields']['duration']))
        enddt = datetime_tz.smartparse(str(endtime), tzinfo)
        endtime = enddt.astimezone(pytz.utc)

        for value in item['fields']:
            if value not in ('start', 'end'):
                continue
            dt = datetime_tz.smartparse(item['fields'][value], tzinfo)
            item['fields'][value] = dt.astimezone(pytz.utc)

        channel_url_data.append(item)
        item['fields'].update(dict(end=endtime))

    # url_cache[url] = url_data
    return channel_url_data
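# Both variants above call a parse_duration() helper that is not shown in
# these snippets.  The sketch below is a hypothetical stand-in, assuming the
# feed encodes durations either as plain minutes ("45") or as "HH:MM[:SS]";
# all it really needs to do is return a timedelta so the "start + duration"
# arithmetic above works.
from datetime import timedelta

def parse_duration(value):
    value = str(value)
    if ':' not in value:
        # Bare number: treat it as a duration in minutes (assumption).
        return timedelta(minutes=int(value))
    parts = [int(p) for p in value.split(':')]
    while len(parts) < 3:
        parts.append(0)  # pad out to hours, minutes, seconds
    hours, minutes, seconds = parts[:3]
    return timedelta(hours=hours, minutes=minutes, seconds=seconds)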
def post(self, key=None):
    """Create or update an Event from the submitted form, then render its
    text through Django template substitution and Markdown."""
    event_name = self.request.get('name')
    if key:
        try:
            key = long(key)
            event = models.Event.get_by_id(key)
            event.name = event_name
        # pylint: disable-msg=W0702
        except (AssertionError, ValueError):
            self.redirect('/events')
            return
    else:
        # name is a required field; must populate now. Rest comes later.
        event = models.Event(name=event_name, text='', html='',
                             start=datetime.now(), end=datetime.now())

    inputtext = self.request.get('input')
    start_date = datetime_tz.smartparse(self.request.get('start'))
    end_date = datetime_tz.smartparse(self.request.get('end'))

    event.input = inputtext
    event.start = start_date.asdatetime()
    event.end = end_date.asdatetime()
    event.put()

    # We can't do this template substitution until we have saved the event.
    try:
        plaintext = str(template.Template(inputtext).render(
            template.Context({
                'event': event,
                'req': self.request,
                'agenda': offers.get_event_agenda(event),
            }),
        ))
        html = markdown.markdown(plaintext, extensions).encode('utf-8')

        event.plaintext = plaintext
        event.html = html
    except Exception:
        sio = StringIO.StringIO()
        traceback.print_exc(file=sio)
        event.plaintext = sio.getvalue()

    logging.debug("e.a %s, e.n %s", event.announcement, event.name)

    event.put()
    self.redirect('%s/edit' % event.get_url())
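# The post() handler above looks like a Google App Engine webapp
# RequestHandler (self.request.get, self.redirect, datastore models).  Below
# is a minimal, hypothetical sketch of how such a handler might be wired up;
# the class name and URL pattern are assumptions, but the capture group in
# the URL regex is what webapp passes in as the optional `key` argument.
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app

class EventEditHandler(webapp.RequestHandler):
    # The post() method shown above would live on a handler class like this.
    def post(self, key=None):
        pass

application = webapp.WSGIApplication([
    ('/events/edit/?([0-9]*)', EventEditHandler),
], debug=True)

def main():
    run_wsgi_app(application)

if __name__ == '__main__':
    main()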
def get_schedule_json(url, tzinfo=None):
    """Download and cache the schedule JSON for `url`, converting each item's
    start/end values to UTC datetimes.  Results are memoised in the
    module-level url_cache dict.
    """
    if url not in url_cache:
        sys.stderr.write("Downloading %s\n" % url)
        url_json = urllib2.urlopen(url).read()
        url_data = simplejson.loads(url_json)

        # Convert the time/date values into datetime objects
        for room in url_data:
            for item in url_data[room]:
                for value in item:
                    if value not in ('start', 'end'):
                        continue
                    dt = datetime_tz.smartparse(item[value], tzinfo)
                    item[value] = dt.astimezone(pytz.utc)

                # If no end time, generate one from start+duration...

        url_cache[url] = url_data

    return url_cache[url]
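# A hypothetical usage sketch for the cached variant above: the timezone is
# an assumption, and url_cache is the module-level dict the function expects
# to find.
import sys
import pytz

url_cache = {}

def dump_schedule(url):
    tzinfo = pytz.timezone("Australia/Sydney")  # assumed conference timezone
    schedule = get_schedule_json(url, tzinfo=tzinfo)
    for room in sorted(schedule):
        for item in schedule[room]:
            sys.stderr.write("%s: %s -> %s\n" % (
                room, item.get('start'), item.get('end')))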