def get(self, slug):
    s = utils.lookup_agency_alias(slug)
    if s:
        return self.redirect('/agency/%s/' % (s))
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    messages = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1000)
    paginator = ObjectPaginator(messages, 10, 1)
    try:
        page = int(self.get_argument('page', '1'))
    except ValueError:
        page = 1
    if page <= 0:
        page = 1
    try:
        records = paginator.get_page(page - 1)
    except:
        records = paginator.get_page(0)
        page = 1
    self.render('agency.html',
                agency=agency,
                messages=records,
                paginator=paginator,
                next=paginator.has_next_page(page - 1),
                previous=paginator.has_previous_page(page - 1),
                previous_page_number=page - 1,
                next_page_number=page + 1,
                page=page)
def get(self, slug):
    s = utils.lookup_agency_alias(slug)
    if s:
        return self.redirect('/agency/%s/' % (s))
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    message = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1)
    if message:
        return self.redirect(message[0].message.filelink())
    raise tornado.web.HTTPError(404)
def get(self, slug):
    s = utils.lookup_agency_alias(slug)
    if s:
        return self.redirect('/agency/%s/' % (s))
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    message = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1)
    if message:
        production = self.request.host == 'www.gtfs-data-exchange.com'
        return self.redirect(message[0].message.filelink(production=production))
    raise tornado.web.HTTPError(404)
def get(self, slug):
    # TODO: should we even do this on an admin page? admin links should always be golden
    s = utils.lookup_agency_alias(slug)
    if s:
        return self.redirect('/a/edit/%s' % s)
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    crawl_urls = utils.get_agency_crawl_urls(agency)
    self.render('agency_edit.html', agency=agency, crawl_urls=crawl_urls, error=None)
def get(self, slug=None):
    if not slug:
        slug = self.get_argument('agency', None)
    if not slug:
        return self.api_error(404, 'MISSING_ARG_AGENCY')
    s = utils.lookup_agency_alias(slug)
    if s:
        slug = s
    agency = utils.get_agency(slug)
    if not agency:
        return self.api_error(404, 'AGENCY_NOT_FOUND')
    messages = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1000)
    messages = [message.message.json() for message in messages if message.hasFile]
    self.api_response(dict(agency=agency.json(), datafiles=messages))
def get(self, user_or_agency=None, slug=None):
    self.set_header('Content-Type', 'application/atom+xml')
    base_url = self.request.protocol + "://" + self.request.host
    if not user_or_agency:
        messages = utils.get_recent_messages()
        self.render('atom.xml', user_or_agency=user_or_agency, messages=messages, base_url=base_url)
    elif user_or_agency == 'user':
        user = urllib.unquote(slug)
        if '@' in user:
            user = users.User(user)
        else:
            user = users.User(user + '@gmail.com')
        messages = model.Message.all().filter('date >', datetime.datetime.now() - datetime.timedelta(90)).filter('user =', user).order('-date').fetch(15)
        self.render('agency_atom.xml', user_or_agency=user_or_agency, messages=messages, base_url=base_url, user=str(user), agency=None)
    elif user_or_agency == 'agency':
        alias = utils.lookup_agency_alias(slug)
        if alias:
            return self.redirect('/%s/%s/feed' % (user_or_agency, alias))
        agency = utils.get_agency(slug)
        messages = [x.message for x in model.MessageAgency.all().filter('agency =', agency).filter('date >', datetime.datetime.now() - datetime.timedelta(90)).order('-date').fetch(15)]
        self.render('agency_atom.xml', agency=agency, user_or_agency=user_or_agency, messages=messages, base_url=base_url, user='')
def get(self, userOrAgency=None, id=None):
    context = {'userOrAgency': userOrAgency, 'u': id, 'id': id}
    self.response.headers['Content-Type'] = 'application/atom+xml'
    if not userOrAgency:
        context['messages'] = model.Message.all().filter('date >', datetime.datetime.now() - datetime.timedelta(90)).order('-date').fetch(15)
        self.render('atom.xml', context)
    elif userOrAgency == 'user':
        import urllib
        user = urllib.unquote(id)
        if '@' in user:
            u = users.User(user)
        else:
            u = users.User(user + '@gmail.com')
        context['messages'] = model.Message.all().filter('date >', datetime.datetime.now() - datetime.timedelta(90)).filter('user =', u).order('-date').fetch(15)
        self.render('agency_atom.xml', **context)
    elif userOrAgency == 'agency':
        s = utils.lookup_agency_alias(id)
        if s:
            return self.redirect('/%s/%s/feed' % (userOrAgency, s))
        agency = model.Agency.all().filter('slug =', id).get()
        context['agency'] = agency
        context['messages'] = [x.message for x in model.MessageAgency.all().filter('agency =', agency).filter('date >', datetime.datetime.now() - datetime.timedelta(90)).order('-date').fetch(15)]
        self.render('agency_atom.xml', **context)
def uploadfile(username, agencydata, comments, md5sum, sizeoffile, bounds):
    ## todo: cache
    if model.Message.all().filter('md5sum =', md5sum).count() > 0:
        raise UploadError('This file has previously been uploaded')
    ## todo: cache
    if model.SkipMd5.all().filter('md5sum =', md5sum).count() > 0:
        raise UploadError('This file has previously been uploaded')

    raw_agencies = utils.readfile(agencydata)
    if not raw_agencies:
        raise UploadError("zip file did not contain any valid agencies in agency.txt.")

    ## save our msg
    new_message = model.Message(user=username, content=comments)
    new_message.hasFile = True
    memcache.delete('Message.recent')
    # new_message.filename = filename
    new_message.md5sum = md5sum
    new_message.size = sizeoffile
    new_message.max_lat = None
    new_message.max_lng = None
    new_message.min_lat = None
    new_message.min_lng = None
    if bounds:
        bounds_list = bounds.split("|")
        try:
            new_message.max_lat = float(bounds_list[0])
            new_message.max_lng = float(bounds_list[1])
            new_message.min_lat = float(bounds_list[2])
            new_message.min_lng = float(bounds_list[3])
        except ValueError:
            logging.error('failed to set bounds from %s' % bounds)
    new_message.put()

    d = datetime.datetime.now()
    datestr = d.strftime('%Y%m%d_%H%M')

    seen_agencies = []
    for ag in raw_agencies:
        ## get from the db
        ## lookup by url first
        a = None
        if ag.get('agency_url', '').strip():
            ## try to get via url first as it's more unique
            url = ag['agency_url'].strip()
            try:
                # TODO: use urlnorm
                url_parsed = urlparse.urlparse(url)
                if not url_parsed.path:
                    url += '/'
            except:
                logging.exception('unable to parse url')
            a = model.Agency.all().filter('url =', url).get()

        if not a:
            slug = model.slugify(ag['agency_name'].strip())
            s = utils.lookup_agency_alias(slug)
            if s:
                slug = s
            a = memcache.get('Agency.slug.%s' % slug)
            if not a:
                a = model.Agency.all().filter('slug =', slug).get()

        if a:
            a.messagecount += 1
            a.lastupdate = datetime.datetime.now()
            a.put()
            memcache.set('Agency.slug.%s' % a.slug, a)

        if not a:
            a = model.Agency()
            a.name = ag['agency_name'].strip()
            a.url = ag.get('agency_url', '')
            a.messagecount = 1
            a.put()
            memcache.delete('Agency.recent')
            utils.incrAgencyCount()

        if len(raw_agencies) == 1:
            new_message.filename = '%s_%s.zip' % (a.slug, datestr)
            new_message.put()

        # some zip files have the same url several times; only capture the first time that url is used
        if a in seen_agencies:
            continue
        seen_agencies.append(a)

        ma = model.MessageAgency()
        ma.agency = a
        ma.message = new_message
        ma.hasFile = True
        ma.put()
        memcache.delete('Agency.all')  # because it has the cached last-update

    if not new_message.filename:
        new_message.filename = '%s_%s.zip' % (username.nickname(), datestr)
        new_message.put()

    # TODO: can we even hit this, since upload should only be called at a rate of once a minute anyway?
    recentFiles = model.Message.all().filter('hasFile =', True).filter('date >=', d.replace(second=0, microsecond=0)).count()
    if recentFiles > 1:
        # note we already saved *this* filename
        new_message.filename = new_message.filename.replace('.zip', '_%d.zip' % recentFiles)
        new_message.put()

    ## send email to user ?
    return new_message.filename
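A minimal caller sketch, not taken from the original source: it assumes uploadfile and UploadError live in this module, that username is a google.appengine.api.users.User (implied by username.nickname() above), and that bounds uses the pipe-separated "max_lat|max_lng|min_lat|min_lng" format implied by the parsing above; the handler shape and field names are hypothetical.

import hashlib

from google.appengine.api import users

def post(self):  # hypothetical Tornado upload handler, for illustration only
    body = self.request.files['file'][0]['body']  # raw bytes of the uploaded zip
    try:
        filename = uploadfile(
            username=users.get_current_user(),
            agencydata=body,
            comments=self.get_argument('comments', ''),
            md5sum=hashlib.md5(body).hexdigest(),
            sizeoffile=len(body),
            # pipe-separated "max_lat|max_lng|min_lat|min_lng", as parsed above
            bounds=self.get_argument('bounds', ''))
    except UploadError as e:
        raise tornado.web.HTTPError(400, str(e))
    self.write(filename)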