def post(self):
    """Create or update a CrawlBaseUrl record, or link one to an agency.

    Form arguments:
        orig_url -- url of an existing CrawlBaseUrl to edit (absent => create)
        link     -- agency slug; when present, only link the crawl url to it
        url, recurse, download_as, show_url, post_text, crawl_interval --
            fields saved on the record.

    Redirects to the agency edit page (link path) or the crawler index.
    Raises tornado.web.HTTPError(404) if orig_url matches no record
    (previously this crashed with an AttributeError on None).
    """
    url = self.get_argument('orig_url', None)
    if self.get_argument('link', None):
        # link this crawl url to an agency and stop
        agency = utils.get_agency(self.get_argument('link'))
        c = model.CrawlBaseUrl.all().filter('url =', url).get()
        if c is None:
            raise tornado.web.HTTPError(404)
        c.agency = agency
        c.put()
        return self.redirect('/a/edit/' + agency.slug)
    if url:
        c = model.CrawlBaseUrl.all().filter('url =', url).get()
        if c is None:
            raise tornado.web.HTTPError(404)
    else:
        # new record: pretend it was last crawled a year ago and
        # queue the first crawl ten minutes from now
        c = model.CrawlBaseUrl()
        c.lastcrawled = datetime.datetime.now() - datetime.timedelta(days=365)
        c.next_crawl = datetime.datetime.now() + datetime.timedelta(minutes=10)
    c.enabled = True
    c.url = self.get_argument('url')
    c.recurse = int(self.get_argument('recurse'))
    c.download_as = self.get_argument('download_as', 'gtfs-archiver')
    # the form posts the literal string 'True'; anything else means False
    c.show_url = self.get_argument('show_url', True) == 'True'
    c.post_text = self.get_argument('post_text', '')
    c.crawl_interval = int(self.get_argument('crawl_interval', 24))
    c.put()
    self.redirect('/a/crawler')
def post(self):
    """Create or update a CrawlBaseUrl record, or link one to an agency.

    Form arguments:
        orig_url -- url of an existing CrawlBaseUrl to edit (absent => create)
        link     -- agency slug; when present, only link the crawl url to it
        url, recurse, download_as, show_url, post_text, crawl_interval --
            fields saved on the record.
    """
    url = self.get_argument('orig_url', None)
    if self.get_argument('link', None):
        # link this to an agency
        agency = utils.get_agency(self.get_argument('link'))
        # NOTE(review): .get() returns None when orig_url matches no record;
        # the attribute write below would then raise AttributeError — confirm
        # the form always posts a valid orig_url on the link path.
        c = model.CrawlBaseUrl().all().filter('url =', url).get()
        c.agency = agency
        c.put()
        return self.redirect('/a/edit/' + agency.slug)
    if url:
        c = model.CrawlBaseUrl().all().filter('url =', url).get()
    else:
        # new record: mark it a year stale and queue the first crawl soon
        c = model.CrawlBaseUrl()
        c.lastcrawled = datetime.datetime.now() - datetime.timedelta(
            days=365)
        c.next_crawl = datetime.datetime.now() + datetime.timedelta(
            minutes=10)
    c.enabled = True
    c.url = self.get_argument('url')
    c.recurse = int(self.get_argument('recurse'))
    c.download_as = self.get_argument('download_as', 'gtfs-archiver')
    # the form posts the literal string 'True'; anything else means False
    c.show_url = self.get_argument('show_url', True) == 'True'
    c.post_text = self.get_argument('post_text', '')
    c.crawl_interval = int(self.get_argument('crawl_interval', 24))
    c.put()
    self.redirect('/a/crawler')
def get(self, slug):
    """Render an agency's public page with its messages, 10 per page.

    Redirects old slug aliases to the canonical url; 404s unknown slugs.
    An out-of-range or malformed ?page= argument falls back to page 1.
    """
    s = utils.lookup_agency_alias(slug)
    if s:
        return self.redirect('/agency/%s/' % (s))
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    messages = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1000)
    paginator = ObjectPaginator(messages, 10, 1)
    try:
        page = int(self.get_argument('page', '1'))
    except ValueError:
        page = 1
    if page <= 0:
        page = 1
    try:
        records = paginator.get_page(page - 1)
    except Exception:
        # was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt
        # propagate; any paginator error (page out of range) resets to page 1
        records = paginator.get_page(0)
        page = 1
    self.render('agency.html', agency=agency, messages=records,
                paginator=paginator,
                next=paginator.has_next_page(page - 1),
                previous=paginator.has_previous_page(page - 1),
                previous_page_number=page - 1,
                next_page_number=page + 1,
                page=page)
def get(self, slug):
    """Redirect to the file link of the agency's most recent message."""
    alias = utils.lookup_agency_alias(slug)
    if alias:
        # old slug: bounce to the canonical agency url
        return self.redirect('/agency/%s/' % (alias))
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    latest = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1)
    if not latest:
        raise tornado.web.HTTPError(404)
    return self.redirect(latest[0].message.filelink())
def get(self, slug):
    """Redirect to the newest data file for the agency identified by *slug*.

    Uses the production variant of the file link only when the request was
    served from the canonical www.gtfs-data-exchange.com host.
    """
    s = utils.lookup_agency_alias(slug)
    if s:
        # old slug alias: bounce to the canonical agency url
        return self.redirect('/agency/%s/' % (s))
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    message =model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1)
    if message:
        # dev/staging hosts get non-production file links
        production = self.request.host == 'www.gtfs-data-exchange.com'
        return self.redirect(message[0].message.filelink(production=production))
    # no messages for this agency
    raise tornado.web.HTTPError(404)
def get(self, slug):
    """Render the admin edit form for the agency identified by *slug*."""
    # TODO: should we even do this on an admin page? admin links should always be golden
    alias = utils.lookup_agency_alias(slug)
    if alias:
        return self.redirect('/a/edit/%s' % alias)
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    self.render('agency_edit.html',
                agency=agency,
                crawl_urls=utils.get_agency_crawl_urls(agency),
                error=None)
def get(self, slug=None):
    """API endpoint: respond with an agency's metadata and its data files.

    The agency may come from the url path (*slug*) or the ?agency= argument.
    Errors are returned through api_error with a machine-readable code.
    """
    if not slug:
        slug = self.get_argument('agency', None)
        if not slug:
            return self.api_error(404, 'MISSING_ARG_AGENCY')
    alias = utils.lookup_agency_alias(slug)
    if alias:
        slug = alias
    agency = utils.get_agency(slug)
    if not agency:
        return self.api_error(404, 'AGENCY_NOT_FOUND')
    rows = model.MessageAgency.all().filter('agency', agency).order('-date').fetch(1000)
    # only messages that actually carry a file are exposed in the API
    datafiles = []
    for row in rows:
        if row.hasFile:
            datafiles.append(row.message.json())
    self.api_response(dict(agency=agency.json(), datafiles=datafiles))
def get(self, slug=None):
    """API endpoint: respond with an agency's metadata and its data files.

    The agency may come from the url path (*slug*) or the ?agency= argument;
    old slug aliases are resolved to the canonical slug first.
    """
    if not slug:
        slug = self.get_argument('agency', None)
        if not slug:
            return self.api_error(404, 'MISSING_ARG_AGENCY')
    s = utils.lookup_agency_alias(slug)
    if s:
        slug = s
    agency = utils.get_agency(slug)
    if not agency:
        return self.api_error(404, 'AGENCY_NOT_FOUND')
    messages = model.MessageAgency.all().filter(
        'agency', agency).order('-date').fetch(1000)
    # only messages that actually carry a file are exposed in the API
    messages = [
        message.message.json() for message in messages if message.hasFile
    ]
    self.api_response(dict(agency=agency.json(), datafiles=messages))
def get(self, user_or_agency=None, slug=None):
    """Serve an Atom feed: site-wide, per-user, or per-agency.

    Routes:
        /feed                -- recent messages across the whole site
        /user/<name>/feed    -- one uploader's messages (last 90 days, 15 max)
        /agency/<slug>/feed  -- one agency's messages (last 90 days, 15 max)
    """
    self.set_header('Content-Type', 'application/atom+xml')
    base_url = self.request.protocol + "://" + self.request.host
    if not user_or_agency:
        messages = utils.get_recent_messages()
        self.render('atom.xml', user_or_agency=user_or_agency,
                    messages=messages, base_url=base_url)
    elif user_or_agency == 'user':
        user = urllib.unquote(slug)
        if '@' in user:
            user = users.User(user)
        else:
            # bare usernames default to gmail accounts
            user = users.User(user + '@gmail.com')
        # NOTE(review): this query chain was corrupted in the source
        # ("'user ='******'-date'"); reconstructed to mirror the agency
        # branch below — confirm against version history.
        messages = model.Message.all().filter(
            'date >', datetime.datetime.now() - datetime.timedelta(90)).filter(
            'user =', user).order('-date').fetch(15)
        self.render('agency_atom.xml', user_or_agency=user_or_agency,
                    messages=messages, base_url=base_url,
                    user=str(user), agency=None)
    elif user_or_agency == 'agency':
        alias = utils.lookup_agency_alias(slug)
        if alias:
            return self.redirect('/%s/%s/feed' % (user_or_agency, alias))
        agency = utils.get_agency(slug)
        messages = [x.message for x in model.MessageAgency.all().filter(
            'agency =', agency).filter(
            'date >', datetime.datetime.now() - datetime.timedelta(90)
        ).order('-date').fetch(15)]
        self.render('agency_atom.xml', agency=agency,
                    user_or_agency=user_or_agency, messages=messages,
                    base_url=base_url, user='')
def post(self, slug):
    """Handle the admin agency-edit form.

    Dispatches on which submit button was pressed:
        action.recrawl  -- placeholder, not implemented
        action.enable / action.disable -- toggle a crawl url on or off
        action.requeue  -- schedule a crawl url to run immediately
        action.save.url -- create or update a crawl url for this agency
    With no action button, saves the agency's editable fields and
    refreshes the relevant memcache entries.

    Raises tornado.web.HTTPError(404) for an unknown agency slug or when
    orig_url matches no CrawlBaseUrl (previously those paths crashed with
    an AttributeError on None).
    """
    agency = utils.get_agency(slug)
    if not agency:
        raise tornado.web.HTTPError(404)
    if self.get_argument('action.recrawl', None):
        return self.render('generic.html', message='not implemented yet')
    elif self.get_argument('action.enable', None):
        url = self.get_argument('orig_url')
        c = model.CrawlBaseUrl.all().filter('url =', url).get()
        if c is None:
            raise tornado.web.HTTPError(404)
        c.enabled = True
        c.put()
        return self.redirect('/a/edit/' + agency.slug)
    elif self.get_argument('action.disable', None):
        url = self.get_argument('orig_url')
        c = model.CrawlBaseUrl.all().filter('url =', url).get()
        if c is None:
            raise tornado.web.HTTPError(404)
        c.enabled = False
        c.put()
        return self.redirect('/a/edit/' + agency.slug)
    elif self.get_argument('action.requeue', None):
        url = self.get_argument('orig_url')
        c = model.CrawlBaseUrl.all().filter('url =', url).get()
        if c is None:
            raise tornado.web.HTTPError(404)
        c.next_crawl = datetime.datetime.now()
        c.put()
        return self.redirect('/a/crawler/' + c.download_as)
    elif self.get_argument('action.save.url', None):
        url = self.get_argument('orig_url', None)
        if url:
            c = model.CrawlBaseUrl.all().filter('url =', url).get()
            if c is None:
                raise tornado.web.HTTPError(404)
        else:
            c = model.CrawlBaseUrl()
        c.enabled = True
        c.agency = agency
        # mark the record a year stale and queue a crawl five minutes out
        c.lastcrawled = datetime.datetime.now() - datetime.timedelta(days=365)
        c.next_crawl = datetime.datetime.now() + datetime.timedelta(minutes=5)
        c.url = self.get_argument('url')
        c.recurse = int(self.get_argument('recurse'))
        c.download_as = self.get_argument('download_as', 'gtfs-archiver')
        c.crawl_interval = int(self.get_argument('crawl_interval', 24))
        # the form posts the literal string 'True'; anything else means False
        c.show_url = self.get_argument('show_url', True) == 'True'
        c.post_text = self.get_argument('post_text', '')
        c.put()
        return self.redirect('/a/edit/' + agency.slug)
    # plain save: update the agency's editable fields
    # (name and slug edits are intentionally disabled)
    agency.description = self.get_argument('description', agency.description)
    agency.url = self.get_argument('url', agency.url)
    agency.country_name = self.get_argument('country', agency.country_name).strip()
    agency.state_name = self.get_argument('state', agency.state_name).strip()
    agency.area_name = self.get_argument('area', agency.area_name).strip()
    agency.feed_baseurl = self.get_argument('feed', agency.feed_baseurl).strip()
    agency.license_url = self.get_argument('license', agency.license_url).strip()
    agency.is_official = self.get_argument('official', '0') == '1'
    agency.put()
    # invalidate/refresh caches so public pages pick up the change now
    memcache.delete('Agency.recent')
    memcache.delete('Agency.all')
    memcache.set('Agency.slug.%s' % agency.slug, agency)
    self.render('generic.html', message='Agency %s updated' % agency.name)