def get(self, datasetID):
    dataset = Dataset.get_by_id(long(datasetID))
    crawl = Crawl(dataset=dataset, status='QUEUED')
    crawl.put()
    # Queue the crawl immediately
    crawl.queue(5)
    return webapp2.redirect('/datasets/' + datasetID)
def get(self, datasetID):
    dataset = Dataset.get_by_id(long(datasetID))
    # Delete all crawls and dumpfiles that reference this dataset
    # before removing the dataset itself
    for crawl in Crawl.all().filter('dataset =', dataset).run():
        crawl.delete()
    for dump in Dumpfile.all().filter('dataset =', dataset).run():
        dump.delete()
    dataset.delete()
    logging.info('Deleted dataset ' + datasetID)
    return webapp2.redirect('/datasets')
def get(self, datasetID):
    # The 'start' parameter is expected in the form 'hours:minutes'
    startIn = self.request.get('start').split(':')
    if len(startIn) == 2:
        logging.info('Queuing harvest in ' + startIn[0] + ' hours ' + startIn[1] + ' minutes')
        seconds = int(startIn[0]) * 3600 + int(startIn[1]) * 60
        dataset = Dataset.get_by_id(long(datasetID))
        # TODO store 'interval' param in dataset object (if any)
        crawl = Crawl(dataset=dataset, status='QUEUED')
        crawl.put()
        crawl.queue(seconds)
        return webapp2.redirect('/datasets/' + datasetID)
    else:
        # TODO decent error handling
        logging.info('Invalid crawl time: ' + self.request.get('start'))
        return webapp2.redirect('/datasets/' + datasetID + '?error=true')
def get(self, datasetID):
    dataset = Dataset.get_by_id(long(datasetID))
    self.response.write(json.dumps(dataset.toJSON()))
def get(self, datasetID):
    dataset = Dataset.get_by_id(long(datasetID))
    self.render_response('datasets/datasets_show.html', dataset=dataset)
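For context, a minimal sketch of how handlers containing these get methods could be registered as webapp2 routes so that the datasetID URL segment is passed into each method; the handler class names and URL templates here are assumptions for illustration only and are not taken from the original code.

# Hypothetical wiring: class names and paths are assumptions, not the
# project's actual routing table.
app = webapp2.WSGIApplication([
    webapp2.Route(r'/datasets/<datasetID>', handler=DatasetShowHandler),          # render datasets_show.html
    webapp2.Route(r'/datasets/<datasetID>/json', handler=DatasetJSONHandler),     # dataset as JSON
    webapp2.Route(r'/datasets/<datasetID>/crawl', handler=QueueCrawlHandler),     # queue a crawl
    webapp2.Route(r'/datasets/<datasetID>/delete', handler=DeleteDatasetHandler), # delete dataset and children
], debug=True)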