Example #1
def get(self, datasetID):
    # Look up the dataset and record a new crawl for it
    dataset = Dataset.get_by_id(long(datasetID))
    crawl = Crawl(dataset=dataset, status='QUEUED')
    crawl.put()

    # Queue the crawl to run almost immediately (5-second delay)
    crawl.queue(5)
    return webapp2.redirect('/datasets/' + datasetID)
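These handlers are bare methods; in a complete webapp2 application they would sit on a RequestHandler subclass bound to a URL that carries the dataset ID. A minimal sketch of that wiring, in which the class name QueueCrawlHandler and the route pattern are assumptions rather than part of the examples:

import webapp2

class QueueCrawlHandler(webapp2.RequestHandler):    # assumed class name
    def get(self, datasetID):
        # body as in Example #1
        pass

app = webapp2.WSGIApplication([
    # Assumed route: webapp2 passes the <datasetID> segment to get()
    # as a keyword argument.
    webapp2.Route(r'/datasets/<datasetID>/crawl', handler=QueueCrawlHandler),
], debug=True)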
Example #2
def get(self, datasetID):
    # The 'start' query parameter is expected in the form '<hours>:<minutes>'
    startIn = self.request.get('start').split(':')
    if len(startIn) == 2:
        logging.info('Queuing harvest in ' + startIn[0] + ' hours ' + startIn[1] + ' minutes')
        seconds = int(startIn[0]) * 3600 + int(startIn[1]) * 60
        dataset = Dataset.get_by_id(long(datasetID))

        # TODO store 'interval' param in dataset object (if any)

        crawl = Crawl(dataset=dataset, status='QUEUED')
        crawl.put()

        # Queue the crawl with the requested delay
        crawl.queue(seconds)
        return webapp2.redirect('/datasets/' + datasetID)
    else:
        # TODO decent error handling
        logging.info('Invalid crawl time: ' + self.request.get('start'))
        return webapp2.redirect('/datasets/' + datasetID + '?error=true')
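Both examples delegate the actual scheduling to Crawl.queue(seconds), which is not shown. A minimal sketch of what such a method might look like, assuming Crawl is a google.appengine.ext.db model and that a worker handler is mapped to a hypothetical /_tasks/crawl URL, using the countdown parameter of the App Engine task queue API:

from google.appengine.api import taskqueue
from google.appengine.ext import db

class Crawl(db.Model):
    dataset = db.ReferenceProperty()    # reference to the Dataset entity
    status = db.StringProperty()

    def queue(self, countdown):
        # Enqueue a push task on the default queue; App Engine invokes the
        # assumed /_tasks/crawl worker after 'countdown' seconds have elapsed.
        taskqueue.add(
            url='/_tasks/crawl',
            params={'crawl': str(self.key())},    # this crawl's datastore key
            countdown=countdown)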