def get(self, request, uid=None, format=None):
    """Re-run the job identified by the ``job_uid`` query parameter.

    Gets the job_uid and current user from the request, creates an
    instance of the ExportTaskRunner and calls run_task on it, passing
    the job_uid and user.

    Args:
        request: the HTTP request.
        uid: unused route parameter.
        format: requested response format.

    Returns:
        202 with the serialized run data on success, 400 when the task
        runner fails to produce a run, 404 when no job_uid is supplied
        or no job with that uid exists.
    """
    job_uid = request.query_params.get("job_uid", None)
    user = request.user
    if not job_uid:
        return Response([{"detail": _("Export not found")}], status.HTTP_404_NOT_FOUND)
    # A bogus uid must surface as 404, not as an unhandled
    # Job.DoesNotExist (HTTP 500); the original bound the job to an
    # unused local purely as an existence probe.
    if not Job.objects.filter(uid=job_uid).exists():
        return Response([{"detail": _("Export not found")}], status.HTTP_404_NOT_FOUND)
    # run the tasks
    task_runner = ExportTaskRunner()
    run = task_runner.run_task(job_uid=job_uid, user=user)
    if run:
        running = ExportRunSerializer(run, context={"request": request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    return Response([{"detail": _("Failed to run Export")}], status.HTTP_400_BAD_REQUEST)
def create(self, request, format='json'):
    """Re-run the job identified by the ``job_uid`` query parameter.

    Gets the job_uid and current user from the request, creates an
    instance of the ExportTaskRunner and calls run_task on it, passing
    the job_uid and user.

    Args:
        request: the HTTP request.
        format: requested response format.

    Returns:
        202 with the serialized run data on success, 400 when the task
        runner fails to produce a run, 404 when no job_uid is supplied
        or no job with that uid exists.
    """
    job_uid = request.query_params.get('job_uid', None)
    user = request.user
    if not job_uid:
        return Response([{'detail': _('Export not found')}], status.HTTP_404_NOT_FOUND)
    # A bogus uid must surface as 404, not as an unhandled
    # Job.DoesNotExist (HTTP 500); the original bound the job to an
    # unused local purely as an existence probe.
    if not Job.objects.filter(uid=job_uid).exists():
        return Response([{'detail': _('Export not found')}], status.HTTP_404_NOT_FOUND)
    # run the tasks
    task_runner = ExportTaskRunner()
    run = task_runner.run_task(job_uid=job_uid, user=user)
    if run:
        running = ExportRunSerializer(run, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    return Response([{'detail': _('Failed to run Export')}], status.HTTP_400_BAD_REQUEST)
def create(self, request, format='json'):
    """Queue the export tasks for the job named by ``job_uid``.

    Reads the job uid from the query string, hands it to an
    ExportTaskRunner together with the requesting user, and reports
    success unconditionally.
    """
    uid = request.query_params.get('job_uid', None)
    ExportTaskRunner().run_task(job_uid=uid, user=request.user)
    return Response({'status': 'OK'}, status=status.HTTP_201_CREATED)
def perform_create(self, serializer):
    """Persist the validated job and queue its export tasks.

    Raises a validation error (keyed on ``the_geom``) when the
    requesting user has created more than five jobs within the last
    hour.
    """
    one_hour_ago = timezone.now() - timedelta(minutes=60)
    recent_jobs = Job.objects.filter(
        created_at__gt=one_hour_ago, user=self.request.user)
    if recent_jobs.count() > 5:
        raise ValidationError(
            {"the_geom": ["You are rate limited to 5 exports per hour."]})
    created = serializer.save()
    ExportTaskRunner().run_task(job_uid=str(created.uid))
def create(self, request, format='json'):
    """Queue the export tasks for the job named by ``job_uid``.

    At most one export run per user per minute is allowed; a request
    arriving inside that window is rejected with a RATE_LIMITED
    response.
    """
    window_start = timezone.now() - timedelta(minutes=1)
    recent_runs = ExportRun.objects.filter(
        created_at__gt=window_start, user=request.user)
    if recent_runs.count() >= 1:
        return Response({'status': 'RATE_LIMITED'}, status=status.HTTP_400_BAD_REQUEST)
    uid = request.query_params.get('job_uid', None)
    ExportTaskRunner().run_task(job_uid=uid, user=request.user)
    return Response({'status': 'OK'}, status=status.HTTP_201_CREATED)
def get(self, request, uid=None, format=None):
    """Re-run the job identified by the ``job_uid`` query parameter.

    Args:
        request: the HTTP request.
        uid: unused route parameter.
        format: requested response format.

    Returns:
        202 with the serialized run data on success, 400 when the task
        runner fails to produce a run, 404 when no job_uid is supplied
        or no job with that uid exists.
    """
    job_uid = request.QUERY_PARAMS.get('job_uid', None)
    if not job_uid:
        return Response([{'detail': _('Export not found')}], status.HTTP_404_NOT_FOUND)
    # A bogus uid must surface as 404, not as an unhandled
    # Job.DoesNotExist (HTTP 500); the original bound the job to an
    # unused local purely as an existence probe.
    if not Job.objects.filter(uid=job_uid).exists():
        return Response([{'detail': _('Export not found')}], status.HTTP_404_NOT_FOUND)
    # run the tasks
    task_runner = ExportTaskRunner()
    run = task_runner.run_task(job_uid=job_uid)
    if run:
        running = ExportRunSerializer(run, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    # BUG FIX: this error path previously returned HTTP 200 with an
    # error payload; report the failure as 400 like the sibling views.
    return Response([{'detail': _('Failed to run Export')}], status.HTTP_400_BAD_REQUEST)
def handle(self, *args, **kwargs):
    """Queue scheduled export runs for every region whose schedule is due.

    Walks all HDX and partner export regions (skipping those whose
    schedule_period is 'disabled'), works out when each region's job
    last ran, and re-queues the job on the scheduled-task queue when
    the run is overdue or the region's regular schedule slot has
    arrived.
    """
    now = timezone.now()
    for regioncls in [HDXExportRegion, PartnerExportRegion]:
        for region in regioncls.objects.exclude(
                schedule_period='disabled'):
            last_run = region.job.runs.last()
            if last_run:
                last_run_at = last_run.created_at
            else:
                # Never run: treat the Unix epoch as the last run time
                # so the "overdue" check below always fires.
                last_run_at = datetime.fromtimestamp(
                    0, pytz.timezone('UTC'))
            delta = now - last_run_at
            # if it's been too long or it's the regularly-scheduled time
            # Each scheduled branch also requires delta > 2h so a job is
            # not queued twice within the same schedule slot.
            # 6hrs: fires when the current hour is a multiple of 6 past
            #   schedule_hour; daily: at schedule_hour; weekly: at
            #   schedule_hour on weekday() == 6 (Sunday); monthly: at
            #   schedule_hour on the 1st of the month.
            if (delta > region.delta) or \
                (region.schedule_period == '6hrs' and (now.hour - region.schedule_hour) % 6 == 0 and delta > timedelta(hours=2)) or \
                (region.schedule_period == 'daily' and region.schedule_hour == now.hour and delta > timedelta(hours=2)) or \
                (region.schedule_period == 'weekly' and (now.weekday() + 1) % 7 == 0 and region.schedule_hour == now.hour and delta > timedelta(hours=2)) or \
                (region.schedule_period == 'monthly' and now.day == 1 and region.schedule_hour == now.hour and delta > timedelta(hours=2)):
                ExportTaskRunner().run_task(job_uid=region.job.uid, queue="celery-scheduled")
def get(self, request, uid=None, format=None):
    """Re-run the job identified by the ``job_uid`` query parameter.

    Args:
        request: the HTTP request.
        uid: unused route parameter.
        format: requested response format.

    Returns:
        202 with the serialized run data on success, 400 when the task
        runner fails to produce a run, 404 when no job_uid is supplied
        or no job with that uid exists.
    """
    job_uid = request.QUERY_PARAMS.get('job_uid', None)
    user = request.user
    if not job_uid:
        return Response([{'detail': _('Export not found')}], status.HTTP_404_NOT_FOUND)
    # A bogus uid must surface as 404, not as an unhandled
    # Job.DoesNotExist (HTTP 500); the original bound the job to an
    # unused local purely as an existence probe.
    if not Job.objects.filter(uid=job_uid).exists():
        return Response([{'detail': _('Export not found')}], status.HTTP_404_NOT_FOUND)
    # run the tasks
    task_runner = ExportTaskRunner()
    run = task_runner.run_task(job_uid=job_uid, user=user)
    if run:
        running = ExportRunSerializer(run, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    return Response([{'detail': _('Failed to run Export')}], status.HTTP_400_BAD_REQUEST)
def run(*script_args):
    """Create a demo test job and queue it on the export task runner.

    Builds a Job for the demo user over a Monrovia bounding box,
    attaches the Africa region, the obf/thematic export formats and
    the default HDM preset tags, then runs the export. Task results
    are processed on the celery queue and are available at the
    /api/runs url.
    """
    # (removed: an unused `path` local computed from __file__)
    # pull out the demo user
    user = User.objects.get(username='******')
    # create the test job
    bbox = Polygon.from_bbox((-10.85, 6.25, -10.62, 6.40))  # monrovia
    # bbox = Polygon.from_bbox((13.84,-33.87,34.05,-25.57)) #(w,s,e,n) horn of africa
    the_geom = GEOSGeometry(bbox, srid=4326)
    job = Job.objects.create(name='TestJob',
                             description='Test description',
                             user=user,
                             the_geom=the_geom)
    region = Region.objects.get(name='Africa')
    job.region = region
    job.save()
    # add the format(s)
    formats = [
        ExportFormat.objects.get(slug='obf'),
        ExportFormat.objects.get(slug='thematic')
    ]
    job.formats = formats
    job.save()
    # add the tags (defaults to hdm presets)
    parser = PresetParser(preset='./tasks/tests/files/hdm_presets.xml')
    tags_dict = parser.parse()
    for entry in tags_dict:
        # created for the side effect only; the instance is not reused
        Tag.objects.create(
            key=entry['key'],
            value=entry['value'],
            geom_types=entry['geom_types'],
            data_model='HDM',
            job=job
        )
    # run the export.. tasks processed on celery queue
    # results available at /api/runs url
    runner = ExportTaskRunner()
    runner.run_task(job_uid=str(job.uid))
def create(self, request, *args, **kwargs):
    """Create a Job from the supplied request data and queue its tasks.

    Validates the request with the view's serializer, resolves the
    requested export formats, saves the job and its tags/configs
    inside a transaction, then hands the job uid to an
    ExportTaskRunner.

    Args:
        request: the HTTP request.
        *args: variable length argument list.
        **kwargs: arbitrary keyword arguments.

    Returns:
        202 with the serialized job on success, 400 on validation
        failure or when no valid format was provided, 500 when saving
        the job raises a database Error.
    """
    serializer = self.get_serializer(data=request.data)
    if (serializer.is_valid()):
        # add the export formats
        formats = request.data.get('formats')
        tags = request.data.get('tags')
        preset = request.data.get('preset')
        translation = request.data.get('translation')
        transform = request.data.get('transform')
        # featuresave/featurepub are read here but not used in this view
        featuresave = request.data.get('featuresave')
        featurepub = request.data.get('featurepub')
        export_formats = []
        job = None
        for slug in formats:
            # would be good to accept either format slug or uuid here..
            try:
                export_format = ExportFormat.objects.get(slug=slug)
                export_formats.append(export_format)
            except ExportFormat.DoesNotExist as e:
                # unknown slugs are skipped with a warning rather than
                # failing the whole request
                logger.warn(
                    'Export format with uid: {0} does not exist'.format(
                        slug))
        if len(export_formats) > 0:
            # save the job and make sure it's committed before running tasks..
            try:
                with transaction.atomic():
                    job = serializer.save()
                    job.formats = export_formats
                    if preset:
                        # get the tags from the uploaded preset
                        logger.debug('Found preset with uid: %s' % preset)
                        config = ExportConfig.objects.get(uid=preset)
                        job.configs.add(config)
                        preset_path = config.upload.path
                        logger.debug(config.upload.path)
                        # use unfiltered preset parser
                        parser = presets.UnfilteredPresetParser(
                            preset=preset_path)
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                geom_types=entry['geom_types'],
                                data_model='PRESET',
                                job=job)
                    elif tags:
                        # get tags from request
                        for entry in tags:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                job=job,
                                data_model=entry['data_model'],
                                geom_types=entry['geom_types'],
                                groups=entry['groups'])
                    else:
                        # use hdm preset as default tags
                        path = os.path.dirname(os.path.realpath(__file__))
                        parser = presets.PresetParser(preset=path + '/hdm_presets.xml')
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                geom_types=entry['geom_types'],
                                data_model='HDM',
                                job=job)
                    # check for translation file
                    if translation:
                        config = ExportConfig.objects.get(uid=translation)
                        job.configs.add(config)
                    # check for transform file
                    if transform:
                        config = ExportConfig.objects.get(uid=transform)
                        job.configs.add(config)
            except Error as e:
                # database error while saving the job: roll back (via
                # the atomic block) and report a server error
                error_data = OrderedDict()
                error_data['id'] = _('server_error')
                error_data[
                    'message'] = 'Error creating export job: {0}'.format(e)
                return Response(
                    error_data,
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            error_data = OrderedDict()
            error_data['formats'] = [_('Invalid format provided.')]
            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
        # run the tasks
        task_runner = ExportTaskRunner()
        job_uid = str(job.uid)
        task_runner.run_task(job_uid=job_uid)
        running = JobSerializer(job, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def create(self, request, *args, **kwargs):
    """Create a Job from the supplied request data and queue its tasks.

    Validates the request with the view's serializer, resolves the
    requested export formats, saves the job and its tags/configs
    inside a transaction, then hands the job uid to an
    ExportTaskRunner.

    Args:
        request: the HTTP request.
        *args: variable length argument list.
        **kwargs: arbitrary keyword arguments.

    Returns:
        202 with the serialized job on success, 400 on validation
        failure or when no valid format was provided, 500 when saving
        the job raises a database Error.
    """
    serializer = self.get_serializer(data=request.data)
    if (serializer.is_valid()):
        # add the export formats
        formats = request.data.get('formats')
        tags = request.data.get('tags')
        preset = request.data.get('preset')
        translation = request.data.get('translation')
        transform = request.data.get('transform')
        # featuresave/featurepub are read here but not used in this view
        featuresave = request.data.get('featuresave')
        featurepub = request.data.get('featurepub')
        export_formats = []
        job = None
        for slug in formats:
            # would be good to accept either format slug or uuid here..
            try:
                export_format = ExportFormat.objects.get(slug=slug)
                export_formats.append(export_format)
            except ExportFormat.DoesNotExist as e:
                # unknown slugs are skipped with a warning rather than
                # failing the whole request
                logger.warn('Export format with uid: {0} does not exist'.format(slug))
        if len(export_formats) > 0:
            # save the job and make sure it's committed before running tasks..
            try:
                with transaction.atomic():
                    job = serializer.save()
                    job.formats = export_formats
                    if preset:
                        # get the tags from the uploaded preset
                        logger.debug('Found preset with uid: %s' % preset)
                        config = ExportConfig.objects.get(uid=preset)
                        job.configs.add(config)
                        preset_path = config.upload.path
                        logger.debug(config.upload.path)
                        # use unfiltered preset parser
                        parser = presets.UnfilteredPresetParser(preset=preset_path)
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                geom_types=entry['geom_types'],
                                data_model='PRESET',
                                job=job
                            )
                    elif tags:
                        # get tags from request
                        for entry in tags:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                job=job,
                                data_model=entry['data_model'],
                                geom_types=entry['geom_types'],
                                groups=entry['groups']
                            )
                    else:
                        # use hdm preset as default tags
                        path = os.path.dirname(os.path.realpath(__file__))
                        parser = presets.PresetParser(preset=path + '/hdm_presets.xml')
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                geom_types=entry['geom_types'],
                                data_model='HDM',
                                job=job
                            )
                    # check for translation file
                    if translation:
                        config = ExportConfig.objects.get(uid=translation)
                        job.configs.add(config)
                    # check for transform file
                    if transform:
                        config = ExportConfig.objects.get(uid=transform)
                        job.configs.add(config)
            except Error as e:
                # database error while saving the job: roll back (via
                # the atomic block) and report a server error
                error_data = OrderedDict()
                error_data['id'] = _('server_error')
                error_data['message'] = 'Error creating export job: {0}'.format(e)
                return Response(error_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            error_data = OrderedDict()
            error_data['formats'] = [_('Invalid format provided.')]
            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
        # run the tasks
        task_runner = ExportTaskRunner()
        job_uid = str(job.uid)
        task_runner.run_task(job_uid=job_uid)
        running = JobSerializer(job, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def perform_create(self, serializer):
    """Persist the validated job and immediately queue its export tasks."""
    created = serializer.save()
    ExportTaskRunner().run_task(job_uid=str(created.uid))
def create(self, request, *args, **kwargs):
    """
    Create a Job from the supplied request data.

    The request data is validated by *api.serializers.JobSerializer*.
    Associates the *Job* with required *ExportFormats*, *ExportConfig*
    and *Tags*. The job, its tags and its configs are saved inside a
    transaction before the tasks are queued.

    Args:
        request: the HTTP request.
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.

    Returns:
        the serialized run data (202) on success, 400 on validation
        failure or when no valid format was provided, 500 when saving
        the job raises.

    Raises:
        ValidationError: in case of validation errors.
    """
    serializer = self.get_serializer(data=request.data)
    if serializer.is_valid():
        """Get the required data from the validated request."""
        formats = request.data.get("formats")
        tags = request.data.get("tags")
        preset = request.data.get("preset")
        translation = request.data.get("translation")
        transform = request.data.get("transform")
        # featuresave/featurepub are read here but not used in this view
        featuresave = request.data.get("featuresave")
        featurepub = request.data.get("featurepub")
        export_formats = []
        job = None
        for slug in formats:
            # would be good to accept either format slug or uuid here..
            try:
                export_format = ExportFormat.objects.get(slug=slug)
                export_formats.append(export_format)
            except ExportFormat.DoesNotExist as e:
                # unknown slugs are skipped with a warning rather than
                # failing the whole request
                logger.warn("Export format with uid: {0} does not exist".format(slug))
        if len(export_formats) > 0:
            """Save the job and make sure it's committed before running tasks."""
            try:
                with transaction.atomic():
                    job = serializer.save()
                    job.formats = export_formats
                    if preset:
                        """Get the tags from the uploaded preset."""
                        logger.debug("Found preset with uid: %s" % preset)
                        config = ExportConfig.objects.get(uid=preset)
                        job.configs.add(config)
                        preset_path = config.upload.path
                        """Use the UnfilteredPresetParser."""
                        parser = presets.UnfilteredPresetParser(preset=preset_path)
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry["name"],
                                key=entry["key"],
                                value=entry["value"],
                                geom_types=entry["geom_types"],
                                data_model="PRESET",
                                job=job,
                            )
                    elif tags:
                        """Get tags from request."""
                        for entry in tags:
                            tag = Tag.objects.create(
                                name=entry["name"],
                                key=entry["key"],
                                value=entry["value"],
                                job=job,
                                data_model=entry["data_model"],
                                geom_types=entry["geom_types"],
                                groups=entry["groups"],
                            )
                    else:
                        """
                        Use hdm preset as default tags if no preset or tags
                        are provided in the request.
                        """
                        path = os.path.dirname(os.path.realpath(__file__))
                        parser = presets.PresetParser(preset=path + "/presets/hdm_presets.xml")
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry["name"],
                                key=entry["key"],
                                value=entry["value"],
                                geom_types=entry["geom_types"],
                                data_model="HDM",
                                job=job,
                            )
                    # check for translation file
                    if translation:
                        config = ExportConfig.objects.get(uid=translation)
                        job.configs.add(config)
                    # check for transform file
                    if transform:
                        config = ExportConfig.objects.get(uid=transform)
                        job.configs.add(config)
            except Exception as e:
                # any failure while saving rolls back (via the atomic
                # block) and is reported as a server error
                error_data = OrderedDict()
                error_data["id"] = _("server_error")
                error_data["message"] = _("Error creating export job: %(error)s") % {"error": e}
                return Response(error_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            error_data = OrderedDict()
            error_data["formats"] = [_("Invalid format provided.")]
            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
        # run the tasks
        task_runner = ExportTaskRunner()
        job_uid = str(job.uid)
        task_runner.run_task(job_uid=job_uid)
        running = JobSerializer(job, context={"request": request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def create(self, request, *args, **kwargs):
    """
    Create a Job from the supplied request data.

    The request data is validated by *api.serializers.JobSerializer*.
    Associates the *Job* with required export formats, *ExportConfig*
    and *Tags*. The job, its tags and its config are saved inside a
    transaction before the tasks are queued.

    Args:
        request: the HTTP request.
        *args: Variable length argument list.
        **kwargs: Arbitrary keyword arguments.

    Returns:
        the serialized job data (202) on success, 400 on validation
        failure or when no valid format was provided, 500 when saving
        the job raises.

    Raises:
        ValidationError: in case of validation errors.
    """
    serializer = self.get_serializer(data=request.data)
    if (serializer.is_valid()):
        """Get the required data from the validated request."""
        export_formats = request.data.get('formats')
        tags = request.data.get('tags')
        preset = request.data.get('preset')
        # featuresave/featurepub are read here but not used in this view
        featuresave = request.data.get('featuresave')
        featurepub = request.data.get('featurepub')
        job = None
        if len(export_formats) > 0:
            """Save the job and make sure it's committed before running tasks."""
            try:
                with transaction.atomic():
                    job = serializer.save()
                    job.export_formats = export_formats
                    if preset:
                        """Get the tags from the uploaded preset."""
                        config = ExportConfig.objects.get(uid=preset)
                        job.config = config
                        job.save()
                        preset_path = config.upload.path
                        """Use the UnfilteredPresetParser."""
                        parser = presets.UnfilteredPresetParser(
                            preset=preset_path)
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                geom_types=entry['geom_types'],
                                data_model='PRESET',
                                job=job)
                    elif tags:
                        """Get tags from request."""
                        for entry in tags:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                job=job,
                                data_model=entry['data_model'],
                                geom_types=entry['geom_types'],
                                groups=entry['groups'])
                    else:
                        """
                        Use hdm preset as default tags if no preset or tags
                        are provided in the request.
                        """
                        path = os.path.dirname(os.path.realpath(__file__))
                        parser = presets.PresetParser(
                            preset=path + '/presets/hdm_presets.xml')
                        tags_dict = parser.parse()
                        for entry in tags_dict:
                            tag = Tag.objects.create(
                                name=entry['name'],
                                key=entry['key'],
                                value=entry['value'],
                                geom_types=entry['geom_types'],
                                data_model='HDM',
                                job=job)
            except Exception as e:
                # any failure while saving rolls back (via the atomic
                # block) and is reported as a server error
                error_data = OrderedDict()
                error_data['id'] = _('server_error')
                error_data['message'] = _(
                    'Error creating export job: %(error)s') % {
                    'error': e
                }
                return Response(
                    error_data,
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        else:
            error_data = OrderedDict()
            error_data['formats'] = [_('Invalid format provided.')]
            return Response(error_data, status=status.HTTP_400_BAD_REQUEST)
        # run the tasks
        task_runner = ExportTaskRunner()
        job_uid = str(job.uid)
        task_runner.run_task(job_uid=job_uid)
        running = JobSerializer(job, context={'request': request})
        return Response(running.data, status=status.HTTP_202_ACCEPTED)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)