def get(self, request, pk=None, project_pk=None, tile_type=""):
    """
    Get the metadata for this task's asset type
    """
    task = self.get_and_check_task(request, pk)

    formula = self.request.query_params.get('formula')
    bands = self.request.query_params.get('bands')
    defined_range = self.request.query_params.get('range')
    boundaries_feature = self.request.query_params.get('boundaries')

    if formula == '':
        formula = None
    if bands == '':
        bands = None
    if defined_range == '':
        defined_range = None
    if boundaries_feature == '':
        boundaries_feature = None
    if boundaries_feature is not None:
        boundaries_feature = json.loads(boundaries_feature)

    try:
        expr, hrange = lookup_formula(formula, bands)
        if defined_range is not None:
            new_range = tuple(map(float, defined_range.split(",")[:2]))
            # Validate rescaling range
            if hrange is not None and (new_range[0] < hrange[0] or new_range[1] > hrange[1]):
                pass
            else:
                hrange = new_range
    except ValueError as e:
        raise exceptions.ValidationError(str(e))

    pmin, pmax = 2.0, 98.0
    raster_path = get_raster_path(task, tile_type)

    if not os.path.isfile(raster_path):
        raise exceptions.NotFound()

    try:
        with COGReader(raster_path) as src:
            band_count = src.dataset.meta['count']

            if boundaries_feature is not None:
                boundaries_cutline = create_cutline(
                    src.dataset, boundaries_feature,
                    CRS.from_string('EPSG:4326'))
                boundaries_bbox = featureBounds(boundaries_feature)
            else:
                boundaries_cutline = None
                boundaries_bbox = None

            if has_alpha_band(src.dataset):
                band_count -= 1

            nodata = None
            # Workaround for https://github.com/OpenDroneMap/WebODM/issues/894
            if tile_type == 'orthophoto':
                nodata = 0

            histogram_options = {"bins": 255, "range": hrange}

            if expr is not None:
                if boundaries_cutline is not None:
                    data, mask = src.preview(
                        expression=expr,
                        vrt_options={'cutline': boundaries_cutline})
                else:
                    data, mask = src.preview(expression=expr)
                data = numpy.ma.array(data)
                data.mask = mask == 0
                stats = {
                    str(b + 1): raster_stats(data[b],
                                             percentiles=(pmin, pmax),
                                             bins=255,
                                             range=hrange)
                    for b in range(data.shape[0])
                }
                stats = {b: ImageStatistics(**s) for b, s in stats.items()}
                metadata = RioMetadata(statistics=stats, **src.info().dict())
            else:
                if (boundaries_cutline is not None) and (boundaries_bbox is not None):
                    metadata = src.metadata(
                        pmin=pmin, pmax=pmax,
                        hist_options=histogram_options,
                        nodata=nodata,
                        bounds=boundaries_bbox,
                        vrt_options={'cutline': boundaries_cutline})
                else:
                    metadata = src.metadata(pmin=pmin,
                                            pmax=pmax,
                                            hist_options=histogram_options,
                                            nodata=nodata)
            info = json.loads(metadata.json())
    except IndexError as e:
        # Caught when trying to get an invalid raster metadata
        raise exceptions.ValidationError(
            "Cannot retrieve raster metadata: %s" % str(e))

    # Override min/max
    if hrange:
        for b in info['statistics']:
            info['statistics'][b]['min'] = hrange[0]
            info['statistics'][b]['max'] = hrange[1]

    cmap_labels = {
        "viridis": "Viridis",
        "jet": "Jet",
        "terrain": "Terrain",
        "gist_earth": "Earth",
        "rdylgn": "RdYlGn",
        "rdylgn_r": "RdYlGn (Reverse)",
        "spectral": "Spectral",
        "spectral_r": "Spectral (Reverse)",
        "discrete_ndvi": "Contrast NDVI",
        "better_discrete_ndvi": "Custom NDVI Index",
        "rplumbo": "Rplumbo (Better NDVI)",
        "pastel1": "Pastel",
    }

    colormaps = []
    algorithms = []
    if tile_type in ['dsm', 'dtm']:
        colormaps = ['viridis', 'jet', 'terrain', 'gist_earth', 'pastel1']
    elif formula and bands:
        colormaps = [
            'rdylgn', 'spectral', 'rdylgn_r', 'spectral_r', 'rplumbo',
            'discrete_ndvi', 'better_discrete_ndvi'
        ]
        algorithms = *get_algorithm_list(band_count),

    info['color_maps'] = []
    info['algorithms'] = algorithms
    if colormaps:
        for cmap in colormaps:
            try:
                info['color_maps'].append({
                    'key': cmap,
                    'color_map': colormap.get(cmap).values(),
                    'label': cmap_labels.get(cmap, cmap)
                })
            except FileNotFoundError:
                raise exceptions.ValidationError(
                    "Not a valid color_map value: %s" % cmap)

    info['name'] = task.name
    info['scheme'] = 'xyz'
    info['tiles'] = [get_tile_url(task, tile_type, self.request.query_params)]

    if info['maxzoom'] < info['minzoom']:
        info['maxzoom'] = info['minzoom']
    info['maxzoom'] += ZOOM_EXTRA_LEVELS
    info['minzoom'] -= ZOOM_EXTRA_LEVELS
    info['bounds'] = {'value': src.bounds, 'crs': src.dataset.crs}

    return Response(info)

def validate_username(self, value):
    # print(self)  # self is the serializer instance; value is the username value
    # Custom username validation
    if '1' in value:
        raise exceptions.ValidationError('Invalid username')
    return value

def validate_score(self, value):
    if value < 0:
        raise exceptions.ValidationError(
            error.SUBMIT_SCORE_MUST_BE_GREATER_THAN_0)
    return value

def validate_email_existance(value):
    if not User.objects.filter(email=value).exists():
        raise exceptions.ValidationError(
            'Email {0} does not exist'.format(value))

def get_key(uuid):
    try:
        return uuid.split("-")[1]
    except (ValueError, IndexError):
        raise exceptions.ValidationError("Malformed UUID")

def validate_name(self, value):
    print('value', value)
    if 'j' in value.lower():  # the name must not contain 'j'; this only demonstrates field-level validation
        raise exceptions.ValidationError('Invalid name')
    return value

def post(self, request, *args, **kwargs):
    serializer = self.serializer_class(data=request.data)
    serializer.is_valid(raise_exception=True)
    username = serializer.validated_data["username"]

    # before we continue, delete all existing expired tokens
    password_reset_token_validation_time = get_password_reset_token_expiry_time()

    # datetime.now minus expiry hours
    now_minus_expiry_time = timezone.now() - timedelta(hours=password_reset_token_validation_time)

    # delete all tokens where created_at < now - 24 hours
    clear_expired(now_minus_expiry_time)

    # find a user
    users = User.objects.filter(**{"{}__exact".format(get_password_reset_lookup_field()): username})

    active_user_found = False

    # iterate over all users and check if there is any user that is active
    # also check whether the password can be changed (is useable), as there could be users that are not allowed
    # to change their password (e.g., LDAP user)
    for user in users:
        if user.eligible_for_reset():
            active_user_found = True

    # No active user found, raise a validation error
    # but not if DJANGO_REST_PASSWORDRESET_NO_INFORMATION_LEAKAGE == True
    if not active_user_found and not getattr(settings, "DJANGO_REST_PASSWORDRESET_NO_INFORMATION_LEAKAGE", False):
        raise exceptions.ValidationError(
            {
                "email": [
                    _(
                        "There is no active user associated with this e-mail address or the password can not be changed"
                    )
                ],
            }
        )

    # last but not least: iterate over all users that are active and can change their password
    # and create a Reset Password Token and send a signal with the created token
    for user in users:
        if user.eligible_for_reset():
            # define the token as none for now
            token = None

            # check if the user already has a token
            if user.password_reset_tokens.all().count() > 0:
                # yes, already has a token, re-use this token
                token = user.password_reset_tokens.all()[0]
            else:
                # no token exists, generate a new token
                token = ResetPasswordToken.objects.create(
                    user=user,
                    user_agent=request.META.get(HTTP_USER_AGENT_HEADER, ""),
                    ip_address=request.META.get(HTTP_IP_ADDRESS_HEADER, ""),
                )
            # send a signal that the password token was created
            # let whoever receives this signal handle sending the email for the password reset
            reset_password_token_created.send(sender=self.__class__, instance=self, reset_password_token=token)

    # done
    return Response({"status": "OK"})

def validate_created_by(self, value):
    for user in _djauthmods.User.objects.filter(username=value):
        return user
    raise _drfexcs.ValidationError('User "{}" doesn\'t exist.'.format(value))

def validate(self, attrs):
    if 'data' not in attrs and 'count' not in attrs:
        raise exceptions.ValidationError(
            "Either the `data` or `count` parameters must be provided."
        )
    return attrs

def update(self, instance, validated_data):
    if "children" in validated_data:
        raise exceptions.ValidationError("Children not allowed to update.")
    return self._do_with_vars("update", instance, validated_data=validated_data)

def validate_username(self, value):
    print(value)
    if "1" in value:
        raise exceptions.ValidationError("Invalid username")
    return value

def run_validation(self, data=serializers.empty):
    validated_data = super(CreateUserSerializer, self).run_validation(data)
    if validated_data['password'] != validated_data.pop('password2', None):
        raise exceptions.ValidationError('Passwords do not match.')
    return validated_data

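# --- Usage sketch (not part of the original sources) ---------------------------
# A minimal, self-contained illustration of how the run_validation override above
# behaves: 'password2' is popped during validation and compared against 'password'.
# The field list and the standalone settings.configure() call are assumptions made
# so the snippet can run outside the original project.
import django
from django.conf import settings

if not settings.configured:
    settings.configure()  # bare-bones settings so DRF can be used standalone
    django.setup()

from rest_framework import exceptions, serializers


class CreateUserSerializer(serializers.Serializer):
    username = serializers.CharField()
    password = serializers.CharField()
    password2 = serializers.CharField()

    def run_validation(self, data=serializers.empty):
        validated_data = super(CreateUserSerializer, self).run_validation(data)
        if validated_data['password'] != validated_data.pop('password2', None):
            raise exceptions.ValidationError('Passwords do not match.')
        return validated_data


mismatch = CreateUserSerializer(data={'username': 'demo', 'password': 'a', 'password2': 'b'})
print(mismatch.is_valid())   # False: the mismatch surfaces as a validation error
print(mismatch.errors)       # the 'Passwords do not match.' detail raised above

match = CreateUserSerializer(data={'username': 'demo', 'password': 'a', 'password2': 'a'})
print(match.is_valid())      # True
print(match.validated_data)  # contains username and password only; password2 was popped
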
def post(self, request, pk=None, project_pk=None, asset_type=None):
    """
    Export assets (orthophoto, DEMs, etc.) after applying scaling
    formulas, shading, reprojections
    """
    task = self.get_and_check_task(request, pk)

    formula = request.data.get('formula')
    bands = request.data.get('bands')
    rescale = request.data.get('rescale')
    export_format = request.data.get(
        'format', 'laz' if asset_type == 'georeferenced_model' else 'gtiff')
    epsg = request.data.get('epsg')
    color_map = request.data.get('color_map')
    hillshade = request.data.get('hillshade')

    if formula == '':
        formula = None
    if bands == '':
        bands = None
    if rescale == '':
        rescale = None
    if epsg == '':
        epsg = None
    if color_map == '':
        color_map = None
    if hillshade == '':
        hillshade = None

    expr = None

    if asset_type in ['orthophoto', 'dsm', 'dtm'] and not export_format in ['gtiff', 'gtiff-rgb', 'jpg', 'png', 'kmz']:
        raise exceptions.ValidationError(
            _("Unsupported format: %(value)s") % {'value': export_format})
    if asset_type == 'georeferenced_model' and not export_format in ['laz', 'las', 'ply', 'csv']:
        raise exceptions.ValidationError(
            _("Unsupported format: %(value)s") % {'value': export_format})

    # Default color map, hillshade
    if asset_type in ['dsm', 'dtm'] and export_format != 'gtiff':
        if color_map is None:
            color_map = 'viridis'
        if hillshade is None:
            hillshade = 6

    if color_map is not None:
        try:
            colormap.get(color_map)
        except InvalidColorMapName:
            raise exceptions.ValidationError(
                _("Not a valid color_map value"))

    if epsg is not None:
        try:
            epsg = int(epsg)
        except ValueError:
            raise exceptions.ValidationError(
                _("Invalid EPSG code: %(value)s") % {'value': epsg})

    if (formula and not bands) or (not formula and bands):
        raise exceptions.ValidationError(
            _("Both formula and bands parameters are required"))

    if formula and bands:
        try:
            expr, _discard_ = lookup_formula(formula, bands)
        except ValueError as e:
            raise exceptions.ValidationError(str(e))

    if export_format in ['gtiff-rgb', 'jpg', 'png']:
        if formula is not None and rescale is None:
            rescale = "-1,1"

    if export_format == 'gtiff':
        rescale = None

    if rescale is not None:
        rescale = rescale.replace("%2C", ",")
        try:
            rescale = list(map(float, rescale.split(",")))
        except ValueError:
            raise exceptions.ValidationError(
                _("Invalid rescale value: %(value)s") % {'value': rescale})

    if hillshade is not None:
        try:
            hillshade = float(hillshade)
            if hillshade < 0:
                raise Exception("Hillshade must be > 0")
        except:
            raise exceptions.ValidationError(
                _("Invalid hillshade value: %(value)s") % {'value': hillshade})

    if asset_type == 'georeferenced_model':
        url = get_pointcloud_path(task)
    else:
        url = get_raster_path(task, asset_type)

    if not os.path.isfile(url):
        raise exceptions.NotFound()

    if epsg is not None and task.epsg is None:
        raise exceptions.ValidationError(
            _("Cannot use epsg on non-georeferenced dataset"))

    # Strip unsafe chars, append suffix
    extension = extension_for_export_format(export_format)
    filename = "{}{}.{}".format(
        get_asset_download_filename(task, asset_type),
        "-{}".format(formula) if expr is not None else "",
        extension)

    if asset_type in ['orthophoto', 'dsm', 'dtm']:
        # Shortcut the process if no processing is required
        if export_format == 'gtiff' and (epsg == task.epsg or epsg is None) and expr is None:
            return Response({
                'url': '/api/projects/{}/tasks/{}/download/{}.tif'.format(
                    task.project.id, task.id, asset_type),
                'filename': filename
            })
        else:
            celery_task_id = export_raster.delay(url,
                                                 epsg=epsg,
                                                 expression=expr,
                                                 format=export_format,
                                                 rescale=rescale,
                                                 color_map=color_map,
                                                 hillshade=hillshade,
                                                 asset_type=asset_type,
                                                 name=task.name).task_id
            return Response({
                'celery_task_id': celery_task_id,
                'filename': filename
            })
    elif asset_type == 'georeferenced_model':
        # Shortcut the process if no processing is required
        if export_format == 'laz' and (epsg == task.epsg or epsg is None):
            return Response({
                'url': '/api/projects/{}/tasks/{}/download/{}.laz'.format(
                    task.project.id, task.id, asset_type),
                'filename': filename
            })
        else:
            celery_task_id = export_pointcloud.delay(
                url, epsg=epsg, format=export_format).task_id
            return Response({
                'celery_task_id': celery_task_id,
                'filename': filename
            })

def get(self, request, pk=None, project_pk=None, tile_type="", z="", x="", y="", scale=1):
    """
    Get a tile image
    """
    task = self.get_and_check_task(request, pk)

    z = int(z)
    x = int(x)
    y = int(y)
    scale = int(scale)
    ext = "png"
    driver = "jpeg" if ext == "jpg" else ext

    indexes = None
    nodata = None
    rgb_tile = None

    formula = self.request.query_params.get('formula')
    bands = self.request.query_params.get('bands')
    rescale = self.request.query_params.get('rescale')
    color_map = self.request.query_params.get('color_map')
    hillshade = self.request.query_params.get('hillshade')
    boundaries_feature = self.request.query_params.get('boundaries')

    if boundaries_feature == '':
        boundaries_feature = None
    if boundaries_feature is not None:
        try:
            boundaries_feature = json.loads(boundaries_feature)
        except json.JSONDecodeError:
            raise exceptions.ValidationError(_("Invalid boundaries parameter"))

    if formula == '':
        formula = None
    if bands == '':
        bands = None
    if rescale == '':
        rescale = None
    if color_map == '':
        color_map = None
    if hillshade == '' or hillshade == '0':
        hillshade = None

    try:
        expr, _discard_ = lookup_formula(formula, bands)
    except ValueError as e:
        raise exceptions.ValidationError(str(e))

    if tile_type in ['dsm', 'dtm'] and rescale is None:
        rescale = "0,1000"
    if tile_type == 'orthophoto' and rescale is None:
        rescale = "0,255"
    if tile_type in ['dsm', 'dtm'] and color_map is None:
        color_map = "gray"

    if tile_type == 'orthophoto' and formula is not None:
        if color_map is None:
            color_map = "gray"
        if rescale is None:
            rescale = "-1,1"

    if nodata is not None:
        nodata = np.nan if nodata == "nan" else float(nodata)
    tilesize = scale * 256
    url = get_raster_path(task, tile_type)

    if not os.path.isfile(url):
        raise exceptions.NotFound()

    with COGReader(url) as src:
        if not src.tile_exists(z, x, y):
            raise exceptions.NotFound(_("Outside of bounds"))

    with COGReader(url) as src:
        minzoom, maxzoom = get_zoom_safe(src)
        has_alpha = has_alpha_band(src.dataset)
        if z < minzoom - ZOOM_EXTRA_LEVELS or z > maxzoom + ZOOM_EXTRA_LEVELS:
            raise exceptions.NotFound()

        if boundaries_feature is not None:
            try:
                boundaries_cutline = create_cutline(
                    src.dataset, boundaries_feature,
                    CRS.from_string('EPSG:4326'))
            except:
                raise exceptions.ValidationError(_("Invalid boundaries"))
        else:
            boundaries_cutline = None

        # Handle N-bands datasets for orthophotos (not plant health)
        if tile_type == 'orthophoto' and expr is None:
            ci = src.dataset.colorinterp

            # More than 4 bands?
            if len(ci) > 4:
                # Try to find RGBA band order
                if ColorInterp.red in ci and \
                        ColorInterp.green in ci and \
                        ColorInterp.blue in ci:
                    indexes = (ci.index(ColorInterp.red) + 1,
                               ci.index(ColorInterp.green) + 1,
                               ci.index(ColorInterp.blue) + 1,)
                else:
                    # Fallback to first three
                    indexes = (1, 2, 3,)
            elif has_alpha:
                indexes = non_alpha_indexes(src.dataset)

    # Workaround for https://github.com/OpenDroneMap/WebODM/issues/894
    if nodata is None and tile_type == 'orthophoto':
        nodata = 0

    resampling = "nearest"
    padding = 0
    if tile_type in ["dsm", "dtm"]:
        resampling = "bilinear"
        padding = 16

    try:
        with COGReader(url) as src:
            if expr is not None:
                if boundaries_cutline is not None:
                    tile = src.tile(x, y, z,
                                    expression=expr,
                                    tilesize=tilesize,
                                    nodata=nodata,
                                    padding=padding,
                                    resampling_method=resampling,
                                    vrt_options={'cutline': boundaries_cutline})
                else:
                    tile = src.tile(x, y, z,
                                    expression=expr,
                                    tilesize=tilesize,
                                    nodata=nodata,
                                    padding=padding,
                                    resampling_method=resampling)
            else:
                if boundaries_cutline is not None:
                    tile = src.tile(x, y, z,
                                    tilesize=tilesize,
                                    nodata=nodata,
                                    padding=padding,
                                    resampling_method=resampling,
                                    vrt_options={'cutline': boundaries_cutline})
                else:
                    tile = src.tile(x, y, z,
                                    indexes=indexes,
                                    tilesize=tilesize,
                                    nodata=nodata,
                                    padding=padding,
                                    resampling_method=resampling)
    except TileOutsideBounds:
        raise exceptions.NotFound(_("Outside of bounds"))

    if color_map:
        try:
            colormap.get(color_map)
        except InvalidColorMapName:
            raise exceptions.ValidationError(_("Not a valid color_map value"))

    intensity = None
    try:
        rescale_arr = list(map(float, rescale.split(",")))
    except ValueError:
        raise exceptions.ValidationError(_("Invalid rescale value"))

    options = img_profiles.get(driver, {})

    if hillshade is not None:
        try:
            hillshade = float(hillshade)
            if hillshade <= 0:
                hillshade = 1.0
        except ValueError:
            raise exceptions.ValidationError(_("Invalid hillshade value"))

        if tile.data.shape[0] != 1:
            raise exceptions.ValidationError(
                _("Cannot compute hillshade of non-elevation raster (multiple bands found)"))

        delta_scale = (maxzoom + ZOOM_EXTRA_LEVELS + 1 - z) * 4
        dx = src.dataset.meta["transform"][0] * delta_scale
        dy = -src.dataset.meta["transform"][4] * delta_scale

        ls = LightSource(azdeg=315, altdeg=45)

        # Hillshading is not a local tile operation and
        # requires neighbor tiles to be rendered seamlessly
        elevation = get_elevation_tiles(tile.data[0], url, x, y, z, tilesize,
                                        nodata, resampling, padding)
        intensity = ls.hillshade(elevation, dx=dx, dy=dy, vert_exag=hillshade)
        intensity = intensity[tilesize:tilesize * 2, tilesize:tilesize * 2]

    if intensity is not None:
        rgb = tile.post_process(in_range=(rescale_arr,))
        if colormap:
            rgb, _discard_ = apply_cmap(rgb.data, colormap.get(color_map))
        if rgb.data.shape[0] != 3:
            raise exceptions.ValidationError(
                _("Cannot process tile: intensity image provided, but no RGB data was computed."))

        intensity = intensity * 255.0
        rgb = hsv_blend(rgb, intensity)

        if rgb is not None:
            return HttpResponse(render(rgb, tile.mask, img_format=driver, **options),
                                content_type="image/{}".format(ext))

    if color_map is not None:
        return HttpResponse(
            tile.post_process(in_range=(rescale_arr,)).render(
                img_format=driver,
                colormap=colormap.get(color_map),
                **options),
            content_type="image/{}".format(ext))

    return HttpResponse(
        tile.post_process(in_range=(rescale_arr,)).render(img_format=driver, **options),
        content_type="image/{}".format(ext))

def raise_exception(value):
    raise exceptions.ValidationError('Raised error')

def perform_update(self, serializer):
    if serializer.instance.status != DECISION_STATUS[2][0]:
        raise exceptions.ValidationError('The decision is not solving')
    serializer.save()

def validate_event(self, event):
    if event not in settings.HOOK_EVENTS:
        err_msg = "Unexpected event {}".format(event)
        raise exceptions.ValidationError(detail=err_msg, code=400)
    return event

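# --- Usage sketch (assumption, not from the original project) ------------------
# validate_event above gates webhook subscriptions on settings.HOOK_EVENTS. In
# django-rest-hooks style projects that setting is commonly a mapping of event
# name to "app.Model.action"; only the keys matter for this check. The concrete
# events below are made up for illustration.
HOOK_EVENTS = {
    'book.added': 'library.Book.created+',
    'book.read': 'library.Book.read',
}

def is_allowed_event(event):
    # Mirrors the membership test performed by validate_event
    return event in HOOK_EVENTS

print(is_allowed_event('book.added'))   # True  -> validate_event returns the event
print(is_allowed_event('book.burned'))  # False -> validate_event raises ValidationError
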
def update(self, request, *args, **kwargs):
    partial = kwargs.pop('partial', False)
    instance = self.get_object()
    serializer = self.get_serializer(instance,
                                     data=request.data,
                                     partial=partial)
    serializer.is_valid(raise_exception=True)

    existing_participation = Participation.objects.filter(
        participation_id=kwargs['pk'])
    existing_participation_course_id = existing_participation.values()[0]['participation_course_id_id']
    existing_participation_course_phase = existing_participation.values()[0]['participation_course_phase']

    relevant_course = Course.objects.filter(
        course_id=existing_participation_course_id)
    relevant_course_phases = eval(
        relevant_course.values()[0]['course_phases'])
    relevant_course_phases_timed = eval(
        relevant_course.values()[0]['course_phases_timed'])
    relevant_course_phases_nontimed = eval(
        relevant_course.values()[0]['course_phases_nontimed'])

    requested_course_phase = int(
        request.data['participation_course_phase'])

    try:
        requested_course_phase_name = relevant_course_phases[
            requested_course_phase]
    except:
        # Ensure that the requested course phase is within the range of available course phases
        message_valid_course_phases = ""
        for i in range(0, len(relevant_course_phases)):
            message_valid_course_phases += str(i) + ": '" + relevant_course_phases[i] + (
                "', " if i != len(relevant_course_phases) - 1 else "'. ")
        message_course_phase_invalid_value = "The requested course phase is not one of the available phases: " + \
            message_valid_course_phases + \
            "Provide one of the integer numbers, e.g. participation_course_phase=0 for phase " + \
            "'" + relevant_course_phases[0] + "'"
        if (int(request.data['participation_course_phase'])
                not in [p for p in range(0, len(relevant_course_phases))]):
            raise exceptions.ValidationError(
                detail=message_course_phase_invalid_value)

    existing_participation_course_phase_name = relevant_course_phases[
        existing_participation_course_phase]

    # Ensure that users may only jump between course phases within one course
    if (int(request.data['participation_course_id']) != existing_participation_course_id):
        # Raise Error
        message = "Users may only jump between phases within one course. If current course shall be exited, delete unwanted participation first by calling participations/delete/."
        raise exceptions.ValidationError(detail=message)

    self.perform_update(serializer)

    # Call runtime update, ONLY with reset_runtime=True IF phase before was nontimed, and new phase is timed.
    course_id_list = [existing_participation_course_id]
    db_entries_update = DbEntriesUpdate()
    # If user changes from timed to nontimed phase, request reset_runtime (will be followed if no other users in timed)
    if ((requested_course_phase_name in relevant_course_phases_timed
         and existing_participation_course_phase_name in relevant_course_phases_nontimed)):
        db_entries_update.update_course_time(course_id_list, True)
    # Otherwise, do not request reset_runtime
    else:
        db_entries_update.update_course_time(course_id_list, False)

    if getattr(instance, '_prefetched_objects_cache', None):
        # If 'prefetch_related' has been applied to a queryset, we need to
        # forcibly invalidate the prefetch cache on the instance.
        instance._prefetched_objects_cache = {}

    return Response(serializer.data)

def validate(self, attrs):
    pwd = attrs.get('pwd')
    re_pwd = attrs.pop('re_pwd')
    if pwd != re_pwd:
        raise exceptions.ValidationError({'pwd': 'The two passwords do not match'})
    return attrs

def is_pending_request(request, view, obj=None):
    if obj and obj.request.state != models.ExpertRequest.States.PENDING:
        raise exceptions.ValidationError(_('Expert request should be in pending state.'))

def validate_username(self, value):
    if "1" in value:
        raise exceptions.ValidationError("Invalid username")
    return value

def _validate_data_source(data_source):
    data_sources = get_data_sources(dic=True)
    if data_source not in data_sources:
        raise exceptions.ValidationError(
            f'Invalid data_source: "{data_source}"')

def get_shop_id(uuid):
    try:
        return int(uuid.split("-")[0])
    except ValueError:
        raise exceptions.ValidationError("Malformed UUID")

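# --- Illustration (assumption, not from the original code) ---------------------
# get_shop_id above and get_key earlier in this collection both appear to assume
# an identifier of the form "<shop_id>-<key>[-...]" and split on "-".
# The sample value below is made up.
sample = "42-a1b2c3-extra"
print(int(sample.split("-")[0]))  # 42      -> what get_shop_id would return
print(sample.split("-")[1])       # a1b2c3  -> what get_key would return
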
def _get_txt_data(self, txt_body_lines, defalut_data):
    # Single-choice, true/false, multiple-choice
    is_multiple = theory_models.ChoiceTask.TopicProblem.SINGLE
    is_last = False
    # Options of each question
    rows = []
    json_row = {}
    option_json = OrderedDict()
    last_lines = txt_body_lines[-1]
    for number, line in enumerate(txt_body_lines):
        if last_lines == line:
            is_last = True
        try:
            line = line.decode('gbk').decode('utf8').strip()
        except:
            line = line.decode('utf8').strip()
        # Determine the question type
        if re.match('[一二三四五六七]、多项选择题'.decode('utf8'), line):
            is_multiple = theory_models.ChoiceTask.TopicProblem.MULTIPLE
            continue
        elif re.match('([一二三四五六七]、单项选择题)'.decode('utf8'), line):
            is_multiple = theory_models.ChoiceTask.TopicProblem.SINGLE
            continue
        elif re.match('[一二三四五六七]、判断题'.decode('utf8'), line):
            is_multiple = theory_models.ChoiceTask.TopicProblem.JUDGMENT
            continue
        # Match the question content
        match_group = re.match('\d+、(.+?)[(\(]([A-Z]+)[)\)]$'.decode('utf8'), line)
        if is_multiple == theory_models.ChoiceTask.TopicProblem.JUDGMENT:
            match_group = re.match('\d+、(.+?)[(\(]([AB])[)\)]$'.decode('utf8'), line)
        # Match an option line
        option_match_group = re.match("([A-Z]+)、(.+)".decode('utf8'), line)
        if match_group:
            # The previous question ends and the next one begins
            # (covers the first and the last question as well)
            if json_row:
                json_row, option_json = self._combined_data(rows, json_row, option_json,
                                                            self.defautlOption, defalut_data)
            content = match_group.group(1)
            answer = match_group.group(2)
            if is_multiple == theory_models.ChoiceTask.TopicProblem.SINGLE and len(answer) != 1:
                raise exceptions.ValidationError(TheoryResError.ACCURATE_FORMAT_ERROR_NO_COL.format(number + 1))
            if len(answer) > 1:
                # is_multiple = True
                answer = "|".join(answer)
            json_row['content'] = content
            json_row['answer'] = answer
            json_row['multiple'] = is_multiple
        elif option_match_group:
            option_tab = option_match_group.group(1)
            option_content = option_match_group.group(2)
            option_json[option_tab] = option_content
        elif line:
            # Any non-empty line that matches neither pattern is an error
            raise exceptions.ValidationError(TheoryResError.ACCURATE_FORMAT_ERROR_NO_COL.format(number + 1))
        if is_last and json_row:
            # Import the last question's data
            json_row, option_json = self._combined_data(rows, json_row, option_json,
                                                        self.defautlOption, defalut_data)
    return rows

def delete_charts(self, request):
    chart_ids = request.data.get('chart_ids')
    if not chart_ids:
        raise exceptions.ValidationError('Missing chart_ids parameter')
    ShoppingChart.objects.filter(id__in=chart_ids).update(status='DELETED')
    return Response('ok')

def sub_perform_create(self, serializer):
    if theory_models.ChoiceCategory.objects.filter(
            cn_name=serializer.validated_data['cn_name']).exists():
        raise exceptions.ValidationError({'cn_name': [TaskCategoryError.NAME_HAVE_EXISTED]})
    serializer.save()
    return True

def list(self, request, project):
    repository = models.Repository.objects.get(name=project)

    signature_hashes = request.query_params.getlist("signatures")  # deprecated
    signature_ids = request.query_params.getlist("signature_id")
    push_ids = request.query_params.getlist("push_id")
    try:
        job_ids = [
            int(job_id)
            for job_id in request.query_params.getlist("job_id")
        ]
    except ValueError:
        return Response(
            {"message": "Job id(s) must be specified as integers"},
            status=HTTP_400_BAD_REQUEST)

    if not (signature_ids or signature_hashes or push_ids or job_ids):
        raise exceptions.ValidationError('Need to specify either '
                                         'signature_id, signatures, '
                                         'push_id, or job_id')
    if signature_ids and signature_hashes:
        raise exceptions.ValidationError('Can\'t specify both signature_id '
                                         'and signatures in same query')

    datums = PerformanceDatum.objects.filter(
        repository=repository).select_related('signature', 'push')

    if signature_hashes:
        signature_ids = PerformanceSignature.objects.filter(
            repository=repository,
            signature_hash__in=signature_hashes).values_list('id', flat=True)
        datums = datums.filter(signature__id__in=list(signature_ids))
    elif signature_ids:
        datums = datums.filter(signature__id__in=list(signature_ids))
    if push_ids:
        datums = datums.filter(push_id__in=push_ids)
    if job_ids:
        datums = datums.filter(job_id__in=job_ids)

    frameworks = request.query_params.getlist('framework')
    if frameworks:
        datums = datums.filter(signature__framework__in=frameworks)

    interval = request.query_params.get('interval')
    start_date = request.query_params.get('start_date')  # 'YYYY-MM-DDTHH:MM:SS'
    end_date = request.query_params.get('end_date')  # 'YYYY-MM-DDTHH:MM:SS'
    if interval and (start_date or end_date):
        return Response(
            {"message": "Provide either interval only -or- start (and end) date"},
            status=HTTP_400_BAD_REQUEST)

    if interval:
        datums = datums.filter(
            push_timestamp__gt=datetime.datetime.utcfromtimestamp(
                int(time.time() - int(interval))))

    if start_date:
        datums = datums.filter(push_timestamp__gt=start_date)
    if end_date:
        datums = datums.filter(push_timestamp__lt=end_date)

    ret = defaultdict(list)
    values_list = datums.values_list('id', 'signature_id',
                                     'signature__signature_hash', 'job_id',
                                     'push_id', 'push_timestamp', 'value',
                                     'push__revision')
    for (id, signature_id, signature_hash, job_id, push_id, push_timestamp,
         value, push__revision) in values_list:
        ret[signature_hash].append({
            'id': id,
            'signature_id': signature_id,
            'job_id': job_id,
            'push_id': push_id,
            'revision': push__revision,
            'push_timestamp': int(time.mktime(push_timestamp.timetuple())),
            'value': round(value, 2)  # round to 2 decimal places
        })

    return Response(ret)

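# --- Validation-rules sketch (not part of the original view) -------------------
# A plain-Python restatement of the request checks performed by list() above,
# useful as a quick reference for which query-parameter combinations are allowed.
# Parameter names come from the view; everything else here is illustrative.
def check_params(signature_ids=(), signature_hashes=(), push_ids=(), job_ids=(),
                 interval=None, start_date=None, end_date=None):
    if not (signature_ids or signature_hashes or push_ids or job_ids):
        return "ValidationError: need signature_id, signatures, push_id or job_id"
    if signature_ids and signature_hashes:
        return "ValidationError: can't specify both signature_id and signatures"
    if interval and (start_date or end_date):
        return "HTTP 400: provide either interval only -or- start (and end) date"
    return "OK"

print(check_params(signature_ids=[123], interval=86400))           # OK
print(check_params())                                               # missing selector
print(check_params(signature_ids=[1], signature_hashes=['abc']))    # conflicting selectors
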
def validate_new_password(self, value):
    password = self.initial_data.get('password')
    if password == value:
        detail = 'New password should be different from current password.'
        raise exceptions.ValidationError(detail=detail)
    return self.validate_password_value(value)

def validate_user(self, value):
    exists = models.UserInfo.objects.filter(id=value).exists()
    if not exists:
        raise exceptions.ValidationError('User does not exist')
    return value

def get(self, request, pk=None, project_pk=None, tile_type="", z="", x="", y="", scale=1):
    """
    Get a tile image
    """
    task = self.get_and_check_task(request, pk)

    z = int(z)
    x = int(x)
    y = int(y)
    scale = int(scale)
    ext = "png"
    driver = "jpeg" if ext == "jpg" else ext

    indexes = None
    nodata = None

    formula = self.request.query_params.get('formula')
    bands = self.request.query_params.get('bands')
    rescale = self.request.query_params.get('rescale')
    color_map = self.request.query_params.get('color_map')
    hillshade = self.request.query_params.get('hillshade')

    if formula == '':
        formula = None
    if bands == '':
        bands = None
    if rescale == '':
        rescale = None
    if color_map == '':
        color_map = None
    if hillshade == '' or hillshade == '0':
        hillshade = None

    try:
        expr, _ = lookup_formula(formula, bands)
    except ValueError as e:
        raise exceptions.ValidationError(str(e))

    if tile_type in ['dsm', 'dtm'] and rescale is None:
        rescale = "0,1000"
    if tile_type in ['dsm', 'dtm'] and color_map is None:
        color_map = "gray"

    if tile_type == 'orthophoto' and formula is not None:
        if color_map is None:
            color_map = "gray"
        if rescale is None:
            rescale = "-1,1"

    if nodata is not None:
        nodata = np.nan if nodata == "nan" else float(nodata)
    tilesize = scale * 256
    url = get_raster_path(task, tile_type)

    if not os.path.isfile(url):
        raise exceptions.NotFound()

    with rasterio.open(url) as src:
        minzoom, maxzoom = get_zoom_safe(src)
        has_alpha = has_alpha_band(src)
        if z < minzoom - ZOOM_EXTRA_LEVELS or z > maxzoom + ZOOM_EXTRA_LEVELS:
            raise exceptions.NotFound()

        # Handle N-bands datasets
        if tile_type == 'orthophoto':
            ci = src.colorinterp

            # More than 4 bands?
            if len(ci) > 4:
                # Try to find RGBA band order
                if ColorInterp.red in ci and \
                        ColorInterp.green in ci and \
                        ColorInterp.blue in ci:  # and ColorInterp.alpha in ci:
                    indexes = (ci.index(ColorInterp.red) + 1,
                               ci.index(ColorInterp.green) + 1,
                               ci.index(ColorInterp.blue) + 1,)

                    # TODO: adding alpha band should fix black backgrounds
                    # but the tiles disappear. Probable bug in rasterio/GDAL
                else:
                    # Fallback to first four
                    indexes = (1, 2, 3, )  # , 4, )

        resampling = "nearest"
        padding = 0
        if tile_type in ["dsm", "dtm"]:
            resampling = "bilinear"
            padding = 16

        try:
            if expr is not None:
                tile, mask = expression(
                    url, x, y, z, expr=expr,
                    tilesize=tilesize,
                    nodata=nodata,
                    tile_edge_padding=padding,
                    resampling_method=resampling
                )
            else:
                tile, mask = main.tile(
                    url, x, y, z,
                    indexes=indexes,
                    tilesize=tilesize,
                    nodata=nodata,
                    tile_edge_padding=padding,
                    resampling_method=resampling
                )
        except TileOutsideBounds:
            raise exceptions.NotFound("Outside of bounds")

        # Use alpha channel for transparency, don't use the mask if one is provided (redundant)
        if has_alpha and expr is None:
            mask = None

        if color_map:
            try:
                color_map = get_colormap(color_map, format="gdal")
            except FileNotFoundError:
                raise exceptions.ValidationError("Not a valid color_map value")

        intensity = None

        if hillshade is not None:
            try:
                hillshade = float(hillshade)
                if hillshade <= 0:
                    hillshade = 1.0
            except ValueError:
                raise exceptions.ValidationError("Invalid hillshade value")

            if tile.shape[0] != 1:
                raise exceptions.ValidationError(
                    "Cannot compute hillshade of non-elevation raster (multiple bands found)")

            delta_scale = (maxzoom + ZOOM_EXTRA_LEVELS + 1 - z) * 4
            dx = src.meta["transform"][0] * delta_scale
            dy = -src.meta["transform"][4] * delta_scale

            ls = LightSource(azdeg=315, altdeg=45)

            # Hillshading is not a local tile operation and
            # requires neighbor tiles to be rendered seamlessly
            elevation = get_elevation_tiles(tile[0], url, x, y, z, tilesize,
                                            nodata, resampling, padding)
            intensity = ls.hillshade(elevation, dx=dx, dy=dy, vert_exag=hillshade)
            intensity = intensity[tilesize:tilesize * 2, tilesize:tilesize * 2]

        rgb, rmask = rescale_tile(tile, mask, rescale=rescale)
        rgb = apply_colormap(rgb, color_map)

        if intensity is not None:
            # Quick check
            if rgb.shape[0] != 3:
                raise exceptions.ValidationError(
                    "Cannot process tile: intensity image provided, but no RGB data was computed.")

            intensity = intensity * 255.0
            rgb = hsv_blend(rgb, intensity)

        options = img_profiles.get(driver, {})
        return HttpResponse(
            array_to_image(rgb, rmask, img_format=driver, **options),
            content_type="image/{}".format(ext)
        )