def _get_entity_id_list(self, name):
    """Read a list of non-negative integer ids named *name* from the request
    json body.

    :raises UnprocessableEntity: when the key is missing, the value is not an
        iterable of ints, or any id is negative.
    """
    ids = request.json.get(name)
    try:
        # int() is applied per element, so string ids like "7" pass but "x"
        # raises ValueError.
        if ids is None or any(int(entity_id) < 0 for entity_id in ids):
            raise UnprocessableEntity(
                f"Unexpected post body, should be list of ids named {name}.",
                what=BAD_VALUE)
    except (ValueError, TypeError):
        # BUGFIX: TypeError (ids not iterable, e.g. a bare int, or an element
        # of a non-convertible type) previously escaped as a server error
        # instead of a 422 client error.
        raise UnprocessableEntity("Expected list of integers.", what=BAD_VALUE)
    return ids
def to_model(self, obj):
    """Convert a json-compatible mapping to a model-compatible dict.

    Keys without a registered converter in ``self.cols_to_model`` (i.e.
    fields that may not be edited) are silently dropped.

    :raises UnprocessableEntity: if *obj* is None or not a mapping.
    """
    if obj is None:
        raise UnprocessableEntity("expected data in request, was empty", what=BAD_VALUE)

    if not isinstance(obj, Mapping):
        raise UnprocessableEntity("expected data object in request", what=BAD_VALUE)

    converters = self.cols_to_model
    model_data = {}
    for key, raw_value in obj.items():
        if key in converters:
            model_data[key] = converters[key](raw_value)
    return model_data
def fill_args(args, kwargs):
    """ Fill params from request in kwargs, raises bad request on validation errors. """
    for name, param in args.items():
        # Parameter lookup order: query string, then form data, then json body.
        value = request.args.get(name)
        if value is None:
            value = request.form.get(name)
        if value is None:
            try:
                value = request.get_json(silent=True).get(name)
            except AttributeError:
                # get_json(silent=True) returns None when the body is not
                # json, making .get raise AttributeError; treat as not found.
                pass
        if value is None:
            if param.required:
                raise ApiError(message=f'Parameter {name} is required.', fields=name, what=REQUIRED)
        else:
            try:
                value = param.converter(value)
            except Exception as e:
                raise UnprocessableEntity(
                    fields=name,
                    what=BAD_VALUE,
                    message=f'Failed to validate parameter {name}: {str(e)}'
                )
        # NOTE(review): a missing optional parameter is stored as None
        # without running its converter.
        kwargs[name] = value
def create_message(self, data, commit=True):
    """Create one queued message per resolved recipient member.

    ``data['recipients']`` is a list of ``{'type': 'member'|'group', 'id': n}``
    dicts; group recipients are expanded to all member ids of the group.

    Returns the last created message, or None when there are no recipients.

    :raises UnprocessableEntity: on malformed recipients or ids missing in db.
    """
    # Validate and fetch recipients.
    recipients = data.pop('recipients', [])
    if not isinstance(recipients, list):
        raise UnprocessableEntity("Recipients should be a list.")

    member_ids = set()
    for recipient in recipients:
        type_ = recipient.get('type')
        if type_ not in ('member', 'group'):
            raise UnprocessableEntity(what=BAD_VALUE, message='Recipient type should be member or group')
        try:
            id_ = natural1(recipient.get('id'))
        except (ValueError, TypeError):
            raise UnprocessableEntity(what=BAD_VALUE, message='Recipient id should be positive int.')
        if type_ == 'member':
            member_ids.add(id_)
        else:
            # Expand a group recipient to the member ids of that group.
            member_ids.update(
                {i for i, in db_session.query(member_group.c.member_id).filter(member_group.c.group_id == id_)}
            )

    members = db_session.query(Member).filter(Member.member_id.in_(member_ids)).all()

    if len(members) != len(member_ids):
        raise UnprocessableEntity('Recipient id is missing in the database.')

    # BUGFIX: message was unbound when there were no recipients, making the
    # final return raise UnboundLocalError instead of returning cleanly.
    message = None
    for member in members:
        message = self._create_internal({
            **data,
            'recipient': member.email,
            'member_id': member.member_id,
            'status': 'queued'
        }, commit=False)

    if commit:
        db_session.commit()

    return message
def converter(value):
    """Run ``value_converter`` on *value*, passing None through untouched.

    Wraps any conversion failure in UnprocessableEntity, reported against
    the enclosing field ``key``.
    """
    if value is None:
        return None
    try:
        converted = value_converter(value)
    except Exception as e:
        raise UnprocessableEntity(
            f"Failed to save value '{value}' as {key}: {str(e)}",
            fields=key, what=BAD_VALUE)
    return converted
def _create_internal(self, data, commit=True):
    """Create a new entity from *data*.

    Exists as a separate internal method so subclasses can massage *data*
    before creation. Returns the created entity; with ``commit=False`` the
    session is only flushed (so the generated id is still populated).

    :raises UnprocessableEntity: when *data* maps to no model columns.
    """
    model_data = self.to_model(data)
    self.validate_all(model_data)

    if not model_data:
        raise UnprocessableEntity("Can not create using empty data.")

    created = self.model(**model_data)
    db_session.add(created)

    if commit:
        db_session.commit()
    else:
        # Flush to get id of created entity.
        db_session.flush()

    return created
def _update_internal(self, entity_id, data, commit=True):
    """Update the entity with id *entity_id* using *data*.

    Exists as a separate internal method so subclasses can massage *data*
    before the update. Returns the updated entity converted with ``to_obj``.

    :raises UnprocessableEntity: when *data* maps to no model columns.
    :raises NotFound: when no entity has the given id.
    """
    model_data = self.to_model(data)
    self.validate_present(model_data)

    if not model_data:
        raise UnprocessableEntity("Can not update using empty data.")

    entity = db_session.query(self.model).get(entity_id)
    if not entity:
        raise NotFound("Could not find any entity with specified parameters.")

    for column_name, new_value in model_data.items():
        setattr(entity, column_name, new_value)

    if commit:
        db_session.commit()

    return self.to_obj(entity)
def add_membership_days(member_id=None, span_type=None, days=None, creation_reason=None, default_start_date=None):
    """Extend the membership of *member_id* with *days* days of *span_type*.

    The new span starts at the later of the member's current span end and
    *default_start_date* (today when omitted). *creation_reason* acts as an
    idempotency key: an identical re-add returns the current summary, while a
    conflicting reuse raises UnprocessableEntity.

    Returns the membership summary for the member.
    """
    # NOTE(review): assert is stripped under python -O; an explicit check
    # raising a client error would be more robust for input validation.
    assert days >= 0

    old_span = db_session.query(Span).filter_by(
        creation_reason=creation_reason).first()
    if old_span:
        if days == (old_span.enddate - old_span.startdate).days and span_type == old_span.type:
            # Duplicate add days can happen because the code that handles the transactions is not yet done in a db
            # transaction, there is also an external script for handling purchases in ticktail that can create
            # duplicates.
            return get_membership_summary(member_id)
        raise UnprocessableEntity("Duplicate entry.", fields='creation_reason', what=NOT_UNIQUE)

    if not default_start_date:
        default_start_date = date.today()

    last_end, = db_session.query(func.max(Span.enddate)).filter(
        Span.member_id == member_id,
        Span.type == span_type,
        Span.deleted_at.is_(None)).first()

    # Start the new span where the latest span of this type ends, or at the
    # default start date if there is no span or it ended before that date.
    if not last_end or last_end < default_start_date:
        last_end = default_start_date

    end = last_end + timedelta(days=days)

    span = Span(member_id=member_id, startdate=last_end, enddate=end,
                type=span_type, creation_reason=creation_reason)
    db_session.add(span)
    db_session.flush()

    return get_membership_summary(member_id)
def validate_data(schema, data):
    """Validate *data* against the jsonschema *schema*.

    :raises UnprocessableEntity: when validation fails. The underlying
        validation error text is only attached to the log output when
        running in debug mode, to avoid leaking schema details.
    """
    try:
        validate(data, schema=schema)
    except ValidationError as e:
        # Fixed needless f-prefix on a literal without placeholders.
        raise UnprocessableEntity(message="Data sent in request not in correct format.",
                                  log=debug_mode() and str(e))
def not_empty(key, value):
    """Validator raising UnprocessableEntity unless *value* is truthy.

    *key* names the offending field in the raised error.
    """
    if value:
        return
    raise UnprocessableEntity(f"'{key}' can not be empty.", fields=key, what=REQUIRED)
def list(self,
         sort_by=Arg(symbol, required=False),
         sort_order=Arg(Enum(DESC, ASC), required=False),
         search: str = Arg(str, required=False),
         page_size=Arg(natural0, required=False),
         page=Arg(natural1, required=False),
         expand=Arg(symbol, required=False),
         relation=None, related_entity_id=None):
    """List entities of this model with optional filtering, search, expand of
    a related entity, sorting and pagination.

    Returns a dict with total count, page, page_size, last_page and data.
    NOTE(review): shadows the builtin ``list`` inside this method body.
    """
    query = db_session.query(self.model)

    if not self.list_deleted:
        # Soft-deleted rows are filtered out unless the entity opts in.
        query = query.filter(self.model.deleted_at.is_(None))

    if relation and related_entity_id:
        query = relation.filter(query, related_entity_id)

    if search:
        # Every whitespace-separated term must match at least one of the
        # configured search columns (AND over terms, OR over columns).
        for term in search.split():
            expression = or_(*[self.columns[column_name].like(f"%{term}%")
                               for column_name in self.search_columns])
            query = query.filter(expression)

    if expand:
        expand_field = self.expand_fields.get(expand)
        if not expand_field:
            raise UnprocessableEntity(f"Expand of {expand} not allowed.",
                                      fields='expand', what=BAD_VALUE)

        # Outer join so entities without the related row are still listed;
        # the extra columns are appended to each result row.
        query = query.outerjoin(expand_field.relation).add_columns(*expand_field.columns)

        column_obj_converter = [to_obj_converters[type(c.type)]
                                for c in expand_field.columns]

        # Use to_obj that can unpack result row.
        def to_obj(row):
            obj = self.to_obj(row[0])
            for value, column, converter in zip(row[1:], expand_field.columns,
                                                column_obj_converter):
                obj[column.name] = converter(value)
            return obj
    else:
        # Use regular to_obj.
        to_obj = self.to_obj

    sort_column = sort_by or self.default_sort_column
    sort_order = sort_order or self.default_sort_order

    if sort_column:
        try:
            column = self.columns[sort_column]
        except KeyError:
            raise UnprocessableEntity(f"Can't sort on column {sort_column}.",
                                      fields='sort_column', what=BAD_VALUE)
        order = desc if sort_order == DESC else asc
        query = query.order_by(order(column))

    # Count before applying limit/offset so total reflects the whole result.
    count = query.count()

    # Default page size is 25; page_size=0 disables pagination entirely.
    page_size = 25 if page_size is None else page_size
    page = page or 1

    if page_size:
        query = query.limit(page_size).offset((page - 1) * page_size)

    return dict(total=count,
                page=page,
                page_size=page_size,
                last_page=max(1, ceil(count / page_size)) if page_size else 1,
                data=[to_obj(entity) for entity in query])
def view_wrapper(*args, **kwargs):
    """Wrap a view function: check permission, fill parameters from the
    request, call the view, jsonify the result and commit the db session,
    translating db integrity errors into api errors."""
    try:
        # PUBLIC endpoints skip the permission check entirely.
        has_permission = (permission == PUBLIC or permission in g.permissions)

        if not has_permission:
            raise Forbidden(message=f"'{permission}' permission is required for this operation.")

        Arg.fill_args(params, kwargs)

        data = f(*args, **kwargs)

        # flat_return merges the payload with the status key instead of
        # nesting it under 'data'.
        if flat_return:
            result = jsonify({**data, 'status': status}), code
        else:
            result = jsonify({'status': status, 'data': data}), code

        if commit and not commit_on_error:
            db_session.commit()

    except IntegrityError as e:
        if isinstance(e.orig, pymysql.err.IntegrityError):
            # This parsing of db errors is very sketchy, but there are tests for it so at least we know
            # if it stops working.
            errno, error = e.orig.args
            if errno == DUP_ENTRY:
                # Pull the duplicated value and the index name out of the
                # MySQL error text.
                m = re.match(r".*?'([^']*)'.*?'([^']*)'.*", error)
                if m:
                    value = m.group(1)
                    index = m.group(2)
                    try:
                        fields = fields_by_index[index]
                        # Raised inside the try on purpose: only the
                        # fields_by_index lookup can raise KeyError here.
                        raise UnprocessableEntity(
                            f"Duplicate '{fields}', '{value}' already exists.",
                            what=NOT_UNIQUE, fields=fields)
                    except KeyError:
                        logger.warning(f"index {index} is missing in index to fields mapping")
                        raise UnprocessableEntity(f"Duplicate '{value}' not allowed.",
                                                  what=NOT_UNIQUE)
                else:
                    raise UnprocessableEntity(f"Duplicate entry.", what=NOT_UNIQUE)

            if errno == BAD_NULL_ERROR:
                # Extract the offending column name, if present in the message.
                m = re.match(r".*?'([^']*)'.*", error)
                if m:
                    field = m.group(1)
                else:
                    field = None
                raise UnprocessableEntity(
                    f"'{field}' is required." if field else "Required field missing.",
                    fields=field, what=REQUIRED)

        # Fallback for any integrity error not recognized above.
        raise UnprocessableEntity("Could not save entity using the sent data.",
                                  log=f"unrecoginized integrity error: {str(e)}")

    finally:
        # NOTE(review): presumably a test/debug mode — commits even when the
        # view raised, instead of the normal commit-on-success path above.
        if commit_on_error:
            db_session.commit()

    return result