def convert_to_sql_value(value: t.Any, column: Column) -> t.Any:
    """
    Some values which can be passed into Piccolo queries aren't valid in the
    database. For example, Enums, Table instances, and dictionaries for JSON
    columns.
    """
    from piccolo.columns.column_types import JSON, JSONB, ForeignKey
    from piccolo.table import Table

    if isinstance(value, Table):
        if isinstance(column, ForeignKey):
            return getattr(
                value,
                column._foreign_key_meta.resolved_target_column._meta.name,
            )
        elif column._meta.primary_key:
            return getattr(value, column._meta.name)
        else:
            raise ValueError(
                "Table instance provided, and the column isn't a ForeignKey, "
                "or primary key column."
            )
    elif isinstance(value, Enum):
        return value.value
    elif isinstance(column, (JSON, JSONB)) and not isinstance(value, str):
        if value is None:
            return None
        else:
            return dump_json(value)
    else:
        return value
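A minimal sketch of the Enum and JSON branches in action. The `Band` table and `Genre` enum below are hypothetical, invented purely for illustration:

from enum import Enum

from piccolo.columns.column_types import JSON, Varchar
from piccolo.table import Table


class Genre(Enum):  # hypothetical enum
    ROCK = "rock"


class Band(Table):  # hypothetical table
    name = Varchar()
    extra = JSON()


# Enums are unwrapped to their underlying value:
convert_to_sql_value(Genre.ROCK, Band.name)  # -> "rock"

# Dicts headed for a JSON / JSONB column are serialised:
convert_to_sql_value({"founded": 1962}, Band.extra)
# -> a JSON string such as '{"founded": 1962}'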
def convert_array_in(value: list):
    """
    Converts a list value into a string.
    """
    if len(value) > 0:
        if type(value[0]) not in [str, int, float]:
            raise ValueError("Can only serialise str, int and float.")
    return dump_json(value)
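A quick sketch of the resulting behaviour; note that only the first element's type is checked:

convert_array_in(["a", "b"])  # -> a JSON string such as '["a", "b"]'
convert_array_in([])          # -> '[]' (empty lists serialise fine)
convert_array_in([{"a": 1}])  # raises ValueError - dicts aren't supported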
def __init__(
    self,
    default: t.Union[
        str,
        t.List,
        t.Dict,
        t.Callable[[], t.Union[str, t.List, t.Dict]],
        None,
    ] = "{}",
    **kwargs,
) -> None:
    self._validate_default(default, (str, list, dict, None))

    if isinstance(default, (list, dict)):
        default = dump_json(default)

    self.default = default
    kwargs.update({"default": default})
    super().__init__(**kwargs)
    self.json_operator: t.Optional[str] = None
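Because `dump_json` is applied eagerly in the constructor, a dict or list default is stored as its serialised string form. A minimal sketch, using a hypothetical `Studio` table:

from piccolo.columns.column_types import JSON
from piccolo.table import Table


class Studio(Table):  # hypothetical table
    facilities = JSON(default={"mixing_desk": True})


# The dict default was already serialised by the constructor:
print(Studio.facilities.default)  # e.g. '{"mixing_desk": true}'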
async def _process_results(self, results):
    if results:
        keys = results[0].keys()
        keys = [i.replace("$", ".") for i in keys]
        raw = [dict(zip(keys, i.values())) for i in results]
    else:
        raw = []

    if hasattr(self, "run_callback"):
        self.run_callback(raw)

    raw = await self.response_handler(raw)

    output = getattr(self, "output_delegate", None)

    if output:
        if output._output.as_objects:
            # When using .first() we get a single row, not a list
            # of rows.
            if type(raw) is list:
                raw = [self.table(**columns) for columns in raw]
            elif raw is None:
                pass
            else:
                raw = self.table(**raw)
        elif type(raw) is list:
            if output._output.as_list:
                if len(raw) == 0:
                    return []
                else:
                    if len(raw[0].keys()) != 1:
                        raise ValueError(
                            "Each row returned more than one value"
                        )
                    else:
                        raw = list(
                            itertools.chain(*[j.values() for j in raw])
                        )
            if output._output.as_json:
                raw = dump_json(raw)

    return raw
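The `$` to `.` rewrite at the top undoes the aliasing which the SQL layer applies to joined columns. Isolating just that remapping step:

# Rows as a database driver might return them, with `$` separators:
results = [{"id": 1, "manager$name": "Guido"}]

keys = [k.replace("$", ".") for k in results[0].keys()]
raw = [dict(zip(keys, row.values())) for row in results]
# -> [{"id": 1, "manager.name": "Guido"}]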
async def post_single(
    self, request: Request, data: t.Dict[str, t.Any]
) -> Response:
    """
    Adds a single row, if the id doesn't already exist.
    """
    cleaned_data = self._clean_data(data)
    try:
        model = self.pydantic_model(**cleaned_data)
    except ValidationError as exception:
        return Response(str(exception), status_code=400)

    try:
        row = self.table(**model.dict())
        if self._hook_map:
            row = await execute_post_hooks(
                hooks=self._hook_map, hook_type=HookType.pre_save, row=row
            )
        response = await row.save().run()
        json = dump_json(response)
        # Returns the id of the inserted row.
        return CustomJSONResponse(json, status_code=201)
    except ValueError:
        return Response("Unable to save the resource.", status_code=500)
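For context, this handler sits behind the POST route of a PiccoloCRUD endpoint. A hedged client-side sketch; the mount path and payload are hypothetical, and the exact response shape depends on the engine:

# Hypothetical client call against a PiccoloCRUD app mounted at /movie/.
from starlette.testclient import TestClient

client = TestClient(app)  # `app` is the ASGI app serving the endpoint

response = client.post("/movie/", json={"name": "Star Wars", "rating": 93})
assert response.status_code == 201
print(response.json())  # e.g. [{"id": 1}] - the primary key of the new row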
async def _process_results(self, results):  # noqa: C901
    if results:
        keys = results[0].keys()
        keys = [i.replace("$", ".") for i in keys]
        raw = [dict(zip(keys, i.values())) for i in results]
    else:
        raw = []

    if hasattr(self, "run_callback"):
        self.run_callback(raw)

    output: t.Optional[OutputDelegate] = getattr(
        self, "output_delegate", None
    )

    #######################################################################

    if output and output._output.load_json:
        columns_delegate: t.Optional[ColumnsDelegate] = getattr(
            self, "columns_delegate", None
        )

        if columns_delegate is not None:
            json_columns = [
                i
                for i in columns_delegate.selected_columns
                if isinstance(i, (JSON, JSONB))
            ]
        else:
            json_columns = self.table._meta.json_columns

        json_column_names = []
        for column in json_columns:
            if column.alias is not None:
                json_column_names.append(column.alias)
            elif column.json_operator is not None:
                # If no alias is specified, then the default column name
                # that Postgres gives when using the `->` operator is
                # `?column?`.
                json_column_names.append("?column?")
            elif len(column._meta.call_chain) > 0:
                json_column_names.append(
                    column.get_select_string(
                        engine_type=column._meta.engine_type
                    )
                )
            else:
                json_column_names.append(column._meta.name)

        processed_raw = []

        for row in raw:
            new_row = {**row}
            for json_column_name in json_column_names:
                value = new_row.get(json_column_name)
                if value is not None:
                    new_row[json_column_name] = load_json(value)
            processed_raw.append(new_row)

        raw = processed_raw

    #######################################################################

    raw = await self.response_handler(raw)

    if output:
        if output._output.as_objects:
            # When using .first() we get a single row, not a list
            # of rows.
            if type(raw) is list:
                if output._output.nested:
                    raw = [
                        make_nested_object(row, self.table) for row in raw
                    ]
                else:
                    raw = [
                        self.table(**columns, exists_in_db=True)
                        for columns in raw
                    ]
            elif raw is None:
                pass
            else:
                if output._output.nested:
                    raw = make_nested_object(raw, self.table)
                else:
                    raw = self.table(**raw, exists_in_db=True)
        elif type(raw) is list:
            if output._output.as_list:
                if len(raw) == 0:
                    return []
                else:
                    if len(raw[0].keys()) != 1:
                        raise ValueError(
                            "Each row returned more than one value"
                        )
                    else:
                        raw = list(
                            itertools.chain(*[j.values() for j in raw])
                        )
            if output._output.as_json:
                raw = dump_json(raw)

    return raw
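Taken together, this powers the `load_json` output modifier: JSON / JSONB values, which the database returns as strings, are parsed back into Python objects before reaching the caller. A minimal usage sketch, reusing the hypothetical `Studio` table from earlier:

# Inside an async function. `Studio` is the hypothetical table defined above.
rows = await Studio.select(Studio.facilities)
# e.g. [{'facilities': '{"mixing_desk": true}'}] - raw JSON strings

rows = await Studio.select(Studio.facilities).output(load_json=True)
# e.g. [{'facilities': {'mixing_desk': True}}] - parsed into Python dicts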
def test_dump_load(self):
    """
    Test dumping then loading an object.
    """
    payload = {"a": [1, 2, 3]}
    self.assertEqual(load_json(dump_json(payload)), payload)