Example #1
 def __post_init__(self):
     # Needed since neotime.DateTime does not work with Python copy.deepcopy
     # HACK: This is required to mutate frozen dataclasses
     object.__setattr__(self, "created_at",
                        normalize_datetime(self.created_at))
     object.__setattr__(self, "updated_at",
                        normalize_datetime(self.updated_at))
Example #2
 def __post_init__(self):
     if isinstance(self.data_type, str):
         # HACK: This is required to mutate frozen dataclasses
         object.__setattr__(self, "data_type",
                            dt.deserialize(self.data_type))
     # HACK: This is required to mutate frozen dataclasses
     object.__setattr__(self, "created_at",
                        normalize_datetime(self.created_at))
     object.__setattr__(self, "updated_at",
                        normalize_datetime(self.updated_at))
Example #3
    def from_node(
        cls,
        data: List[Tuple[str, Union[t.GraphValue, "Record", RecordStub]]],
        created_at: datetime,
        updated_at: datetime,
        created_by: str,
        updated_by: str,
        property_map: Optional[Dict[str, ModelProperty]] = None,
        fill_missing: bool = False,
    ) -> "Record":
        """
        Hydrate a `Record` from a Neo4j `Node`.

        This assumes that all non-id fields on the Node are property values.

        - If a property map is provided, default values will be used to fill
          in missing property entries in the `Record#values` member.

        - If `fill_missing` is `True` and a property map is provided, any
          property appearing in the property map that is either not present in
          `data` or that does not have a default value will be set to `None`.

          If `fill_missing` is omitted or `False`, no missing entries will be
          filled in `Record#values`.
        """
        values = dict(data)

        id = t.RecordId(into_uuid(values.pop("@id")))

        # Pop reserved keys in a loop instead of using a dict comprehension to
        # avoid allocating a new dictionary
        reserved = [k for k in values if is_reserved_property_name(k)]
        for k in reserved:
            values.pop(k)

        record = cls(
            id=id,
            values=values,
            created_at=normalize_datetime(created_at),
            updated_at=normalize_datetime(updated_at),
            created_by=t.UserNodeId(created_by),
            updated_by=t.UserNodeId(updated_by),
            name=cls.compute_record_name(property_map, values),
        )

        if property_map is not None:
            record.fill_missing_values(property_map, fill_missing=fill_missing)

        return record
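For context, a hypothetical call site for `from_node`; the `node` variable and the reserved audit property names are assumptions about how the Neo4j node is shaped, not taken from the source:

    # Hypothetical usage: hydrate a Record from a node returned by a Cypher query
    record = Record.from_node(
        data=list(node.items()),        # [(property_name, value), ...] including "@id"
        created_at=node["created_at"],  # assumed reserved audit properties
        updated_at=node["updated_at"],
        created_by=node["created_by"],
        updated_by=node["updated_by"],
        property_map=property_map,      # Optional[Dict[str, ModelProperty]]
        fill_missing=True,              # unset properties become None in record.values
    )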
Example #4
 def __post_init__(self):
     """
     If possible, coerce string filter values to UTC datetimes.
     """
     if isinstance(self.value, str):
         try:
             dt = normalize_datetime(self.value)
         except ValueError:
             pass
         else:
             object.__setattr__(self, "value", dt)
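For illustration, assuming this hook sits on a frozen filter dataclass (called `Filter` here only for the sketch), construction behaves roughly like this:

    # Hypothetical usage
    Filter(value="2021-06-01T12:00:00Z")  # value is coerced to a UTC datetime
    Filter(value="not a timestamp")       # normalize_datetime raises ValueError; value stays a str
    Filter(value=42)                      # non-strings are left untouched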
Example #5
    def format_value(cls, v):
        """
        Hack: convert NeoTime instances to ISO-formatted strings to work
        around issues with dataclasses-json encoding.

        Cannot use a custom dataclass encoder for this because of a bug in
        nested dataclasses.
        """
        if is_datetime(v):
            return normalize_datetime(v).isoformat()
        elif isinstance(v, (Sequence, Set)) and not isinstance(v, str):
            return [cls.format_value(w) for w in v]
        else:
            return v
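Roughly, assuming `is_datetime` recognizes both NeoTime and native `datetime` values, the recursion behaves like this (the class name `Encoder` is only for the sketch):

    # Hypothetical behaviour
    Encoder.format_value(some_neotime)            # -> "2021-06-01T12:00:00+00:00"
    Encoder.format_value(["a", some_neotime, 1])  # -> ["a", "2021-06-01T12:00:00+00:00", 1]
    Encoder.format_value("plain string")          # -> "plain string" (str is excluded from the Sequence branch)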
Example #6
 def default(self, obj):
     if is_datetime(obj):
         return normalize_datetime(obj).isoformat()
     else:
         return super().default(obj)
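This is the standard `json.JSONEncoder.default` hook; a sketch of how such an encoder plugs into `json.dumps`, with an illustrative class name:

    import json

    class DatetimeAwareEncoder(json.JSONEncoder):
        def default(self, obj):
            if is_datetime(obj):
                return normalize_datetime(obj).isoformat()
            return super().default(obj)

    payload = json.dumps({"created_at": some_neotime}, cls=DatetimeAwareEncoder)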
Example #7
 def __post_init__(self):
     # HACK: This is required to mutate frozen dataclasses
     object.__setattr__(self, "created_at",
                        u.normalize_datetime(self.created_at))
     object.__setattr__(self, "updated_at",
                        u.normalize_datetime(self.updated_at))
Example #8
    def suggest_values(
        self,
        model_name: str,
        model_property_name: str,
        matching_prefix: Optional[str] = None,
        dataset_id: Optional[DatasetId] = None,
        unit: Optional[str] = None,
        limit: int = 10,
    ) -> List[Tuple[Dataset, SuggestedValues]]:
        """
        Suggest values for a property. Suggestions work based on the datatype
        of the specified property:

        For booleans:
            - Return [`True`, `False`]

        For long/double:
            - Return [minimum value, maximum value]. If a `unit` argument is
              provided, only return values recorded in that unit.

        For dates:
            - Return [earliest date, latest date]

        For enumerations:
            - Return allowed values

        For arrays:
            - Return the enumeration list (if present)
            - Otherwise, return the top K most frequent array values

        For strings:
            - Return the first K strings matching the given prefix or, if no
              prefix is provided, the top K most frequent strings
        """
        with self.transaction() as tx:

            datasets_and_properties = self._get_properties(
                tx,
                model_id_or_name=model_name,
                model_property_name=model_property_name,
                dataset_id=dataset_id,
                unit=unit,
            )

            property_to_dataset = {}
            possible_properties: List[ModelProperty] = []

            for (d, p) in datasets_and_properties:
                possible_properties.append(p)
                property_to_dataset[p.id] = d

            if not possible_properties:
                raise OperationError(
                    "No matching values found",
                    cause=ModelPropertyNotFoundError(
                        model="*", property_name=model_property_name),
                )

            suggested_values: List[Tuple[Dataset, SuggestedValues]] = []

            for p in possible_properties:

                prop_values: List[NativeScalar] = []

                if isinstance(p.data_type, dt.Boolean):
                    prop_values = [True, False]
                elif isinstance(p.data_type, (dt.Long, dt.Double, dt.Date)):
                    prop_values = cast(
                        List[NativeScalar],
                        self._get_property_range_values(tx=tx,
                                                        property_id=p.id)
                        or [],
                    )
                    if isinstance(p.data_type, dt.Date):
                        prop_values = [
                            normalize_datetime(v) for v in prop_values
                        ]

                elif isinstance(p.data_type, dt.String):
                    prop_values = list(
                        self._get_property_string_values(
                            tx=tx,
                            property_id=p.id,
                            matching_prefix=matching_prefix,
                            limit=limit,
                        ))
                elif isinstance(p.data_type, dt.Enumeration):
                    prop_values = p.data_type.enum or []
                elif isinstance(p.data_type, dt.Array):
                    if p.data_type.enum is not None:
                        prop_values = p.data_type.enum or []
                    else:
                        prop_values = cast(
                            List[NativeScalar],
                            list(
                                self._get_property_array_values(
                                    tx=tx,
                                    property_id=p.id,
                                    matching_prefix=matching_prefix,
                                    limit=limit,
                                )),
                        )
                else:
                    raise ValueError(
                        f"Unsupported datatype: {str(p.data_type)}")

                suggested = SuggestedValues(
                    property_=p,
                    operators=self.get_operators(p.data_type),
                    values=cast(List[GraphValue], prop_values),
                )

                suggested_values.append((property_to_dataset[p.id], suggested))

        return suggested_values
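A hypothetical call, assuming the enclosing class is used as a service object; the model name, property name, and unit are illustrative:

    # Suggest up to 10 values for the "weight" property of the "patient" model
    for dataset, suggested in service.suggest_values(
        model_name="patient",
        model_property_name="weight",
        unit="kg",
        limit=10,
    ):
        print(dataset, suggested.values, suggested.operators)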
Example #9
 def into(self, data: Any) -> datetime:
     return normalize_datetime(data)
Example #10
 def json_dump_safe(self, record, many=False):
     for k, v in record.values.items():
         if is_datetime(v):
             record.values[k] = normalize_datetime(v)
     return record