Example #1
class Aggregation:
    """Aggregation performed as part of a query."""

    op: AggregationOperation
    field: str = dataclass_field(default="")
    alias: str = dataclass_field(default="")
    argument: Any = dataclass_field(default="")
Example #2
class TokenAlias:
    alias2token: Dict[str,
                      U[AliasableToken, SQLFunc,
                        SQLIdentifier]] = dataclass_field(default_factory=dict)
    token2alias: Dict[U[AliasableToken, SQLFunc, SQLIdentifier],
                      str] = dataclass_field(default_factory=dict)
    aliased_names: Set[str] = dataclass_field(default_factory=set)
Example #3
class StreamBuffersTimeFrame:
    """
    A dataclass where received `StreamBuffer`s for a certain time frame are stored
    """

    start_time: float = dataclass_field(default_factory=time.monotonic)
    buffers: List[StreamBuffer] = dataclass_field(default_factory=list)
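Worth noting in this example: default_factory is called once per instance, so each StreamBuffersTimeFrame captures its own creation time and gets its own buffer list. A minimal sketch of that behaviour with a hypothetical stand-in class (the real class above also needs StreamBuffer and its @dataclass decorator):

import time
from dataclasses import dataclass, field as dataclass_field
from typing import List

@dataclass
class _TimeFrame:  # hypothetical stand-in for StreamBuffersTimeFrame
    start_time: float = dataclass_field(default_factory=time.monotonic)
    buffers: List[object] = dataclass_field(default_factory=list)

first = _TimeFrame()
time.sleep(0.01)
second = _TimeFrame()

assert second.start_time > first.start_time   # each frame records its own start time
assert first.buffers is not second.buffers    # no shared mutable default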
Example #4
class LookerDashboardSourceReport(SourceReport):
    dashboards_scanned: int = 0
    charts_scanned: int = 0
    filtered_dashboards: List[str] = dataclass_field(default_factory=list)
    filtered_charts: List[str] = dataclass_field(default_factory=list)
    upstream_start_time: Optional[datetime.datetime] = None
    upstream_end_time: Optional[datetime.datetime] = None
    upstream_total_latency_in_seconds: Optional[float] = None

    def report_dashboards_scanned(self) -> None:
        self.dashboards_scanned += 1

    def report_charts_scanned(self) -> None:
        self.charts_scanned += 1

    def report_dashboards_dropped(self, model: str) -> None:
        self.filtered_dashboards.append(model)

    def report_charts_dropped(self, view: str) -> None:
        self.filtered_charts.append(view)

    def report_upstream_latency(self, start_time: datetime.datetime,
                                end_time: datetime.datetime) -> None:
        if self.upstream_start_time is None or self.upstream_start_time > start_time:
            self.upstream_start_time = start_time
        if self.upstream_end_time is None or self.upstream_end_time < end_time:
            self.upstream_end_time = end_time
        self.upstream_total_latency_in_seconds = (
            self.upstream_end_time - self.upstream_start_time).total_seconds()
Example #5
class AbstractBuff(ImmovableObject):
    # Buff is a square
    SIDE_LENGTH: int = 25

    # Time in game loop iterations
    _recharge_time: int = dataclass_field(default=200)

    _is_charging: bool = dataclass_field(default=False)
    _charge_time_start: int = dataclass_field(default=0)
    _player_captor: Optional['Player'] = dataclass_field(default=None)

    def capture_this_buff(
            self,
            charge_time_start: int,
            player_captor: 'Player'):
        self._is_charging = True
        self._charge_time_start = charge_time_start
        self._player_captor = player_captor

        self._player_captor.current_buffs.append(self)

    def check_buff_expiration(self, current_time: int):
        if (self._is_charging
                and current_time - self._charge_time_start
                >= self._recharge_time):
            self._player_captor.current_buffs.remove(self)

            self._is_charging = False

    def is_charging(self):
        return self._is_charging
Example #6
class LookMLSourceReport(SourceReport):
    models_scanned: int = 0
    views_scanned: int = 0
    explores_scanned: int = 0
    filtered_models: List[str] = dataclass_field(default_factory=list)
    filtered_views: List[str] = dataclass_field(default_factory=list)
    filtered_explores: List[str] = dataclass_field(default_factory=list)

    def report_models_scanned(self) -> None:
        self.models_scanned += 1

    def report_views_scanned(self) -> None:
        self.views_scanned += 1

    def report_explores_scanned(self) -> None:
        self.explores_scanned += 1

    def report_models_dropped(self, model: str) -> None:
        self.filtered_models.append(model)

    def report_views_dropped(self, view: str) -> None:
        self.filtered_views.append(view)

    def report_explores_dropped(self, explore: str) -> None:
        self.filtered_explores.append(explore)
Example #7
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    if make_dataclass:
        self.item_class = make_dataclass(
            "FilesPipelineTestDataClass",
            [
                ("name", str),
                # default fields
                ("image_urls", list, dataclass_field(default_factory=list)),
                ("images", list, dataclass_field(default_factory=list)),
                # overridden fields
                ("custom_image_urls", list, dataclass_field(default_factory=list)),
                ("custom_images", list, dataclass_field(default_factory=list)),
            ],
        )
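For reference, the make_dataclass call above builds at runtime the same class that a decorated definition would; a rough static equivalent (sketch, keeping only the field layout from the call):

from dataclasses import dataclass, field as dataclass_field

@dataclass
class FilesPipelineTestDataClass:
    name: str
    # default fields
    image_urls: list = dataclass_field(default_factory=list)
    images: list = dataclass_field(default_factory=list)
    # overridden fields
    custom_image_urls: list = dataclass_field(default_factory=list)
    custom_images: list = dataclass_field(default_factory=list)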
Example #8
class Config:
    """
    Options found throughout the library for how to
    display docstrings in Markdown (see parameters
    for placeholders in the library documentation)
    """
    alias: Optional[str] = None
    examples_md_lang: str = default_language_examples
    remove_doctest_blanklines: bool = False
    remove_doctest_skip: bool = False
    md_section_level: int = 3
    ignore_custom_section_warning: bool = False
    members: List[str] = dataclass_field(default_factory=list)

    def __post_init__(self):
        # verify types
        config_types = get_type_hints(Config)
        for field in dataclass_fields(self):
            attr = field.name
            val = getattr(self, attr)
            type_ = type(val)

            # handle special cases (non primitives e.g. List[str])
            if attr == 'alias':
                if not isinstance(val, str) and val is not None:
                    raise TypeError(
                        f'Parameter "{attr}" is not a str or None. Type: {type_}'
                    )
            elif attr == 'members':
                if not isinstance(val, list):
                    raise TypeError(
                        f'Parameter "{attr}" is not a list. Type: {type_}')
                elif any(not isinstance(v, str) for v in val):
                    raise TypeError(
                        f'The parameter "{attr}" (list) contains non strings.')
            # handle normal case
            else:
                expected_type = config_types[attr]
                if not isinstance(val, expected_type):
                    raise TypeError(
                        f'Parameter "{attr}" is not of type {expected_type}. Type: {type_}'
                    )

    @staticmethod
    def get_default(field: str):
        """
        Gets the default value of given field of the dataclass.

        Examples
        --------
        >>> Config.get_default('examples_md_lang')
        'python'
        """
        try:
            default = Config.__dataclass_fields__[field].default
            if default is MISSING:  # pragma: no cover
                raise ValueError(f'Field "{field}" has no default value')
            return default
        except KeyError as e:
            raise AttributeError(f'Config has no attribute "{field}"') from e
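Since __post_init__ verifies every field against its annotation, constructing a Config with a wrongly typed value fails immediately. A small usage sketch, assuming the class carries the @dataclass decorator and that the module-level names it uses (default_language_examples, MISSING, get_type_hints, dataclass_fields) are in scope:

cfg = Config(md_section_level=2, members=["api", "cli"])   # passes the type checks

Config.get_default("md_section_level")   # -> 3, the declared default

try:
    Config(md_section_level="3")          # str where an int is annotated
except TypeError as exc:
    print(exc)  # Parameter "md_section_level" is not of type <class 'int'>. Type: <class 'str'>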
Example #9
class DataIndexer:
    """DataIndexer contains the argument to index data from current accessor.

    :param field_number: Current field number.
    :param aistack: Array index stack in case of nested array in a single message.
    """

    field_number: int
    aistack: List[int] = dataclass_field(default_factory=list)

    def is_valid(self) -> bool:
        """If this data indexer valid."""
        return self.field_number > 0

    def i(self, n: int) -> int:
        """Returns array element index at depth n."""
        return self.aistack[n]

    def index_stack_up(self) -> None:
        self.aistack.append(0)

    def index_stack_down(self) -> None:
        self.aistack.pop()

    @contextmanager
    def index_stack_maintain(self) -> Iterator[None]:
        try:
            self.index_stack_up()
            yield
        finally:
            self.index_stack_down()

    def index_stack_replace(self, k: int) -> None:
        self.aistack[-1] = k
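The aistack here is a stack of array element indices, one level per nested array, and index_stack_maintain pushes a level on entry and pops it on exit. A minimal usage sketch (assumes the class above is decorated with @dataclass):

di = DataIndexer(field_number=1)

with di.index_stack_maintain():        # entering an array pushes index 0
    for k in range(3):
        di.index_stack_replace(k)      # point the top of the stack at element k
        assert di.i(0) == k

assert di.aistack == []                # the level was popped on exit
assert di.is_valid()                   # field_number > 0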
Example #10
class MessageProcessor(Processor):
    """MessageProcessor implements Processor for message.
    Assuming compiler generates Message a method: bp_processor to returns this.

    :param field_processors:  List of message field's processors.
    """

    extensible: bool
    nbits: int
    field_processors: List[Processor] = dataclass_field(default_factory=list)

    def flag(self) -> int:
        return FLAG_MESSAGE

    def process(self, ctx: ProcessContext, di: DataIndexer,
                accessor: Accessor) -> None:
        if di.is_valid():
            # The invalid DataIndexer NIL_DATA_INDEXER is passed in only when this
            # message is the top-level message on the processing chain; it will be
            # dropped (or overwritten) by this message's MessageFieldProcessor and
            # never be NIL_DATA_INDEXER again.
            #
            # Rewrite the accessor if this message processor is called from an upper accessor.
            accessor = accessor.bp_get_accessor(di)

        # Record current number of bits processed.
        i = ctx.i
        # Peer message nbits, decoded from the ahead flag when extensible is set.
        ahead = 0

        if self.extensible:
            if ctx.is_encode:
                # Encode the ahead flag (this message's nbits) when extensible.
                self.encode_extensible_ahead(ctx)
            else:
                # Decode the ahead flag to obtain the peer message's nbits.
                ahead = self.decode_extensible_ahead(ctx)

        # Process fields.
        for field_processor in self.field_processors:
            field_processor.process(ctx, di, accessor)

        # Skip redundant bits post decoding.
        if self.extensible and not ctx.is_encode:
            ito = i + ahead
            if ito >= ctx.i:
                ctx.i = ito

    def encode_extensible_ahead(self, ctx: ProcessContext) -> None:
        """Encode the message nbits as the ahead flag to current bit encoding stream."""
        accessor = IntAccessor(data=self.nbits)
        di = DataIndexer(field_number=1)
        process_base_type(16, ctx, di, accessor)

    def decode_extensible_ahead(self, ctx: ProcessContext) -> int:
        """Decode the message ahead flag as the nbits from current bit decoding stream."""
        accessor = IntAccessor()
        di = DataIndexer(field_number=1)
        process_base_type(16, ctx, di, accessor)
        return accessor.data
Example #11
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    if make_dataclass:
        self.item_class = make_dataclass(
            "TestDataClass",
            [("name", list, dataclass_field(default_factory=list))],
        )
Example #12
class LookMLSourceReport(SourceReport):
    models_discovered: int = 0
    models_dropped: List[str] = dataclass_field(default_factory=list)
    views_discovered: int = 0
    views_dropped: List[str] = dataclass_field(default_factory=list)

    def report_models_scanned(self) -> None:
        self.models_discovered += 1

    def report_views_scanned(self) -> None:
        self.views_discovered += 1

    def report_models_dropped(self, model: str) -> None:
        self.models_dropped.append(model)

    def report_views_dropped(self, view: str) -> None:
        self.views_dropped.append(view)
Example #13
class LookerDashboardSourceReport(SourceReport):
    dashboards_scanned: int = 0
    charts_scanned: int = 0
    filtered_dashboards: List[str] = dataclass_field(default_factory=list)
    filtered_charts: List[str] = dataclass_field(default_factory=list)

    def report_dashboards_scanned(self) -> None:
        self.dashboards_scanned += 1

    def report_charts_scanned(self) -> None:
        self.charts_scanned += 1

    def report_dashboards_dropped(self, model: str) -> None:
        self.filtered_dashboards.append(model)

    def report_charts_dropped(self, view: str) -> None:
        self.filtered_charts.append(view)
Example #14
class Player(MovableObject):
    # If player on (0, 0) location
    HAND_LOCATION: 'Vector2D' = Vector2D(30, 22)

    # Player is a square
    SIDE_LENGTH: int = 45

    current_buffs: List[AbstractBuff] = dataclass_field(default_factory=list)
Example #15
class GlueSourceReport(SourceReport):
    tables_scanned: int = 0
    filtered: List[str] = dataclass_field(default_factory=list)

    def report_table_scanned(self) -> None:
        self.tables_scanned += 1

    def report_table_dropped(self, table: str) -> None:
        self.filtered.append(table)
Example #16
class Value:
    name: str  # How it is labelled on the PDF page (e.g. СПАСБРОСКИ - Сила, "SAVING THROWS - Strength")
    value: dataclass_field() = None
    font_size: int = 0  # zero means use the Field's default font size
    explaination: str = ""  # Why this field has this value
    english_name: str = ""  # Name translated into English

    @property
    def repr_value(self):
        return str(self.value)
Example #17
class Field:
    """ Represent a field in the PDF certificate file. """
    x: float
    y: float
    scale: float
    filename: str = dataclass_field(init=False)
    count: ClassVar[int] = 0

    def __post_init__(self):
        self.filename = f'temp{Field.count}.pdf'
        Field.count += 1
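filename is excluded from __init__ (init=False) and derived in __post_init__ from the class-level counter, so each instance gets a distinct temp name. A quick sketch of the resulting behaviour, assuming the class carries the @dataclass decorator and no Field instances were created earlier:

header = Field(x=10.0, y=20.0, scale=1.0)   # filename cannot be passed to __init__
footer = Field(x=10.0, y=780.0, scale=1.0)

print(header.filename, footer.filename)      # temp0.pdf temp1.pdf
print(Field.count)                           # 2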
Example #18
class Returns:
    attribute_return: str = dataclass_field(default='', init=False)
    attribute_return_list: List[str] = dataclass_field(default_factory=list,
                                                       init=False)

    def _get_result(self, result: Result) -> Result:
        if len(self.attribute_return_list) > 0 and isclass(result):
            result = DOCUMENT_RESULT

        for attribute in self.attribute_return_list:
            if hasattr(result, '__getitem__'):
                result = result[attribute]
            else:
                result = VALUE_RESULT

        return result

    def __getattr__(self, attr: str) -> R:
        self.attribute_return += '.' + attr
        self.attribute_return_list.append(attr)
        return self
Example #19
class Field:
    """
    A bit field in a register
    """

    width: int = 1
    reset: int = 0
    readonly: bool = False
    doc: Optional[str] = None

    mask: int = dataclass_field(init=False)
    shift: int = dataclass_field(init=False)

    def __post_init__(self):
        object.__setattr__(self, "width", int(self.width))
        if self.width < 1:
            raise ValueError("width < 1")

        object.__setattr__(self, "reset", int(self.reset))
        if self.reset.bit_length() > self.width:
            raise ValueError("reset value too large")
        elif self.reset < 0:
            raise ValueError("negative reset")
Example #20
class StateTransition:
    """A state transition where up until ``time``, a metric resided in state
    ``state``.

    For any metric ``M``, we observe a series of state transitions.  Suppose
    there are two consecutive state transitions ``t1`` and ``t2``, i.e.

        ``t1.time < t2.time``

    We say that these transitions have _"last semantics"_, which means that at
    any time ``t1.time < t <= t2.time``, values of ``M`` were in state
    ``t2.state``.  In other words, the metric resides in some state ``t1.state``
    up until the time the new transition ``t2`` happens, at which point we know
    that ``t2.state`` occupied the _last time interval_.
    """

    time: Timestamp
    state: State = dataclass_field(compare=False, default=State.UNKNOWN)
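Because state is declared with compare=False, the generated equality (and ordering, if enabled) considers only time; and per the docstring's last semantics, the state carried by a transition labels the interval that the transition closes. A hedged illustration, assuming @dataclass with the default eq on the class and that Timestamp accepts plain numbers:

t1 = StateTransition(time=100)                        # state defaults to State.UNKNOWN
t2 = StateTransition(time=200, state=State.UNKNOWN)

# compare=False: only `time` participates in comparisons
assert t1 != t2
assert StateTransition(time=200, state=State.UNKNOWN) == t2

# last semantics: a value of the metric observed at any t with
# t1.time < t <= t2.time is attributed to t2.state.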
Example #21
    def complex_initialized_dataclass_field(field_initializer: Callable,
                                            **kwargs) -> Field:
        """
        Allows for the setting of a function to be called on the
        named parameter associated with a field during initialization,
        after __init__() completes.

        Args:
            field_initializer (Callable):
                The function to be called on the field

            **kwargs: To be passed downstream to the dataclasses.field method

        Returns:
            (dataclasses.Field) that contains the field_initializer and kwargs info
        """
        metadata = kwargs.get("metadata") or {}
        assert DataclassFieldCaster.COMPLEX_INITIALIZER not in metadata
        metadata[DataclassFieldCaster.COMPLEX_INITIALIZER] = field_initializer
        kwargs["metadata"] = metadata
        return dataclass_field(**kwargs)
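The helper only stamps field_initializer into the field's metadata under DataclassFieldCaster.COMPLEX_INITIALIZER; something else has to read that metadata after __init__ and apply it. A hedged sketch of both sides, assuming the helper is exposed as a staticmethod of DataclassFieldCaster (as its indentation suggests); the consuming __post_init__ is spelled out explicitly because that part, like the example class and field names, is an assumption:

from dataclasses import dataclass, fields as dataclass_fields

@dataclass
class ClipInfo:  # hypothetical example class
    # after __init__, turn the comma-separated string into a list of ints
    frame_ids: list = DataclassFieldCaster.complex_initialized_dataclass_field(
        lambda raw: [int(x) for x in raw.split(",")],
        default="",
    )

    def __post_init__(self):
        # assumed consumer: run each stored initializer on its field's value
        for f in dataclass_fields(self):
            initializer = f.metadata.get(DataclassFieldCaster.COMPLEX_INITIALIZER)
            if initializer is not None:
                setattr(self, f.name, initializer(getattr(self, f.name)))

clip = ClipInfo(frame_ids="1,2,3")
assert clip.frame_ids == [1, 2, 3]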
Example #22
class Field(Grouped, Selected, Aliased):
    field: Union[str, Type['Document'], Type[object]]
    used_in_by: bool = dataclass_field(default=False)

    def _to_group_stmt(self, prefix: str, collected: str,
                       alias_to_result: Dict[str, Result]) -> Stmt:
        if self.field in (object, ):
            return Stmt(collected, {},
                        result=ListResult(DOCUMENT_RESULT),
                        alias_to_result=alias_to_result,
                        aliases=self.aliases)

        if self.used_in_by:
            return Stmt(f'field_{self.field}', {},
                        result=VALUE_RESULT,
                        alias_to_result=alias_to_result,
                        aliases=self.aliases)

        return Stmt(f'''{collected}[*].{self.field}''', {},
                    result=ListResult(VALUE_RESULT),
                    alias_to_result=alias_to_result,
                    aliases=self.aliases)

    def _to_select_stmt(self, prefix: str, relative_to: str,
                        alias_to_result: Dict[str, Result]) -> Stmt:
        if self.field in (object, ):
            return Stmt(relative_to, {},
                        result=DOCUMENT_RESULT,
                        aliases=self.aliases,
                        alias_to_result=alias_to_result)

        return Stmt(f'''{relative_to}.{self.field}''', {},
                    result=VALUE_RESULT,
                    alias_to_result=alias_to_result,
                    aliases=self.aliases)

    def __str__(self) -> str:
        return str(self.field)
Example #23
class BaseFilter:
    op: FilterOperation
    field: str
    value: Any
    caseSensitive: bool = dataclass_field(default=False)
Example #24
class Fieldset():
    """ Models a collection of named fields that apply to a register either """
    """ always or under a particular condition """

    size: int
    """ The size (width) of this fieldset """

    condition: str = ""
    """ A text description of conditions under which this fieldset is valid """

    fields: List[Field] = dataclass_field(default_factory=list)
    """ A list of fields that make up this fieldset """

    def __str__(self):
        if self.condition:  # empty string means the fieldset always applies
            msg = "Fieldset when {condition}: ".format(
                condition=self.condition)
        else:
            msg = "Fieldset: "

        for field in self.fields:
            msg += "{name}=({msb}:{lsb}) ".format(name=field.name,
                                                  msb=field.msb,
                                                  lsb=field.lsb)

        return msg

    def add_field(self, name, msb, lsb):
        self.fields.append(Field(str(name), int(msb), int(lsb)))

    def is_valid(self):
        expected_total_set = set(range(0, self.size))
        total_set = set()

        for f_idx, f in enumerate(self.fields):
            # Check individual field ranges
            if not (0 <= f.lsb <= f.msb < self.size):
                logger.debug(
                    "Invalid field position for \"{name}\" ({msb}:{lsb})".
                    format(name=f.name, msb=f.msb, lsb=f.lsb))
                return False

            # Check for intersections with other fields in this fieldset
            f_set = set(range(f.lsb, f.msb + 1))
            total_set = total_set.union(f_set)
            for x_idx, x in enumerate(self.fields):
                if f_idx == x_idx: continue
                x_set = set(range(x.lsb, x.msb + 1))
                intersect = f_set.intersection(x_set)
                if len(intersect) > 0:
                    logger.debug(
                        "Invalid field overlap, \"{f_name}\" ({f_msb}:{f_lsb}) "
                        "overlaps with \"{x_name}\" ({x_msb}:{x_lsb})".format(
                            f_name=f.name,
                            f_msb=f.msb,
                            f_lsb=f.lsb,
                            x_name=x.name,
                            x_msb=x.msb,
                            x_lsb=x.lsb))
                    return False

        return True
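is_valid only checks that every field lies inside size and that no two fields overlap (full bit coverage is computed into expected_total_set but never enforced). A short usage sketch; the Field type with name, msb and lsb attributes is implied by add_field but not shown above, so treat it and the @dataclass decorator as assumed:

fs = Fieldset(size=8, condition="mode == 1")
fs.add_field("opcode", msb=3, lsb=0)
fs.add_field("flags", msb=7, lsb=4)
assert fs.is_valid()          # both fields in range, no overlap

fs.add_field("bad", msb=5, lsb=2)
assert not fs.is_valid()      # bits 2-5 overlap both existing fields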
Example #25
class Query(ABC, Returns, Aliased):
    matchers: List[Filter] = dataclass_field(default_factory=list, init=False)

    @abstractmethod
    def _to_stmt(self,
                 prefix: str = 'p',
                 alias_to_result: Dict[str, Result] = None) -> Stmt:
        pass

    def match(self, *match_objects, **key_value_match) -> Q:
        self.matchers += match_objects

        for key, value in key_value_match.items():
            self.matchers.append(eq(key, value))

        return self

    def limit(self, limit) -> Q:
        # TODO
        raise ValueError

    def sort(self, fields: List[str]) -> Q:
        # TODO
        raise ValueError

    def dedup(self, fields: List[str]) -> Q:
        # TODO
        raise ValueError

    def group(
        self, *fields: Union[str, 'Field', Type['Document'], Type[object],
                             'Var'],
        **field_to_display_to_field: Union[str, 'Grouped', Type['Document'],
                                           Type[object], 'Var']
    ) -> 'Group':

        if len(fields) == 0 and len(field_to_display_to_field) == 0:
            fields = [object]

        display_field_to_grouped = {}

        for field in fields:
            if isinstance(field, Field):
                display_field_to_grouped[field.field] = field
                continue

            if isinstance(field, Var):
                display_field_to_grouped[field._name] = field
                continue

            if isinstance(field, Grouped):
                display_field_to_grouped[str(field)] = field
                continue

            display_field_to_grouped[
                'document' if isclass(field) else str(field)] = Field(
                    field=field)

        for display_field, field in field_to_display_to_field.items():
            if isinstance(field, Grouped):
                display_field_to_grouped[display_field] = field
                continue

            display_field_to_grouped[display_field] = Field(field=field)

        return Group(query=self,
                     display_field_to_grouped=display_field_to_grouped)

    def select(
        self, *fields: Union[str, 'Field', Type['Document'], Type[object],
                             'Var'],
        **field_to_display_to_field: Union[str, 'Selected', Type['Document'],
                                           Type[object]]
    ) -> 'Select':

        if len(fields) == 0 and len(field_to_display_to_field) == 0:
            fields = [object]

        display_field_to_grouped = {}

        for field in fields:
            if isinstance(field, Field):
                display_field_to_grouped[field.field] = field
                continue

            if isinstance(field, Var):
                display_field_to_grouped[field._name] = field
                continue

            display_field_to_grouped[
                'document' if isclass(field) else str(field)] = Field(
                    field=field)

        for display_field, field in field_to_display_to_field.items():
            if isinstance(field, Selected):
                display_field_to_grouped[display_field] = field
                continue

            display_field_to_grouped[display_field] = Field(field=field)

        return Select(query=self,
                      display_field_to_grouped=display_field_to_grouped)

    def _get_step_stmts(
            self,
            relative_to: str,
            prefix: str,
            returns: str,
            bind_vars: Dict[str, Any] = None,
            bind_vars_index: int = 0) -> Tuple[str, Dict[str, Any], int]:
        step_stmts = []

        if not bind_vars:
            bind_vars = {}

        for matcher in self.matchers:
            query_stmt, matcher_vars = matcher._to_filter_stmt(
                prefix=f'{prefix}_{bind_vars_index}',
                relative_to=relative_to).expand_without_return()
            step_stmts.append(query_stmt)
            bind_vars.update(matcher_vars)
            bind_vars_index += len(matcher_vars)

        for alias in self.aliases:
            step_stmts.append(f'''LET {alias} = {returns}''')

        return DELIMITER.join(step_stmts), bind_vars, bind_vars_index

    def array(self, inner_query: 'InnerQuery') -> 'Array':
        return Array(outer_query=self, inner_query=inner_query)
Example #26
class Aliased:
    aliases: List[str] = dataclass_field(default_factory=list, init=False)

    def as_var(self, variable: str) -> Q:
        self.aliases.append(variable)
        return self
Example #27
class EdgeQuery(Filter, Grouped, Selected, InnerQuery):
    edge_collections: List[EdgeCollection]
    outer_query: Query
    direction: str
    min_depth: int = dataclass_field(default=1)
    max_depth: int = dataclass_field(default=1)

    def __post_init__(self):
        if self.max_depth is None:
            if self.min_depth is None:
                self.min_depth, self.max_depth = 1, 1
                return
            self.max_depth = self.min_depth
            return

        if self.min_depth is None:
            self.min_depth = 1

    def to(self, *target_collection_types: Type['Document']):
        return EdgeTargetQuery(
            outer_query_returns='',
            outer_query=self,
            target_collections=[
                t._get_collection() for t in target_collection_types
            ],
            direction=self.direction,
        )

    def _get_traversal_stmt(self,
                            prefix: str,
                            relative_to: str = '',
                            alias_to_result: Dict[str, Result] = None):
        if not alias_to_result:
            alias_to_result = {}

        result = self._get_result(
            AnyResult([e.document_type for e in self.edge_collections]
                      ) if self.edge_collections else DOCUMENT_RESULT)
        step_stmts, bind_vars, bind_vars_index = self._get_step_stmts(
            relative_to=f'{prefix}_e',
            returns=f'{prefix}_e' + self.attribute_return,
            prefix=prefix)

        if self.outer_query:
            previous_stmt = self.outer_query._to_stmt(
                prefix=f'{prefix}_0', alias_to_result=alias_to_result)
            alias_to_result.update(previous_stmt.alias_to_result)
            previous_str, previous_vars = previous_stmt.expand_without_return()
            bind_vars.update(previous_vars)

            return Stmt(f'''
                {previous_str}
                    FOR {prefix}_v, {prefix}_e IN {self.min_depth}..{self.max_depth} {self.direction} {previous_stmt.returns}._id {traversal_edge_collection_names(self.edge_collections)}
                        {step_stmts}
                ''',
                        bind_vars,
                        returns=f'{prefix}_e' + self.attribute_return,
                        result=result,
                        aliases=self.aliases,
                        alias_to_result=alias_to_result)

        return Stmt(f'''
            FOR {prefix}_v, {prefix}_e IN {self.min_depth}..{self.max_depth} {self.direction} {relative_to}._id {traversal_edge_collection_names(self.edge_collections)}
                {step_stmts}
        ''',
                    bind_vars,
                    returns=f'{prefix}_e' + self.attribute_return,
                    result=result,
                    aliases=self.aliases)

    def _to_stmt(self,
                 prefix: str = 'p',
                 alias_to_result: Dict[str, Result] = None) -> Stmt:
        return self._get_traversal_stmt(prefix,
                                        alias_to_result=alias_to_result,
                                        relative_to=self.outer_query_returns)

    def _to_filter_stmt(self,
                        prefix: str = 'p',
                        relative_to: str = None) -> Stmt:
        traversal_stmt = self._get_traversal_stmt(prefix,
                                                  relative_to=relative_to)
        traversal_stmt.query_str = f'''
            LET {prefix}_sub = (
                {traversal_stmt.query_str}
                RETURN 1
            )
    
            FILTER LENGTH({prefix}_sub) > 0'''
        return traversal_stmt

    def _to_group_stmt(self, prefix: str, collected: str,
                       alias_to_result: Dict[str, Result]) -> Stmt:
        traversal_stmt = self._get_traversal_stmt(
            prefix,
            relative_to=f'{prefix}_doc',
            alias_to_result=alias_to_result)

        traversal_stmt.query_str = f'''
            FOR {prefix}_doc in {collected}[*]
                {traversal_stmt.query_str}
        '''

        traversal_stmt.result = ListResult(
            AnyResult([e.document_type for e in self.edge_collections]))

        return traversal_stmt

    def _to_select_stmt(self, prefix: str, relative_to: str,
                        alias_to_result: Dict[str, Result]) -> Stmt:
        traversal_stmt = self._get_traversal_stmt(
            prefix, relative_to=relative_to, alias_to_result=alias_to_result)
        traversal_stmt.result = AnyResult(
            [e.document_type for e in self.edge_collections])
        return traversal_stmt
Example #28
class Select(Query, Selected):
    query: Query
    display_field_to_grouped: Dict[str, Grouped] = dataclass_field(
        default_factory=dict)
    by_fields: List[str] = dataclass_field(default_factory=list)

    def __post_init__(self):
        self.aliases = self.query.aliases

    def _to_stmt(self,
                 prefix: str = 'p',
                 alias_to_result: Dict[str, Result] = None) -> Stmt:
        if not alias_to_result:
            alias_to_result = {}

        stmt = self.query._to_stmt(prefix=f'{prefix}_0',
                                   alias_to_result=alias_to_result)
        alias_to_result.update(stmt.alias_to_result)
        previous, bind_vars = stmt.expand_without_return()

        bind_vars_index = 1
        groups_stmt = []

        result = {}
        for display_field, group_field in self.display_field_to_grouped.items(
        ):
            if isinstance(group_field,
                          Field) and group_field.field in self.by_fields:
                group_field.used_in_by = True

            group_stmt = group_field._to_select_stmt(
                prefix=f'{prefix}_{bind_vars_index}',
                relative_to=stmt.returns,
                alias_to_result=alias_to_result)
            alias_to_result.update(group_stmt.alias_to_result)
            group_str, b_vars = group_stmt.expand()

            result[display_field] = group_stmt.result

            field_bind = f'{prefix}_{bind_vars_index + 1}'
            groups_stmt.append(f'@{field_bind}: ({group_str})')
            bind_vars[field_bind] = display_field
            bind_vars.update(b_vars)
            bind_vars_index += 2

        return Stmt(f'''
        {previous}
        RETURN {{
            {f',{DELIMITER}'.join(groups_stmt)}
        }}
        ''',
                    bind_vars,
                    result=self._get_result(DictResult(result)),
                    aliases=self.aliases,
                    alias_to_result=alias_to_result)

    def _to_select_stmt(self,
                        prefix: str,
                        relative_to: str,
                        alias_to_result: Dict[str, Result] = None) -> Stmt:
        return self._to_stmt(prefix, alias_to_result=alias_to_result)
Example #29
class Group(Query):
    query: Query
    display_field_to_grouped: Dict[str, Grouped] = dataclass_field(
        default_factory=dict)
    by_fields: List[str] = dataclass_field(default_factory=list)

    def __post_init__(self):
        self.aliases = self.query.aliases

    def by(self, *fields: str) -> 'Group':
        for field in fields:
            self.by_fields.append(field)

        return self

    def _to_stmt(self,
                 prefix: str = 'p',
                 alias_to_result: Dict[str, Result] = None) -> Stmt:
        if not alias_to_result:
            alias_to_result = {}

        if len(self.by_fields) == 0:
            self.by_fields = ['_key']

        stmt = self.query._to_stmt(f'{prefix}_0', alias_to_result)

        alias_to_result.update(stmt.alias_to_result)
        previous, bind_vars = stmt.expand_without_return()
        previous_result = stmt.returns

        by_fields_stmt = []

        for by_field in self.by_fields:
            if isinstance(by_field, str):
                by_fields_stmt.append(
                    f'field_{by_field} = {previous_result}.{by_field}')
            elif isinstance(by_field, Var):
                by_fields_stmt.append(
                    f'field_{by_field._name} = {by_field.attribute_return}')
            else:
                raise TypeError

        bind_vars_index = 1
        groups_stmt = []

        result = {}
        for display_field, group_field in self.display_field_to_grouped.items(
        ):
            print(display_field, group_field)

            if isinstance(group_field,
                          Field) and group_field.field in self.by_fields:
                group_field.used_in_by = True

            group_stmt = group_field._to_group_stmt(
                prefix=f'{prefix}_{bind_vars_index}',
                collected='groups',
                alias_to_result=alias_to_result)
            alias_to_result.update(group_stmt.alias_to_result)
            group_str, b_vars = group_stmt.expand()
            result[display_field] = group_stmt.result

            field_bind = f'{prefix}_{bind_vars_index + 1}'
            groups_stmt.append(f'@{field_bind}: ({group_str})')
            bind_vars[field_bind] = display_field
            bind_vars.update(b_vars)
            bind_vars_index += 2

        return Stmt(f'''
        {previous}
        COLLECT {', '.join(by_fields_stmt)} INTO groups = {previous_result}
        RETURN {{
            {f',{DELIMITER}'.join(groups_stmt)}
        }}
        ''',
                    bind_vars,
                    result=self._get_result(DictResult(result)),
                    aliases=self.aliases)
Example #30
class Filter(BaseFilter):
    children: List[BaseFilter] = dataclass_field(default_factory=list)
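A general reminder this last example touches on: for mutable per-instance values, a dataclass field needs default_factory, which is called once per instance, while default= stores the given object itself as the single shared default. A minimal demonstration (class names hypothetical):

from dataclasses import dataclass, field as dataclass_field
from typing import Callable, List

@dataclass
class WithFactory:                   # hypothetical
    children: List[str] = dataclass_field(default_factory=list)

@dataclass
class WithPlainDefault:              # hypothetical
    # default= keeps the object as-is: the lambda itself becomes the value
    children: Callable[[], list] = dataclass_field(default=lambda: [])

a, b = WithFactory(), WithFactory()
assert a.children == [] and a.children is not b.children   # fresh list per instance
assert callable(WithPlainDefault().children)               # the lambda, not a list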