Example #1
def load_sentry():
    from resources.constants import RELEASE  # pylint: disable=import-error

    if RELEASE != "LOCAL":
        try:
            import sentry_sdk

            sentry_sdk.set_tag("release", RELEASE)

            def strip_sensitive_data(event, hint):
                print(event, flush=True)

                for i, value in enumerate(
                        event.get("threads", {}).get("values", [])):
                    for ii, frame in enumerate(
                            value.get("stacktrace", {}).get("frames", [])):
                        for env_var_name, _ in frame.get("environ",
                                                         {}).items():
                            if env_var_name in VALID_SECRETS:
                                frame["environ"][env_var_name] = "REDACTED"

                                event["threads"]["values"][i]["stacktrace"][
                                    "frames"][ii] = frame
                                print("found a yikes", flush=True)

                return event

            sentry_sdk.init(SENTRY_URL,
                            traces_sample_rate=1.0,
                            before_send=strip_sensitive_data)

        except Exception:
            print("sentry failed", flush=True)
Example #2
    def get(self, request, organization):

        project_ids = self.get_requested_project_ids(request)
        projects = self.get_projects(request, organization, project_ids)

        len_projects = len(project_ids)
        sentry_sdk.set_tag("query.num_projects", len_projects)
        sentry_sdk.set_tag("query.num_projects.grouped",
                           format_grouped_length(len_projects))

        if len(projects) == 0:
            return Response([])

        with self.handle_query_errors():
            result = discover.query(
                query="has:sdk.version",
                selected_columns=[
                    "project", "sdk.name", "sdk.version", "last_seen()"
                ],
                orderby="-project",
                params={
                    "start": timezone.now() - timedelta(days=1),
                    "end": timezone.now(),
                    "organization_id": organization.id,
                    "project_id": [p.id for p in projects],
                },
                referrer="api.organization-sdk-updates",
            )

        return Response(serialize(result["data"], projects))
Example #3
def init_sentry(app: Flask):
    """
    Configure Sentry.
    We need the app to read the Sentry DSN from configuration, and also
    to send some additional meta information.
    """
    sentry_dsn = app.config.get("SENTRY_DSN")
    if not sentry_dsn:
        app.logger.info(
            "[FLEXMEASURES] No SENTRY_DSN setting found, so initialising Sentry cannot happen ..."
        )
        return
    app.logger.info("[FLEXMEASURES] Initialising Sentry ...")
    sentry_sdk.init(
        dsn=sentry_dsn,
        integrations=[FlaskIntegration(), RqIntegration()],
        debug=app.debug,
        release=f"flexmeasures@{get_distribution('flexmeasures').version}",
        # user data (current user id, email address, username) is attached to the event.
        send_default_pii=True,
        environment=app.env,
        **app.config["FLEXMEASURES_SENTRY_CONFIG"],
    )
    sentry_sdk.set_tag("mode", app.config.get("FLEXMEASURES_MODE"))
    sentry_sdk.set_tag("platform-name",
                       app.config.get("FLEXMEASURES_PLATFORM_NAME"))
Example #4
    def serialize(self, result, root, event_id=None):
        parent_map = {item["trace.parent_span"]: item for item in result}
        trace_results = [serialize_event(root, None, True)]

        snuba_event = find_event(result, lambda item: item["id"] == event_id)
        if snuba_event is None:
            sentry_sdk.set_tag("query.error_reason", "Matching event not found")
            raise ParseError("event matching requested id not found")

        if root["id"] != event_id:
            # Get the root event and see if the current event's span is in the root event
            root_event = eventstore.get_event_by_id(root["project_id"], root["id"])
            root_span = find_event(
                root_event.data.get("spans", []),
                lambda item: item["span_id"] == snuba_event["trace.parent_span"],
            )

            # For the light response, the parent will be unknown unless it is a direct descendant of the root
            trace_results.append(
                serialize_event(snuba_event, root["id"] if root_span is not None else None)
            )

        event = eventstore.get_event_by_id(snuba_event["project_id"], event_id)
        for span in event.data.get("spans", []):
            if span["span_id"] in parent_map:
                child_event = parent_map[span["span_id"]]
                trace_results.append(serialize_event(child_event, event_id))

        return trace_results
Example #5
        def tracing_function(request, context):
            try:
                start = perf_counter_ns()
                res = prev_func(request, context)
                finished = perf_counter_ns()
                duration = (finished - start) / 1e6  # ms
                user_id = getattr(context, "user_id", None)
                is_api_key = getattr(context, "is_api_key", None)
                self._store_log(method, None, duration, user_id, is_api_key, request, res, None)
                self._observe_in_histogram(method, "", "", duration)
            except Exception as e:
                finished = perf_counter_ns()
                duration = (finished - start) / 1e6  # ms
                code = getattr(context.code(), "name", None)
                traceback = "".join(format_exception(type(e), e, e.__traceback__))
                user_id = getattr(context, "user_id", None)
                is_api_key = getattr(context, "is_api_key", None)
                self._store_log(method, code, duration, user_id, is_api_key, request, None, traceback)
                self._observe_in_histogram(method, code or "", type(e).__name__, duration)

                if not code:
                    sentry_sdk.set_tag("context", "servicer")
                    sentry_sdk.set_tag("method", method)
                    sentry_sdk.capture_exception(e)

                raise e
            return res
Example #6
    def init_sentry(self):
        # sentry log handler
        sentry_logging = LoggingIntegration(
            level=logging.INFO,  # Capture info and above as breadcrumbs
            event_level=logging.ERROR  # Send errors as events
        )

        # init sentry logging
        sentry_sdk.init(
            dsn="https://[email protected]/2?verify_ssl=0",
            integrations=[sentry_logging],
            release=sickrage.version(),
            environment=('master', 'develop')['dev' in sickrage.version()],
            ignore_errors=[
                'KeyboardInterrupt', 'PermissionError', 'FileNotFoundError',
                'EpisodeNotFoundException'
            ])

        # sentry tags
        sentry_tags = {
            'platform': platform.platform(),
            'locale': repr(locale.getdefaultlocale()),
            'python': platform.python_version()
        }

        # set sentry tags
        for tag_key, tag_value in sentry_tags.items():
            sentry_sdk.set_tag(tag_key, tag_value)
Example #7
def init(project: SentryProject) -> None:
    # forks like to mess with this, so double check
    comma_remote = is_comma_remote() and "commaai" in get_origin(default="")
    if not comma_remote or not is_registered_device() or PC:
        return

    env = "release" if is_tested_branch() else "master"
    dongle_id = Params().get("DongleId", encoding='utf-8')

    integrations = []
    if project == SentryProject.SELFDRIVE:
        integrations.append(ThreadingIntegration(propagate_hub=True))
    else:
        sentry_sdk.utils.MAX_STRING_LENGTH = 8192

    sentry_sdk.init(project.value,
                    default_integrations=False,
                    release=get_version(),
                    integrations=integrations,
                    traces_sample_rate=1.0,
                    environment=env)

    sentry_sdk.set_user({"id": dongle_id})
    sentry_sdk.set_tag("dirty", is_dirty())
    sentry_sdk.set_tag("origin", get_origin())
    sentry_sdk.set_tag("branch", get_branch())
    sentry_sdk.set_tag("commit", get_commit())
    sentry_sdk.set_tag("device", HARDWARE.get_device_type())

    if project == SentryProject.SELFDRIVE:
        sentry_sdk.Hub.current.start_session()
Example #8
    def validate(self, data):
        organization = self.context["organization"]
        query_info = data["query_info"]

        # Validate the project field, if provided
        # A PermissionDenied error will be raised in `get_projects_by_id` if the request is invalid
        project_query = query_info.get("project")
        if project_query:
            get_projects_by_id = self.context["get_projects_by_id"]
            # Coerce the query into a set
            if isinstance(project_query, list):
                projects = get_projects_by_id(set(map(int, project_query)))
            else:
                projects = get_projects_by_id({int(project_query)})
            query_info["project"] = [project.id for project in projects]

        # Discover Pre-processing
        if data["query_type"] == ExportQueryType.DISCOVER_STR:
            # coerce the fields into a list as needed
            fields = query_info.get("field", [])
            if not isinstance(fields, list):
                fields = [fields]

            if len(fields) > MAX_FIELDS:
                detail = f"You can export up to {MAX_FIELDS} fields at a time. Please delete some and try again."
                raise serializers.ValidationError(detail)

            query_info["field"] = fields

            if "project" not in query_info:
                projects = self.context["get_projects"]()
                query_info["project"] = [project.id for project in projects]

            # make sure to fix the export start/end times to ensure consistent results
            try:
                start, end = get_date_range_from_params(query_info)
            except InvalidParams as e:
                sentry_sdk.set_tag("query.error_reason", "Invalid date params")
                raise serializers.ValidationError(str(e))

            if "statsPeriod" in query_info:
                del query_info["statsPeriod"]
            if "statsPeriodStart" in query_info:
                del query_info["statsPeriodStart"]
            if "statsPeriodEnd" in query_info:
                del query_info["statsPeriodEnd"]
            query_info["start"] = start.isoformat()
            query_info["end"] = end.isoformat()

            # validate the query string by trying to parse it
            processor = DiscoverProcessor(
                discover_query=query_info,
                organization_id=organization.id,
            )
            try:
                get_filter(query_info["query"], processor.params)
            except InvalidSearchQuery as err:
                raise serializers.ValidationError(str(err))

        return data
Example #9
    def get(self, request: Request, organization, key) -> Response:
        if not TAG_KEY_RE.match(key):
            return Response({"detail": f'Invalid tag key format for "{key}"'}, status=400)

        sentry_sdk.set_tag("query.tag_key", key)

        try:
            # still used by events v1 which doesn't require global views
            filter_params = self.get_snuba_params(request, organization, check_global_views=False)
        except NoProjects:
            paginator = SequencePaginator([])
        else:
            with self.handle_query_errors():
                paginator = tagstore.get_tag_value_paginator_for_projects(
                    filter_params["project_id"],
                    filter_params.get("environment"),
                    key,
                    filter_params["start"],
                    filter_params["end"],
                    query=request.GET.get("query"),
                    include_transactions=request.GET.get("includeTransactions") == "1",
                )

        return self.paginate(
            request=request,
            paginator=paginator,
            on_results=lambda results: serialize(results, request.user),
        )
Example #10
def error_handler(x_request_id=None, _msg=None, exception=None):
    """
    Handle an error by building an error object and sending it to Sentry.

    Parameters:
        x_request_id: UniqueId
        _msg: text that complements the exception
        exception: the error, Optional[BaseException]

    Returns:
        dict with error details
    """
    sentry_sdk.set_tag("x_request_id", x_request_id)
    exception_id = sentry_sdk.capture_exception(exception, _msg)
    send_err(
        {
            "exception_id": exception_id,
            "exception": exception,
        },
        x_request_id=x_request_id,
    )

    return {
        "error": True,
        "message": f"{_msg}",
        "exception_id": exception_id,
        "x_request_id": x_request_id,
        "status_code": 500,
    }
Example #11
    def get(self, request, organization):
        with sentry_sdk.start_span(op="discover.endpoint", description="filter_params") as span:
            span.set_data("organization", organization)
            if not self.has_feature(organization, request):
                span.set_data("using_v1_results", True)
                sentry_sdk.set_tag("stats.using_v1", organization.slug)
                return self.get_v1_results(request, organization)

            top_events = 0

            if "topEvents" in request.GET:
                try:
                    top_events = int(request.GET.get("topEvents", 0))
                except ValueError:
                    return Response({"detail": "topEvents must be an integer"}, status=400)
                if top_events > MAX_TOP_EVENTS:
                    return Response(
                        {"detail": f"Can only get up to {MAX_TOP_EVENTS} top events"},
                        status=400,
                    )
                elif top_events <= 0:
                    return Response({"detail": "If topEvents needs to be at least 1"}, status=400)

            # The partial parameter determines whether or not partial buckets are allowed.
            # The last bucket of the time series can potentially be a partial bucket when
            # the start of the bucket does not align with the rollup.
            allow_partial_buckets = request.GET.get("partial") == "1"

        def get_event_stats(query_columns, query, params, rollup):
            if top_events > 0:
                return discover.top_events_timeseries(
                    timeseries_columns=query_columns,
                    selected_columns=request.GET.getlist("field")[:],
                    user_query=query,
                    params=params,
                    orderby=self.get_orderby(request),
                    rollup=rollup,
                    limit=top_events,
                    organization=organization,
                    referrer="api.organization-event-stats.find-topn",
                    allow_empty=False,
                )
            return discover.timeseries_query(
                selected_columns=query_columns,
                query=query,
                params=params,
                rollup=rollup,
                referrer="api.organization-event-stats",
            )

        return Response(
            self.get_event_stats_data(
                request,
                organization,
                get_event_stats,
                top_events,
                allow_partial_buckets=allow_partial_buckets,
            ),
            status=200,
        )
Example #12
def before_request() -> None:
    if current_user and current_user.is_authenticated:
        sentry_sdk.set_user({
            "id": current_user.id,
        })
    sentry_sdk.set_tag("correlation-id", get_or_set_correlation_id())
    g.request_start = time.perf_counter()
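A minimal sketch of wiring a hook like this into a Flask app. get_or_set_correlation_id is the project's own helper and is stubbed with a placeholder here; the current_user lookup is omitted to keep the sketch self-contained.

import time
import uuid

import sentry_sdk
from flask import Flask, g

app = Flask(__name__)


def get_or_set_correlation_id() -> str:
    # Placeholder for the project's real helper, which presumably reads or
    # generates a per-request correlation id.
    return uuid.uuid4().hex


@app.before_request
def before_request() -> None:
    # Tag every Sentry event emitted during this request with the correlation id.
    sentry_sdk.set_tag("correlation-id", get_or_set_correlation_id())
    g.request_start = time.perf_counter()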
Example #13
def collect_river_data(parsed_args, config, *args):
    """
    :param :py:class:`argparse.Namespace` parsed_args:
    :param :py:class:`nemo_nowcast.Config` config:

    :return: Nowcast system checklist items
    :rtype: dict
    """
    river_name = parsed_args.river_name
    sentry_sdk.set_tag("river-name", river_name)
    data_date = parsed_args.data_date
    logger.info(
        f"Collecting {river_name} river data for {data_date.format('YYYY-MM-DD')}"
    )
    stn_id = config["rivers"]["stations"][river_name]
    sentry_sdk.set_tag("stn-id", stn_id)
    csv_file_template = config["rivers"]["csv file template"]
    csv_file = Path(
        config["rivers"]["datamart dir"]) / csv_file_template.format(
            stn_id=stn_id)
    day_avg_discharge = _calc_day_avg_discharge(csv_file, data_date)
    daily_avg_file = Path(config["rivers"]["SOG river files"][river_name])
    _store_day_avg_discharge(data_date, day_avg_discharge, daily_avg_file)
    checklist = {
        "river name": river_name,
        "data date": data_date.format("YYYY-MM-DD")
    }
    logger.info(
        f"Appended {river_name} river average discharge for {data_date.format('YYYY-MM-DD')} to: {daily_avg_file}"
    )
    return checklist
Example #14
def run_worker(queues=None):
    from flask import current_app as app

    sentry_sdk.set_tag("pcapi.app_type", "worker")
    queues = queues or ["default"]
    logger.info("Worker: listening to queues %s", queues)

    log_redis_connection_status()
    with app.app_context():
        log_database_connection_status()

    while True:
        try:
            with app.app_context():
                # These session removals are meant to prevent open db connections
                # from spreading through forked children and causing bugs in the jobs
                # https://python-rq.org/docs/workers/#the-worker-lifecycle
                # https://docs.sqlalchemy.org/en/13/core/connections.html?highlight=dispose#engine-disposal
                db.session.remove()
                db.session.close()
                db.engine.dispose()
            with Connection(conn):
                worker = Worker(list(map(Queue, queues)),
                                exception_handlers=[log_worker_error])
                worker.work()

        except redis.ConnectionError:
            logger.warning("Worker connection error. Restarting in 5 seconds")
            time.sleep(5)
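The conn handed to Connection() is not defined in this excerpt; it is presumably a Redis client created elsewhere in the module. A minimal sketch of that assumption (the URL is a placeholder):

import redis

# Placeholder URL; the real worker presumably reads its Redis settings from configuration.
conn = redis.from_url("redis://localhost:6379/0")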
Example #15
def main():
    clear_apport_folder()  # Clear apport folder on start, otherwise duplicate crashes won't register
    initial_tombstones = set(get_tombstones())

    sentry_sdk.utils.MAX_STRING_LENGTH = 8192
    sentry_sdk.init(
        "https://[email protected]/157615",
        default_integrations=False,
        release=get_version())

    dongle_id = Params().get("DongleId", encoding='utf-8')
    sentry_sdk.set_user({"id": dongle_id})
    sentry_sdk.set_tag("dirty", get_dirty())
    sentry_sdk.set_tag("origin", get_origin())
    sentry_sdk.set_tag("branch", get_branch())
    sentry_sdk.set_tag("commit", get_commit())
    sentry_sdk.set_tag("device", HARDWARE.get_device_type())

    while True:
        now_tombstones = set(get_tombstones())

        for fn, _ in (now_tombstones - initial_tombstones):
            try:
                cloudlog.info(f"reporting new tombstone {fn}")
                if fn.endswith(".crash"):
                    report_tombstone_apport(fn)
                else:
                    report_tombstone_android(fn)
            except Exception:
                cloudlog.exception(f"Error reporting tombstone {fn}")

        initial_tombstones = now_tombstones
        time.sleep(5)
Example #16
def logger(txt, err=False, module="", obj=None):
    if (err):
        sentry_sdk.set_tag('Module', module)
        sentry_sdk.set_extra(module, obj)
        sentry_sdk.capture_exception(txt)
    if (current_app.config["DEBUG"]):
        print(txt)
Example #17
    def _get_dashboard(self, request, organization, dashboard_id):
        prebuilt = Dashboard.get_prebuilt(dashboard_id)
        sentry_sdk.set_tag("dashboard.is_prebuilt", prebuilt is not None)
        if prebuilt:
            return prebuilt
        return Dashboard.objects.get(id=dashboard_id,
                                     organization_id=organization.id)
Example #18
def main():
    clear_apport_folder()  # Clear apport folder on start, otherwise duplicate crashes won't register
    initial_tombstones = set(get_tombstones())

    sentry_sdk.utils.MAX_STRING_LENGTH = 8192
    sentry_sdk.init(
        "https://[email protected]/5861867",
        default_integrations=False,
        release=version)

    dongle_id = Params().get("DongleId", encoding='utf-8')
    sentry_sdk.set_user({"id": dongle_id})
    sentry_sdk.set_tag("dirty", dirty)
    sentry_sdk.set_tag("origin", origin)
    sentry_sdk.set_tag("branch", branch)
    sentry_sdk.set_tag("commit", commit)
    sentry_sdk.set_tag("device", HARDWARE.get_device_type())

    while True:
        now_tombstones = set(get_tombstones())

        for fn, _ in (now_tombstones - initial_tombstones):
            try:
                cloudlog.info(f"reporting new tombstone {fn}")
                if fn.endswith(".crash"):
                    report_tombstone_apport(fn)
                else:
                    report_tombstone_android(fn)
            except Exception:
                cloudlog.exception(f"Error reporting tombstone {fn}")

        initial_tombstones = now_tombstones
        time.sleep(5)
Example #19
    def server_checkup(self):
        if self.config.general.server_id:
            server_status = self.api.server.get_status(
                self.config.general.server_id)
            if server_status and not server_status['registered']:
                # re-register server
                server_id = self.api.server.register_server(
                    ip_addresses=','.join([get_internal_ip()]),
                    web_protocol=('http',
                                  'https')[self.config.general.enable_https],
                    web_port=self.config.general.web_port,
                    web_root=self.config.general.web_root,
                    server_version=sickrage.version(),
                )

                if server_id:
                    self.log.info(
                        'Re-registered SiCKRAGE server with SiCKRAGE API')
                    sentry_sdk.set_tag('server_id',
                                       self.config.general.server_id)
                    self.config.general.server_id = server_id
                    self.config.save(mark_dirty=True)
            else:
                self.log.debug('Updating SiCKRAGE server data on SiCKRAGE API')

                # update server information
                self.api.server.update_server(
                    server_id=self.config.general.server_id,
                    ip_addresses=','.join([get_internal_ip()]),
                    web_protocol=('http',
                                  'https')[self.config.general.enable_https],
                    web_port=self.config.general.web_port,
                    web_root=self.config.general.web_root,
                    server_version=sickrage.version(),
                )
Example #20
    def _post(self, request):
        relay = request.relay
        assert relay is not None  # should be provided during Authentication

        full_config_requested = request.relay_request_data.get("fullConfig")

        if full_config_requested and not relay.is_internal:
            return Response("Relay unauthorized for full config information",
                            403)

        version = request.GET.get("version") or "1"
        set_tag("relay_protocol_version", version)

        if version == "2":
            return self._post_by_key(
                request=request,
                full_config_requested=full_config_requested,
            )
        elif version == "1":
            return self._post_by_project(
                request=request,
                full_config_requested=full_config_requested,
            )
        else:
            return Response(
                "Unsupported version, we only support version null, 1 and 2.",
                400)
Example #21
def _format_storage_query_and_run(
    timer: Timer,
    query_metadata: SnubaQueryMetadata,
    referrer: str,
    clickhouse_query: Union[Query, CompositeQuery[Table]],
    request_settings: RequestSettings,
    reader: Reader,
    robust: bool,
    concurrent_queries_gauge: Optional[Gauge] = None,
) -> QueryResult:
    """
    Formats the Storage Query and passes it to the DB-specific code for execution.
    """
    from_clause = clickhouse_query.get_from_clause()
    visitor = TablesCollector()
    visitor.visit(from_clause)
    table_names = ",".join(sorted(visitor.get_tables()))
    with sentry_sdk.start_span(description="create_query", op="db") as span:
        _apply_turbo_sampling_if_needed(clickhouse_query, request_settings)

        formatted_query = format_query(clickhouse_query)
        span.set_data("query", formatted_query.structured())
        span.set_data("query_size_bytes",
                      _string_size_in_bytes(formatted_query.get_sql()))
        sentry_sdk.set_tag("query_size_group",
                           get_query_size_group(formatted_query.get_sql()))
        metrics.increment("execute")

    timer.mark("prepare_query")

    stats = {
        "clickhouse_table": table_names,
        "final": visitor.any_final(),
        "referrer": referrer,
        "sample": visitor.get_sample_rate(),
    }

    with sentry_sdk.start_span(description=formatted_query.get_sql(),
                               op="db") as span:
        span.set_tag("table", table_names)

        def execute() -> QueryResult:
            return raw_query(
                clickhouse_query,
                request_settings,
                formatted_query,
                reader,
                timer,
                query_metadata,
                stats,
                span.trace_id,
                robust=robust,
            )

        if concurrent_queries_gauge is not None:
            with concurrent_queries_gauge:
                return execute()
        else:
            return execute()
Example #22
    def index():
        assert request.form["username"] == data["username"]
        assert request.form["age"] == data["age"]
        assert not request.get_data()
        assert not request.get_json()
        set_tag("view", "yes")
        capture_message("hi")
        return "ok"
Example #23
    def get_hashes(self, force_config=None) -> CalculatedHashes:
        """
        Returns _all_ information that is necessary to group an event into
        issues. It returns two lists of hashes, `(flat_hashes,
        hierarchical_hashes)`:

        1. First, `hierarchical_hashes` is walked
           *backwards* (end to start) until one hash has been found that matches
           an existing group. Only *that* hash gets a GroupHash instance that is
           associated with the group.

        2. If no group was found, an event should be sorted into a group X, if
           there is a GroupHash matching *any* of `flat_hashes`. Hashes that do
           not yet have a GroupHash model get one and are associated with the same
           group (unless they already belong to another group).

           This is how regular grouping works.

        Whichever group the event lands in is associated with exactly one
        GroupHash corresponding to an entry in `hierarchical_hashes`, and an
        arbitrary amount of hashes from `flat_hashes` depending on whether some
        of those hashes have GroupHashes already assigned to other groups (and
        some other things).

        The returned hashes already take SDK fingerprints and checksums into
        consideration.

        """

        # If we have hashes stored in the data we use them, otherwise we
        # fall back to generating new ones from the data.  We can only use
        # this if we do not force a different config.
        if force_config is None:
            rv = CalculatedHashes.from_event(self.data)
            if rv is not None:
                return rv

        # Create fresh hashes
        flat_variants, hierarchical_variants = self.get_sorted_grouping_variants(
            force_config)
        flat_hashes, _ = self._hashes_from_sorted_grouping_variants(
            flat_variants)
        hierarchical_hashes, tree_labels = self._hashes_from_sorted_grouping_variants(
            hierarchical_variants)

        if flat_hashes:
            sentry_sdk.set_tag("get_hashes.flat_variant", flat_hashes[0][0])
        if hierarchical_hashes:
            sentry_sdk.set_tag("get_hashes.hierarchical_variant",
                               hierarchical_hashes[0][0])

        flat_hashes = [hash_ for _, hash_ in flat_hashes]
        hierarchical_hashes = [hash_ for _, hash_ in hierarchical_hashes]

        return CalculatedHashes(hashes=flat_hashes,
                                hierarchical_hashes=hierarchical_hashes,
                                tree_labels=tree_labels)
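The backwards walk over hierarchical_hashes described in the docstring happens outside this snippet. A rough sketch of that lookup under a simplifying assumption: a plain dict maps already-seen hashes to group ids (the real Sentry implementation uses GroupHash models and differs in detail).

from typing import Dict, List, Optional


def find_existing_group(hierarchical_hashes: List[str],
                        group_id_by_hash: Dict[str, int]) -> Optional[int]:
    # Walk the hierarchical hashes from the end (most specific) to the start
    # (least specific) and return the first group that already owns one of them.
    for hash_ in reversed(hierarchical_hashes):
        group_id = group_id_by_hash.get(hash_)
        if group_id is not None:
            return group_id
    return None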
Example #24
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)

            except Exception as error:
                set_tag('capture_exception.function',
                        f'{func.__module__}.{func.__qualname__}')
                sentry_capture_exception(error)
                raise
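This wrapper is presumably the inner function of a decorator. A sketch of the enclosing decorator, assuming set_tag and sentry_capture_exception are the top-level sentry_sdk helpers imported under the names used above:

import functools

from sentry_sdk import capture_exception as sentry_capture_exception
from sentry_sdk import set_tag


def capture_exceptions(func):
    # Hypothetical outer decorator; only the inner wrapper appears in the snippet above.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)

        except Exception as error:
            # Tag the event with the fully qualified name of the failing function.
            set_tag('capture_exception.function',
                    f'{func.__module__}.{func.__qualname__}')
            sentry_capture_exception(error)
            raise

    return wrapper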
Example #25
    def post(self, request):
        # double check DEMO_MODE is disabled
        if not settings.DEMO_MODE:
            raise Http404

        org = None
        # see if the user already was assigned a member
        member_id = request.get_signed_cookie(MEMBER_ID_COOKIE, default="")
        logger.info("post.start", extra={"cookie_member_id": member_id})
        sentry_sdk.set_tag("member_id", member_id)

        skip_buffer = request.POST.get("skip_buffer") == "1"
        sentry_sdk.set_tag("skip_buffer", skip_buffer)

        if member_id and not skip_buffer:
            try:
                # only assign them to an active org for a member role
                member = OrganizationMember.objects.get(
                    id=member_id,
                    organization__status=OrganizationStatus.ACTIVE,
                    role="member")
            except OrganizationMember.DoesNotExist:
                pass
            else:
                org = member.organization
                user = member.user
                logger.info("post.retrieved_user",
                            extra={"organization_slug": org.slug})

        if not org:
            # move this import here so Django doesn't discover the models
            # for demo mode except when demo mode is actually active
            from .demo_org_manager import assign_demo_org

            # assign the demo org and get the user
            org, user = assign_demo_org(skip_buffer=skip_buffer)
            member = OrganizationMember.objects.get(organization=org,
                                                    user=user)

            logger.info("post.assigned_org",
                        extra={"organization_slug": org.slug})

        auth.login(request, user)
        resp = self.redirect(get_redirect_url(request, org))

        # set a cookie recording whether the user accepted tracking so we know
        # whether to initialize analytics when accepted_tracking=1
        # 0 means don't show the footer to accept cookies (user already declined)
        # no value means we show the footer to accept cookies (user has neither accepted nor declined)
        accepted_tracking = request.POST.get(ACCEPTED_TRACKING_COOKIE)
        if accepted_tracking in ["0", "1"]:
            resp.set_cookie(ACCEPTED_TRACKING_COOKIE, accepted_tracking)

        # set the member id
        resp.set_signed_cookie(MEMBER_ID_COOKIE, member.id)
        return resp
Example #26
    def get(self, request: HttpRequest, organization: Organization,
            trace_id: str) -> HttpResponse:
        if not self.has_feature(organization, request):
            return Response(status=404)

        try:
            # The trace view isn't useful without global views, so skipping the check here
            params = self.get_snuba_params(request,
                                           organization,
                                           check_global_views=False)
        except NoProjects:
            return Response(status=404)

        detailed: bool = request.GET.get("detailed", "0") == "1"
        event_id: Optional[str] = request.GET.get("event_id")

        # Only need to validate event_id as trace_id is validated in the URL
        if event_id and not is_event_id(event_id):
            return Response({"detail": INVALID_ID_DETAILS.format("Event ID")},
                            status=400)

        with self.handle_query_errors():
            transactions, errors = query_trace_data(
                trace_id, params, self.has_snql_feature(organization, request))
            if len(transactions) == 0:
                return Response(status=404)
            self.record_analytics(transactions, trace_id, self.request.user.id,
                                  organization.id)

        warning_extra: Dict[str, str] = {
            "trace": trace_id,
            "organization": organization.slug
        }

        # Look for the roots
        roots: List[SnubaTransaction] = []
        for item in transactions:
            if is_root(item):
                roots.append(item)
            else:
                break
        if len(roots) > 1:
            sentry_sdk.set_tag("discover.trace-view.warning",
                               "root.extra-found")
            logger.warning(
                "discover.trace-view.root.extra-found",
                {
                    "extra_roots": len(roots),
                    **warning_extra
                },
            )

        return Response(
            self.serialize(transactions, errors, roots, warning_extra,
                           event_id, detailed))
Example #27
def custom_exception_handler(exc, context):
    """ Make custom exception treatment in RestFramework

    :param exc: Exception - you can check specific exception
    :param context: context
    :return: response with error desc
    """
    exception_id = uuid.uuid4()
    logger.error('{} {}'.format(exception_id, exc), exc_info=True)

    exc = _override_exceptions(exc)

    # error body structure
    response_data = {
        'id': exception_id,
        'status_code': status.HTTP_500_INTERNAL_SERVER_ERROR,  # default value
        'version': label_studio.__version__,
        'detail': 'Unknown error',  # default value
        'exc_info': None,
    }
    # try rest framework handler
    response = exception_handler(exc, context)
    if response is not None:
        response_data['status_code'] = response.status_code

        if 'detail' in response.data and isinstance(response.data['detail'],
                                                    ErrorDetail):
            response_data['detail'] = response.data['detail']
            response.data = response_data
        # move validation errors to separate namespace
        else:
            response_data['detail'] = 'Validation error'
            response_data['validation_errors'] = response.data if isinstance(
                response.data, dict) else {
                    'non_field_errors': response.data
                }
            response.data = response_data

    # non-standard exception
    else:
        if sentry_sdk_loaded:
            # pass exception to sentry
            set_tag('exception_id', exception_id)
            capture_exception(exc)

        exc_tb = tb.format_exc()
        logger.debug(exc_tb)
        response_data['detail'] = str(exc)
        if not settings.DEBUG_MODAL_EXCEPTIONS:
            exc_tb = 'Tracebacks disabled in settings'
        response_data['exc_info'] = exc_tb
        response = Response(status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                            data=response_data)

    return response
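For context, Django REST Framework picks up a handler like this through settings; the module path below is a placeholder, not taken from the source:

# settings.py sketch
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "myproject.core.utils.custom_exception_handler",
}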
Example #28
def tag_delta(errors: List[ComparisonError], tags: Mapping[str, str]) -> None:
    relative_changes = [e.relative_change for e in errors if e.relative_change is not None]
    if relative_changes:
        max_relative_change = max(relative_changes, key=lambda x: abs(x))
        timing("rh.duplex.rel_change", max_relative_change, tags=tags)
        abs_max_relative_change = abs(max_relative_change)
        tag_value = f"{math.ceil(100 * abs_max_relative_change)}"
        if max_relative_change < 0:
            tag_value = f"-{tag_value}"

        set_tag("rh.duplex.rel_change", tag_value)
Example #29
def check_client_version() -> None:
    client_version_header = flask.request.headers.get("app-version", None)
    sentry_sdk.set_tag("client.version", client_version_header)
    if not client_version_header:
        return
    try:
        client_version = semver.VersionInfo.parse(client_version_header)
    except ValueError:
        raise ForbiddenError(errors={"code": "UPGRADE_REQUIRED"})

    if client_version < settings.NATIVE_APP_MINIMAL_CLIENT_VERSION:
        raise ForbiddenError(errors={"code": "UPGRADE_REQUIRED"})
Example #30
    def record_cache_hit_type(hit_type: int) -> None:
        span_tag = "cache_miss"
        if hit_type == RESULT_VALUE:
            stats["cache_hit"] = 1
            span_tag = "cache_hit"
        elif hit_type == RESULT_WAIT:
            stats["is_duplicate"] = 1
            span_tag = "cache_wait"

        sentry_sdk.set_tag("cache_status", span_tag)
        if span:
            span.set_data("cache_status", span_tag)