Example #1
def test_hashvalues_python23_strings():
    if six.PY2:
        assert hash_values(["test"],
                           seed="seed") == "334e3fd2f66966a5c785d825c5f03494"
    else:
        assert hash_values(["test"],
                           seed="seed") == "ce35c0ce0d38976f61a5ca951de74a16"
Example #2
def get_previous_releases(project, start_version, limit=5):
    # given a release version + project, return the previous
    # `limit` releases (including the release specified by `start_version`)
    key = "get_previous_releases:1:%s" % hash_values([project.id, start_version, limit])
    rv = cache.get(key)
    if rv is None:
        try:
            release_dates = (
                Release.objects.filter(
                    organization_id=project.organization_id, version=start_version, projects=project
                )
                .values("date_released", "date_added")
                .get()
            )
        except Release.DoesNotExist:
            rv = []
        else:
            start_date = release_dates["date_released"] or release_dates["date_added"]

            rv = list(
                Release.objects.filter(projects=project, organization_id=project.organization_id)
                .extra(
                    select={"date": "COALESCE(date_released, date_added)"},
                    where=["COALESCE(date_released, date_added) <= %s"],
                    params=[start_date],
                )
                .extra(order_by=["-date"])[:limit]
            )
        cache.set(key, rv, 60)
    return rv
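This example follows the read-through caching pattern shared by most snippets on this page: hash the inputs into a short key, try the cache, compute on a miss, and store the result with a short TTL. A minimal, generic sketch of that pattern, assuming `cache` and `hash_values` behave like Sentry's cache wrapper and hashing helper:

def cached_lookup(cache, hash_values, key_parts, prefix, compute, ttl=60):
    # Read-through cache: derive a stable key from the inputs, return the
    # cached value if present, otherwise compute it and cache it briefly.
    key = "%s:%s" % (prefix, hash_values(key_parts))
    value = cache.get(key)
    if value is None:
        value = compute()
        cache.set(key, value, ttl)
    return value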
Example #3
    def _build_session_cache_key(self, project_id):
        start_key = end_key = env_key = ""
        if self.start:
            start_key = self.start.replace(second=0,
                                           microsecond=0,
                                           tzinfo=None)

        if self.end:
            end_key = self.end.replace(second=0, microsecond=0, tzinfo=None)

        if self.end and self.start and self.end - self.start >= timedelta(
                minutes=60):
            # Cache to the hour for longer time-range queries, and to the minute if the query is for a time period under 1 hour
            end_key = end_key.replace(minute=0)
            start_key = start_key.replace(minute=0)

        if self.environment_ids:
            self.environment_ids.sort()
            env_key = "-".join(str(eid) for eid in self.environment_ids)

        start_key = start_key.strftime(
            "%m/%d/%Y, %H:%M:%S") if start_key != "" else ""
        end_key = end_key.strftime(
            "%m/%d/%Y, %H:%M:%S") if end_key != "" else ""
        key_hash = hash_values([project_id, start_key, end_key, env_key])
        session_cache_key = f"w-s:{key_hash}"
        return session_cache_key
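The method above rounds its timestamps before hashing so that equivalent queries produce the same key: seconds and microseconds are always dropped, and minutes are dropped as well once the window is an hour or longer. A standalone sketch of that rounding (illustrative only):

from datetime import datetime, timedelta

def round_for_cache_key(start, end):
    # Drop seconds/microseconds; for windows of an hour or more, also drop
    # minutes, so nearby queries hash to identical cache keys.
    start = start.replace(second=0, microsecond=0, tzinfo=None)
    end = end.replace(second=0, microsecond=0, tzinfo=None)
    if end - start >= timedelta(minutes=60):
        start = start.replace(minute=0)
        end = end.replace(minute=0)
    return start, end

# Two-hour window: both endpoints are rounded down to the hour.
print(round_for_cache_key(datetime(2021, 5, 1, 10, 17, 42), datetime(2021, 5, 1, 12, 3, 9)))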
Example #4
    def has_release_permission(self, request, organization, release):
        """
        Does the given request have permission to access this release, based
        on the projects to which the release is attached?

        If the given request has an actor (user or ApiKey), cache the results
        for a minute on the unique combination of actor,org,release.
        """
        actor_id = None
        has_perms = None
        if getattr(request, "user", None) and request.user.id:
            actor_id = "user:%s" % request.user.id
        if getattr(request, "auth", None) and request.auth.id:
            actor_id = "apikey:%s" % request.auth.id
        if actor_id is not None:
            key = "release_perms:1:%s" % hash_values(
                [actor_id, organization.id, release.id])
            has_perms = cache.get(key)
        if has_perms is None:
            has_perms = ReleaseProject.objects.filter(
                release=release,
                project__in=self.get_projects(request, organization)).exists()
            if actor_id is not None:
                cache.set(key, has_perms, 60)

        return has_perms
Example #5
    def get(self, request, project):
        """
        Get a list of release setup progress steps for a project:
        1. tag an error
        2. link a repo
        3. associate commits
        4. tell sentry about a deploy
        """

        tag_key = "onboard_tag:1:%s" % (project.id)
        repo_key = "onboard_repo:1:%s" % (project.organization_id)
        commit_key = "onboard_commit:1:%s" % hash_values([project.organization_id, project.id])
        deploy_key = "onboard_deploy:1:%s" % hash_values([project.organization_id, project.id])
        onboard_cache = cache.get_many([tag_key, repo_key, commit_key, deploy_key])

        tag = onboard_cache.get(tag_key)
        if tag is None:
            tag = Group.objects.filter(project=project.id, first_release__isnull=False).exists()
            cache.set(tag_key, tag, 3600 if tag else 60)

        repo = onboard_cache.get(repo_key)
        if repo is None:
            repo = Repository.objects.filter(organization_id=project.organization_id).exists()
            cache.set(repo_key, repo, 3600 if repo else 60)

        commit = onboard_cache.get(commit_key)
        if commit is None:
            commit = ReleaseCommit.objects.filter(
                organization_id=project.organization_id, release__projects=project.id
            ).exists()
            cache.set(commit_key, commit, 3600 if commit else 60)

        deploy = onboard_cache.get(deploy_key)
        if deploy is None:
            deploy = Deploy.objects.filter(
                organization_id=project.organization_id, release__projects=project.id
            ).exists()
            cache.set(deploy_key, deploy, 3600 if deploy else 60)

        return Response(
            [
                {"step": "tag", "complete": bool(tag)},
                {"step": "repo", "complete": bool(repo)},
                {"step": "commit", "complete": bool(commit)},
                {"step": "deploy", "complete": bool(deploy)},
            ]
        )
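Each onboarding check above caches a positive result for an hour but a negative result for only a minute, so a newly completed step becomes visible quickly. The examples inline this as `3600 if x else 60`; written out as a small (hypothetical) helper for clarity:

def onboarding_ttl(completed, long_ttl=3600, short_ttl=60):
    # Cache "done" for an hour and "not done" for a minute, so progress
    # shows up within about a minute of a step being completed.
    return long_ttl if completed else short_ttl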
Example #6
def follows_semver_versioning_scheme(org_id, project_id, release_version=None):
    """
    Checks if we should follow semantic versioning scheme for ordering based on
    1. Latest ten releases of the project_id passed in all follow semver
    2. provided release version argument is a valid semver version

    Inputs:
        * org_id
        * project_id
        * release_version
    Returns:
        Boolean that indicates if we should follow semantic version or not
    """
    # ToDo(ahmed): Move this function elsewhere so it is easily accessible for re-use
    cache_key = "follows_semver:1:%s" % hash_values([org_id, project_id])
    follows_semver = cache.get(cache_key)

    if follows_semver is None:

        # Check if the latest ten releases are semver compliant
        releases_list = list(
            Release.objects.filter(
                organization_id=org_id,
                projects__id__in=[project_id]).order_by("-date_added")[:10])

        if not releases_list:
            cache.set(cache_key, False, 3600)
            return False

        # ToDo(ahmed): re-visit/replace these conditions once we enable the project-wide `semver` setting
        # A project is said to follow a semver versioning scheme if it satisfies the following
        # conditions:
        # 1: At least one semver compliant release in the most recent 3 releases
        # 2: At least 3 semver compliant releases in the most recent 10 releases
        if len(releases_list) <= 2:
            # The most recent release decides whether the project follows semver
            follows_semver = releases_list[0].is_semver_release
        elif len(releases_list) < 10:
            # We forgo condition 2; it is enough that condition 1 is satisfied to consider
            # this project to have semver compliant releases
            follows_semver = any(release.is_semver_release
                                 for release in releases_list[0:3])
        else:
            # Count number of semver releases in the last ten
            semver_matches = sum(
                map(lambda release: release.is_semver_release, releases_list))

            at_least_three_in_last_ten = semver_matches >= 3
            at_least_one_in_last_three = any(release.is_semver_release
                                             for release in releases_list[0:3])

            follows_semver = at_least_one_in_last_three and at_least_three_in_last_ten
        cache.set(cache_key, follows_semver, 3600)

    # Check that the release_version passed in is semver compliant
    if release_version:
        follows_semver = follows_semver and Release.is_semver_version(
            release_version)
    return follows_semver
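The heuristic above has three branches: with one or two releases the newest release decides, with three to nine releases only the "one of the newest three" condition applies, and with ten releases both conditions must hold. The same decision restated over a plain list of booleans (True = semver compliant, newest first), for illustration only:

def follows_semver_from_flags(flags):
    # `flags` mirrors releases_list ordered by -date_added (newest first).
    if not flags:
        return False
    if len(flags) <= 2:
        return flags[0]                        # newest release decides
    if len(flags) < 10:
        return any(flags[:3])                  # condition 1 only
    return any(flags[:3]) and sum(flags) >= 3  # conditions 1 and 2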
Example #7
    def set_cache_key_from_values(self, values):
        if values is None:
            self.cache_key = None
            return

        h = hash_values(values, seed=self.processor.__class__.__name__)
        self.cache_key = rv = 'pf:%s' % h
        return rv
Example #8
    def set_cache_key_from_values(self, values):
        if values is None:
            self.cache_key = None
            return

        h = hash_values(values, seed=self.processor.__class__.__name__)
        self.cache_key = rv = "pf:%s" % h
        return rv
Example #9
    def get_rule_status(self, rule):
        key = "grouprulestatus:1:%s" % hash_values([self.group.id, rule.id])
        rule_status = cache.get(key)
        if rule_status is None:
            rule_status, _ = GroupRuleStatus.objects.get_or_create(
                rule=rule, group=self.group, defaults={"project": self.project}
            )
            cache.set(key, rule_status, 300)
        return rule_status
Example #10
def get_previous_releases(project, start_version, limit=5):
    # given a release version + project, return the previous
    # `limit` releases (including the release specified by `start_version`)
    key = "get_previous_releases:1:%s" % hash_values(
        [project.id, start_version, limit])
    rv = cache.get(key)
    if rv is None:
        try:
            first_release = Release.objects.filter(
                organization_id=project.organization_id,
                version=start_version,
                projects=project).get()
        except Release.DoesNotExist:
            rv = []
        else:
            start_date = first_release.date_released or first_release.date_added

            # XXX: This query could be very inefficient for projects with a large
            # number of releases. To work around this, we only check 100 releases
            # ordered by highest release id, which is generally correlated with
            # most recent releases for a project. This isn't guaranteed to be correct,
            # since `date_released` could end up out of order, but should be close
            # enough for what we need this for with suspect commits.
            # To make this better, we should denormalize the coalesce of date_released
            # and date_added onto `ReleaseProject`, which would have benefits for other
            # similar queries.
            rv = list(
                Release.objects.raw(
                    """
                        SELECT sr.*
                        FROM sentry_release as sr
                        INNER JOIN (
                            SELECT release_id
                            FROM sentry_release_project
                            WHERE project_id = %s
                            AND sentry_release_project.release_id <= %s
                            ORDER BY release_id desc
                            LIMIT 100
                        ) AS srp ON (sr.id = srp.release_id)
                        WHERE sr.organization_id = %s
                        AND coalesce(sr.date_released, sr.date_added) <= %s
                        ORDER BY coalesce(sr.date_released, sr.date_added) DESC
                        LIMIT %s;
                    """,
                    [
                        project.id, first_release.id, project.organization_id,
                        start_date, limit
                    ],
                ))
        cache.set(key, rv, 60)
    return rv
Example #11
    def has_release_permission(self, request, organization, release):
        """
        Does the given request have permission to access this release, based
        on the projects to which the release is attached?
        """
        actor_id = None
        if getattr(request, "user", None) and request.user.id:
            actor_id = "user:%s" % request.user.id
        if getattr(request, "auth", None) and request.auth.id:
            actor_id = "apikey:%s" % request.auth.id
        if actor_id is None:
            return False
        key = "release_perms:1:%s" % hash_values(
            [actor_id, organization.id, release.id])
        has_perms = cache.get(key)
        if has_perms is None:
            has_perms = ReleaseProject.objects.filter(
                release=release,
                project__in=self.get_projects(request, organization)).exists()
            cache.set(key, has_perms, 60)

        return has_perms
Example #12
    def _build_rule_status_cache_key(self, rule_id: int) -> str:
        return "grouprulestatus:1:%s" % hash_values([self.group.id, rule_id])
Example #13
File: tests.py Project: yaoqi/sentry
def test_hash_values(seed, value, hash):
    assert hash_values([value], seed=seed) == hash
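The bare signature suggests the test is driven by a `pytest.mark.parametrize` decorator elsewhere in the file. A sketch of how that parametrization might look (the decorator and the import path are assumptions; the digest is taken from the Python 3 branch of Example #1):

import pytest

from sentry.utils.hashlib import hash_values  # import path assumed

@pytest.mark.parametrize(
    "seed,value,hash",
    [("seed", "test", "ce35c0ce0d38976f61a5ca951de74a16")],
)
def test_hash_values(seed, value, hash):
    assert hash_values([value], seed=seed) == hash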
Example #14
File: cfi.py Project: sheepwm/sentry
    def _cache_key(self):
        values = [self._get_frame_key(f) for f in self.raw_frames]
        # XXX: The seed is hard coded for a future refactor
        return 'st:%s' % hash_values(values, seed='MinidumpCfiProcessor')
Example #15
def test_hash_values(seed, value, hash):
    assert hash_values([value], seed=seed) == hash
Example #16
    def test_session_count(self):
        group = self.group

        environment = Environment.get_or_create(group.project, "prod")
        dev_environment = Environment.get_or_create(group.project, "dev")
        no_sessions_environment = Environment.get_or_create(
            group.project, "no_sessions")

        self.received = time.time()
        self.session_started = time.time() // 60 * 60
        self.session_release = "[email protected]"
        self.session_crashed_release = "[email protected]"
        self.store_session({
            "session_id": "5d52fd05-fcc9-4bf3-9dc9-267783670341",
            "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102667",
            "status": "ok",
            "seq": 0,
            "release": self.session_release,
            "environment": "dev",
            "retention_days": 90,
            "org_id": self.project.organization_id,
            "project_id": self.project.id,
            "duration": 1,
            "errors": 0,
            "started": self.session_started - 120,
            "received": self.received - 120,
        })

        self.store_session({
            "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
            "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102668",
            "status": "ok",
            "seq": 0,
            "release": self.session_release,
            "environment": "prod",
            "retention_days": 90,
            "org_id": self.project.organization_id,
            "project_id": self.project.id,
            "duration": 60.0,
            "errors": 0,
            "started": self.session_started - 240,
            "received": self.received - 240,
        })

        self.store_session({
            "session_id": "5e910c1a-6941-460e-9843-24103fb6a63c",
            "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102669",
            "status": "exited",
            "seq": 1,
            "release": self.session_release,
            "environment": "prod",
            "retention_days": 90,
            "org_id": self.project.organization_id,
            "project_id": self.project.id,
            "duration": 30.0,
            "errors": 0,
            "started": self.session_started,
            "received": self.received,
        })

        self.store_session({
            "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf82",
            "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102660",
            "status": "crashed",
            "seq": 0,
            "release": self.session_crashed_release,
            "environment": "prod",
            "retention_days": 90,
            "org_id": self.project.organization_id,
            "project_id": self.project.id,
            "duration": 60.0,
            "errors": 0,
            "started": self.session_started,
            "received": self.received,
        })

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(stats_period="14d"),
        )
        assert "sessionCount" not in result[0]
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                stats_period="14d",
                expand=["sessions"],
            ),
        )
        assert result[0]["sessionCount"] == 3
        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[environment.id],
                stats_period="14d",
                expand=["sessions"]),
        )
        assert result[0]["sessionCount"] == 2

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[no_sessions_environment.id],
                stats_period="14d",
                expand=["sessions"],
            ),
        )
        assert result[0]["sessionCount"] is None

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"]),
        )
        assert result[0]["sessionCount"] == 1

        self.store_session({
            "session_id": "a148c0c5-06a2-423b-8901-6b43b812cf83",
            "distinct_id": "39887d89-13b2-4c84-8c23-5d13d2102627",
            "status": "ok",
            "seq": 0,
            "release": self.session_release,
            "environment": "dev",
            "retention_days": 90,
            "org_id": self.project.organization_id,
            "project_id": self.project.id,
            "duration": 60.0,
            "errors": 0,
            "started": self.session_started - 1590061,  # approximately 18 days
            "received": self.received - 1590061,  # approximately 18 days
        })

        result = serialize(
            [group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
                start=timezone.now() - timedelta(days=30),
                end=timezone.now() - timedelta(days=15),
            ),
        )
        assert result[0]["sessionCount"] == 1

        # Delete the cache from the query we did above, else this result comes back as 1 instead of 0.5
        key_hash = hash_values(
            [group.project.id, "", "", f"{dev_environment.id}"])
        cache.delete(f"w-s:{key_hash}")
        project2 = self.create_project(organization=self.organization,
                                       teams=[self.team],
                                       name="Another project")
        data = {
            "fingerprint": ["meow"],
            "timestamp": iso_format(timezone.now()),
            "type": "error",
            "exception": [{
                "type": "Foo"
            }],
        }
        event = self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)
        self.store_event(data=data, project_id=project2.id)

        result = serialize(
            [group, event.group],
            serializer=StreamGroupSerializerSnuba(
                environment_ids=[dev_environment.id],
                stats_period="14d",
                expand=["sessions"],
            ),
        )
        assert result[0]["sessionCount"] == 2
        # No sessions in project2
        assert result[1]["sessionCount"] is None
Example #17
def build_group_attachment(group,
                           event=None,
                           tags=None,
                           identity=None,
                           actions=None,
                           rules=None):
    # XXX(dcramer): options are limited to 100 choices, even when nested
    status = group.get_status()

    members = get_member_assignees(group)
    teams = get_team_assignees(group)

    logo_url = absolute_uri(
        get_asset_url("sentry", "images/sentry-email-avatar.png"))
    color = (LEVEL_TO_COLOR.get(event.get_tag("level"), "error")
             if event else LEVEL_TO_COLOR["error"])

    text = build_attachment_text(group, event) or ""

    if actions is None:
        actions = []

    assignee = get_assignee(group)

    resolve_button = {
        "name": "resolve_dialog",
        "value": "resolve_dialog",
        "type": "button",
        "text": "Resolve...",
    }

    ignore_button = {
        "name": "status",
        "value": "ignored",
        "type": "button",
        "text": "Ignore"
    }

    project = Project.objects.get_from_cache(id=group.project_id)

    cache_key = "has_releases:1:%s" % hash_values(
        [project.id, project.organization_id])
    has_releases = cache.get(cache_key)
    if has_releases is None:
        has_releases = Release.objects.filter(
            projects=project,
            organization_id=project.organization_id).exists()
        if has_releases:
            cache.set(cache_key, True, 3600)
        else:
            cache.set(cache_key, False, 60)

    if not has_releases:
        resolve_button.update({
            "name": "status",
            "text": "Resolve",
            "value": "resolved"
        })

    if status == GroupStatus.RESOLVED:
        resolve_button.update({
            "name": "status",
            "text": "Unresolve",
            "value": "unresolved"
        })

    if status == GroupStatus.IGNORED:
        ignore_button.update({"text": "Stop Ignoring", "value": "unresolved"})

    option_groups = []

    if teams:
        option_groups.append({"text": "Teams", "options": teams})

    if members:
        option_groups.append({"text": "People", "options": members})

    payload_actions = [
        resolve_button,
        ignore_button,
        {
            "name": "assign",
            "text": "Select Assignee...",
            "type": "select",
            "selected_options": [assignee],
            "option_groups": option_groups,
        },
    ]

    fields = []

    if tags:
        event_tags = event.tags if event else group.get_latest_event().tags

        for key, value in event_tags:
            std_key = tagstore.get_standardized_key(key)
            if std_key not in tags:
                continue

            labeled_value = tagstore.get_tag_value_label(key, value)
            fields.append({
                "title": std_key.encode("utf-8"),
                "value": labeled_value.encode("utf-8"),
                "short": True,
            })

    if actions:
        action_texts = filter(
            None, [build_action_text(group, identity, a) for a in actions])
        text += "\n" + "\n".join(action_texts)

        color = ACTIONED_ISSUE_COLOR
        payload_actions = []

    ts = group.last_seen

    if event:
        event_ts = event.datetime
        ts = max(ts, event_ts)

    footer = u"{}".format(group.qualified_short_id)

    if rules:
        footer += u" via {}".format(rules[0].label)

        if len(rules) > 1:
            footer += u" (+{} other)".format(len(rules) - 1)

    obj = event if event is not None else group
    return {
        "fallback": u"[{}] {}".format(project.slug, obj.title),
        "title": build_attachment_title(obj),
        "title_link": group.get_absolute_url(params={"referrer": "slack"}),
        "text": text,
        "fields": fields,
        "mrkdwn_in": ["text"],
        "callback_id": json.dumps({"issue": group.id}),
        "footer_icon": logo_url,
        "footer": footer,
        "ts": to_timestamp(ts),
        "color": color,
        "actions": payload_actions,
    }
Example #18
File: cfi.py Project: getsentry/sentry
    def _get_cache_key(self):
        values = [self._get_frame_key(f) for f in self.raw_frames]
        # XXX: The seed is hard coded for a future refactor
        return 'st:%s' % hash_values(values, seed='MinidumpCfiProcessor')