Merged

@@ -10,7 +10,7 @@
from sentry.api.serializers.models.group import BaseGroupSerializerResponse
from sentry.models.group import Group
from sentry.utils.cursors import Cursor, CursorResult
from sentry.workflow_engine.models import Detector, Workflow, WorkflowFireHistory
from sentry.workflow_engine.models import Detector, DetectorGroup, Workflow, WorkflowFireHistory


@dataclass(frozen=True)
@@ -118,13 +118,22 @@ def fetch_workflow_groups_paginated(

# subquery that retrieves row with the largest date in a group
group_max_dates = filtered_history.filter(group=OuterRef("group")).order_by("-date_added")[:1]

# Subquery to get the detector_id from DetectorGroup.
# The detector does not currently need to be connected to the workflow.
detector_subquery = DetectorGroup.objects.filter(
group=OuterRef("group"),
).values(
"detector_id"
)[:1]
Comment on lines +124 to +128

High severity vulnerability may affect your project—review required:
Line 125 lists a dependency (django) with a known High severity vulnerability.

ℹ️ Why this matters

Affected versions of Django are vulnerable to Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection'). SQL injection in Django's ORM column aliases: when using QuerySet.annotate(), QuerySet.alias(), QuerySet.aggregate(), or QuerySet.extra() with dictionary expansion (**kwargs), the dictionary keys are used unescaped as SQL column aliases. On MySQL and MariaDB backends, an attacker who can influence those keys (for example, by passing a crafted dict of annotations) can inject arbitrary SQL into the generated query.

References: GHSA, CVE

To resolve this comment:
Check if you are using Django with MySQL or MariaDB.

  • If you're affected, upgrade this dependency to at least version 5.2.7 at uv.lock.
  • If you're not affected, comment /fp we don't use this [condition]
💬 Ignore this finding

To ignore this, reply with:

  • /fp <comment> for false positive
  • /ar <comment> for acceptable risk
  • /other <comment> for all other reasons

You can view more details on this finding in the Semgrep AppSec Platform here.
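
For context, a minimal sketch of the pattern the advisory describes; this is hypothetical code, not from this PR, the function names and the alias parameter are invented, and the risk applies on MySQL/MariaDB backends where keys expanded into annotate()/aggregate()/alias()/extra() become unescaped SQL column aliases.

from django.db.models import Count

from sentry.models.group import Group


def group_counts_unsafe(alias: str):
    # UNSAFE if `alias` can be influenced by a user: the dict key is used
    # verbatim as a SQL column alias, which is the injection vector here.
    return Group.objects.values("project_id").annotate(**{alias: Count("id")})


def group_counts_safe():
    # Safe: the alias is a fixed, server-defined identifier.
    return Group.objects.values("project_id").annotate(group_count=Count("id"))

Upgrading Django as suggested above addresses the unescaped-alias behavior; keeping alias names hard-coded avoids the pattern entirely.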

Member:

What happens with this subquery when there isn't a DetectorGroup? Groups are visible for a short period before their DetectorGroup exists, and our backfill isn't quite finished either.
This is my only concern at this point.

Member Author:

It will return None, and the frontend will assume it's associated with the issue stream detector (maybe not implemented yet, but it will be).
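
A minimal sketch of that behavior (not part of the diff), mirroring the query above and relying on Django's documented Subquery semantics: a correlated subquery that matches no rows evaluates to SQL NULL, so the annotation comes back as None.

from django.db.models import OuterRef, Subquery

from sentry.workflow_engine.models import DetectorGroup, WorkflowFireHistory

# Same shape as the subquery added in this diff.
detector_subquery = DetectorGroup.objects.filter(group=OuterRef("group")).values(
    "detector_id"
)[:1]

rows = WorkflowFireHistory.objects.values("group").annotate(
    detector_id=Subquery(detector_subquery)
)
# For a group with no DetectorGroup row, row["detector_id"] is None; the
# frontend then falls back to the issue stream detector, per the reply above.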


qs = (
filtered_history.select_related("group", "detector")
filtered_history.select_related("group")
.values("group")
.annotate(count=Count("group"))
.annotate(event_id=Subquery(group_max_dates.values("event_id")))
.annotate(last_triggered=Max("date_added"))
.annotate(detector_id=Subquery(group_max_dates.values("detector_id")))
.annotate(detector_id=Subquery(detector_subquery))
)

# Count distinct groups for pagination
@@ -11,7 +11,7 @@
WorkflowGroupHistory,
fetch_workflow_groups_paginated,
)
from sentry.workflow_engine.models import Workflow, WorkflowFireHistory
from sentry.workflow_engine.models import DetectorGroup, Workflow, WorkflowFireHistory

pytestmark = [requires_snuba]

@@ -32,6 +32,10 @@ def setUp(self) -> None:
project_id=self.project.id,
type=MetricIssue.slug,
)
DetectorGroup.objects.create(
detector=self.detector_1,
group=self.group,
)
for i in range(3):
self.history.append(
WorkflowFireHistory(
@@ -46,6 +50,10 @@
project_id=self.project.id,
type=MetricIssue.slug,
)
DetectorGroup.objects.create(
detector=self.detector_2,
group=self.group_2,
)
self.history.append(
WorkflowFireHistory(
detector=self.detector_2,
@@ -59,6 +67,10 @@
project_id=self.project.id,
type=MetricIssue.slug,
)
DetectorGroup.objects.create(
detector=self.detector_3,
group=self.group_3,
)
for i in range(2):
self.history.append(
WorkflowFireHistory(
@@ -1,13 +1,16 @@
from uuid import uuid4

from sentry.api.serializers import serialize
from sentry.grouping.grouptype import ErrorGroupType
from sentry.incidents.grouptype import MetricIssue
from sentry.testutils.cases import APITestCase
from sentry.testutils.helpers.datetime import before_now, freeze_time
from sentry.testutils.skips import requires_snuba
from sentry.workflow_engine.endpoints.serializers.workflow_group_history_serializer import (
WorkflowGroupHistory,
WorkflowGroupHistorySerializer,
)
from sentry.workflow_engine.models import DetectorGroup
from sentry.workflow_engine.models.workflow_fire_history import WorkflowFireHistory

pytestmark = [requires_snuba]
@@ -26,6 +29,14 @@ def setUp(self) -> None:

self.history: list[WorkflowFireHistory] = []
self.workflow = self.create_workflow(organization=self.organization)
self.detector = self.create_detector(
project=self.project,
type=ErrorGroupType.slug,
)
DetectorGroup.objects.create(
detector=self.detector,
group=self.group,
)
for i in range(3):
self.history.append(
WorkflowFireHistory(
@@ -35,6 +46,14 @@
)
)
self.group_2 = self.create_group()
self.detector_2 = self.create_detector(
project=self.project,
type=MetricIssue.slug,
)
DetectorGroup.objects.create(
detector=self.detector_2,
group=self.group_2,
)
self.history.append(
WorkflowFireHistory(
workflow=self.workflow,
@@ -63,14 +82,18 @@ def test_simple(self) -> None:
assert resp.data == serialize(
[
WorkflowGroupHistory(
self.group, 3, self.base_triggered_date, self.history[0].event_id, detector=None
self.group,
3,
self.base_triggered_date,
self.history[0].event_id,
detector=self.detector,
),
WorkflowGroupHistory(
self.group_2,
1,
self.base_triggered_date,
self.history[-1].event_id,
detector=None,
detector=self.detector_2,
),
],
self.user,
@@ -88,7 +111,11 @@ def test_pagination(self) -> None:
assert resp.data == serialize(
[
WorkflowGroupHistory(
self.group, 3, self.base_triggered_date, self.history[0].event_id, detector=None
self.group,
3,
self.base_triggered_date,
self.history[0].event_id,
detector=self.detector,
)
],
self.user,
@@ -111,7 +138,7 @@
1,
self.base_triggered_date,
self.history[-1].event_id,
detector=None,
detector=self.detector_2,
)
],
self.user,