10 changes: 9 additions & 1 deletion .github/workflows/self-hosted.yml
@@ -5,6 +5,7 @@ on:
- master
- releases/**
pull_request:
+ workflow_dispatch:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
@@ -83,5 +84,12 @@ jobs:
--tag ghcr.io/getsentry/sentry-self-hosted:${{ github.sha }} \
--file self-hosted/Dockerfile \
--build-arg SOURCE_COMMIt=${{ github.sha }} \
"${args[@]}" \
+ --output type=docker,dest=self-hosted-docker-image \
.

+ - name: Upload docker image
+   uses: actions/upload-artifact@v4
+   with:
+     retention-days: 1
+     name: self-hosted-docker-image
+     path: "self-hosted-docker-image"
8 changes: 4 additions & 4 deletions bin/send_metrics.py
@@ -30,7 +30,7 @@ def make_counter_payload(use_case, org_id, rand_str):
"type": "c",
"value": 1,
"org_id": org_id,
"retention_days": 90,
"retention_days": 730,
"project_id": 3,
}

@@ -60,7 +60,7 @@ def make_dist_payload(use_case, org_id, rand_str, value_len, b64_encode):
}
),
"org_id": org_id,
"retention_days": 90,
"retention_days": 730,
"project_id": 3,
}

@@ -93,7 +93,7 @@ def make_set_payload(use_case, org_id, rand_str, value_len, b64_encode):
}
),
"org_id": org_id,
"retention_days": 90,
"retention_days": 730,
"project_id": 3,
}

@@ -116,7 +116,7 @@ def make_gauge_payload(use_case, org_id, rand_str):
"last": 1,
},
"org_id": org_id,
"retention_days": 90,
"retention_days": 730,
"project_id": 3,
}

2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/organization_releases.py
@@ -353,7 +353,7 @@ def qs_load_func(queryset, total_offset, qs_offset, limit):
release_versions,
filter_params["start"]
if filter_params["start"]
- else datetime.utcnow() - timedelta(days=90),
+ else datetime.utcnow() - timedelta(days=730),
filter_params["end"] if filter_params["end"] else datetime.utcnow(),
)
valid_versions = [
@@ -54,7 +54,7 @@ def get_time_params(start: datetime, end: datetime) -> MappedParams:
anomaly_detection_range = end - start

if anomaly_detection_range > timedelta(days=14):
- snuba_range = timedelta(days=90)
+ snuba_range = timedelta(days=730)
granularity = 3600

elif anomaly_detection_range > timedelta(days=1):
@@ -67,7 +67,7 @@ def get_time_params(start: datetime, end: datetime) -> MappedParams:

additional_time_needed = snuba_range - anomaly_detection_range
now = datetime.now(timezone.utc)
- start_limit = now - timedelta(days=90)
+ start_limit = now - timedelta(days=730)
end_limit = now
start = max(start, start_limit)
end = min(end, end_limit)
@@ -77,7 +77,7 @@ def get_time_params(start: datetime, end: datetime) -> MappedParams:

# If window will go back farther than 90 days, use today - 90 as start
if start - window_increase < start_limit:
- query_start = now - timedelta(days=90)
+ query_start = now - timedelta(days=730)
additional_time_needed -= start - query_start
window_increase = additional_time_needed
# If window extends beyond today, use today as end
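Note: the windowing arithmetic above is easier to read outside the diff. A minimal sketch of the clamping behavior with the new 730-day limit — variable names follow the diff; everything else here is an assumption:

from datetime import datetime, timedelta, timezone

RETENTION = timedelta(days=730)  # was 90 before this change

def clamp_to_retention(start: datetime, end: datetime) -> tuple[datetime, datetime]:
    now = datetime.now(timezone.utc)
    start_limit = now - RETENTION  # oldest queryable timestamp
    end_limit = now  # queries cannot extend into the future
    return max(start, start_limit), min(end, end_limit)

# A 20-day detection range takes the >14d branch: a 730-day snuba range
# at 1-hour granularity, then both ends are clamped to retention.
end = datetime.now(timezone.utc)
start = end - timedelta(days=20)
if end - start > timedelta(days=14):
    snuba_range, granularity = timedelta(days=730), 3600
start, end = clamp_to_retention(start, end)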
2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/project_details.py
@@ -376,7 +376,7 @@ def validate_secondaryGroupingExpiry(self, value):
"Grouping expiry must be sometime within the next 90 days and not in the past. Perhaps you specified the timestamp not in seconds?"
)

- max_expiry_date = now + (91 * 24 * 3600)
+ max_expiry_date = now + (731 * 24 * 3600)
if value > max_expiry_date:
value = max_expiry_date

2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/team_groups_old.py
@@ -35,7 +35,7 @@ def get(self, request: Request, team) -> Response:
.filter(
group_environment_filter,
status=GroupStatus.UNRESOLVED,
- last_seen__gt=datetime.now(UTC) - timedelta(days=90),
+ last_seen__gt=datetime.now(UTC) - timedelta(days=730),
)
.order_by("first_seen")[:limit]
)
2 changes: 1 addition & 1 deletion src/sentry/api/endpoints/team_unresolved_issue_age.py
@@ -52,7 +52,7 @@ def get(self, request: Request, team: Team) -> Response:
.filter(
group_environment_filter,
status=GroupStatus.UNRESOLVED,
- last_seen__gt=datetime.now(UTC) - timedelta(days=90),
+ last_seen__gt=datetime.now(UTC) - timedelta(days=730),
)
.annotate(
bucket=Case(
2 changes: 1 addition & 1 deletion src/sentry/api/serializers/models/group.py
@@ -548,7 +548,7 @@ def _get_start_from_seen_stats(seen_stats: Mapping[Group, SeenStats] | None):

return max(
min(last_seen - timedelta(days=1), datetime.now(timezone.utc) - timedelta(days=14)),
- datetime.now(timezone.utc) - timedelta(days=90),
+ datetime.now(timezone.utc) - timedelta(days=730),
)

@staticmethod
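Note: a worked example of the clamp above (illustrative values, not from the source): with the 730-day floor, a group last seen 400 days ago now yields a start of last_seen - 1 day instead of being pinned to the old 90-day boundary.

from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)
last_seen = now - timedelta(days=400)
start = max(
    min(last_seen - timedelta(days=1), now - timedelta(days=14)),
    now - timedelta(days=730),
)
assert start == last_seen - timedelta(days=1)  # inside the new retention floor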
2 changes: 1 addition & 1 deletion src/sentry/api/utils.py
@@ -63,7 +63,7 @@

logger = logging.getLogger(__name__)

- MAX_STATS_PERIOD = timedelta(days=90)
+ MAX_STATS_PERIOD = timedelta(days=730)


def get_datetime_from_stats_period(
2 changes: 1 addition & 1 deletion src/sentry/conf/server.py
@@ -1720,7 +1720,7 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
# (time in seconds, samples to keep)
(10, 360), # 60 minutes at 10 seconds
(3600, 24 * 7), # 7 days at 1 hour
- (3600 * 24, 90), # 90 days at 1 day
+ (3600 * 24, 730), # 730 days at 1 day
)

# Internal metrics
2 changes: 1 addition & 1 deletion src/sentry/eventstore/snuba/backend.py
@@ -428,7 +428,7 @@ def get_adjacent_event_ids(self, event, filter):
prev_filter.conditions.extend(get_before_event_condition(event))

# We only store 90 days of data, add a few extra days just in case
- prev_filter.start = event.datetime - timedelta(days=100)
+ prev_filter.start = event.datetime - timedelta(days=740)
# the previous event can have the same timestamp, add 1 second
# to the end condition since it uses a less than condition
prev_filter.end = event.datetime + timedelta(seconds=1)
2 changes: 1 addition & 1 deletion src/sentry/incidents/logic.py
@@ -393,7 +393,7 @@ def calculate_incident_time_range(incident, start=None, end=None, windowed_stats

start = end - timedelta(seconds=time_window * WINDOWED_STATS_DATA_POINTS)

- retention = quotas.get_event_retention(organization=incident.organization) or 90
+ retention = quotas.get_event_retention(organization=incident.organization) or 730
start = max(
start.replace(tzinfo=timezone.utc),
datetime.now(timezone.utc) - timedelta(days=retention),
2 changes: 1 addition & 1 deletion src/sentry/ingest/transaction_clusterer/rules.py
@@ -16,7 +16,7 @@
RuleSet = Mapping[ReplacementRule, int]

#: How long a transaction name rule lasts, in seconds.
- TRANSACTION_NAME_RULE_TTL_SECS = 90 * 24 * 60 * 60 # 90 days
+ TRANSACTION_NAME_RULE_TTL_SECS = 730 * 24 * 60 * 60 # 730 days


class RuleStore(Protocol):
2 changes: 1 addition & 1 deletion src/sentry/integrations/slack/unfurl/discover.py
@@ -200,7 +200,7 @@ def unfurl_discover(
else:
interval = saved_query.get("interval")
validated_interval = None
- delta = timedelta(days=90)
+ delta = timedelta(days=730)
if "statsPeriod" in params:
if (parsed_period := parse_stats_period(params["statsPeriod"])) is not None:
delta = parsed_period
2 changes: 1 addition & 1 deletion src/sentry/integrations/time_utils.py
@@ -40,7 +40,7 @@ def time_since(value: datetime):


def get_relative_time(
- anchor: int, relative_days: int, retention_days: int = 90
+ anchor: int, relative_days: int, retention_days: int = 730
) -> Mapping[str, datetime]:
max_time = time.time()
min_time = max_time - retention_days * DAY_IN_SEC
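Note: a self-contained sketch of the epoch-seconds clamp in get_relative_time; DAY_IN_SEC is assumed to be 86400, since the constant is not visible in this hunk.

import time

DAY_IN_SEC = 86400  # assumed; defined elsewhere in the module

def clamp_to_retention_window(anchor: float, retention_days: int = 730) -> float:
    max_time = time.time()
    min_time = max_time - retention_days * DAY_IN_SEC
    # Anchors outside [min_time, max_time] are pulled to the nearest edge.
    return max(min(anchor, max_time), min_time)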
2 changes: 1 addition & 1 deletion src/sentry/issues/endpoints/group_events.py
@@ -91,7 +91,7 @@ def _get_events_snuba(
end: datetime | None,
) -> Response:
default_end = timezone.now()
- default_start = default_end - timedelta(days=90)
+ default_start = default_end - timedelta(days=730)
params: ParamsType = {
"project_id": [group.project_id],
"organization_id": group.project.organization_id,
2 changes: 1 addition & 1 deletion src/sentry/issues/issue_velocity.py
@@ -58,7 +58,7 @@ def calculate_threshold(project: Project) -> float | None:
now = datetime.now()
one_hour_ago = now - timedelta(hours=1)
one_week_ago = now - timedelta(days=7)
- ninety_days_ago = now - timedelta(days=90)
+ ninety_days_ago = now - timedelta(days=730)

subquery = Query(
match=Entity(EntityKey.Events.value),
2 changes: 1 addition & 1 deletion src/sentry/profiles/task.py
@@ -345,7 +345,7 @@ def _normalize_profile(profile: Profile, organization: Organization, project: Pr

@metrics.wraps("process_profile.normalize")
def _normalize(profile: Profile, organization: Organization) -> None:
profile["retention_days"] = quotas.backend.get_event_retention(organization=organization) or 90
profile["retention_days"] = quotas.backend.get_event_retention(organization=organization) or 730
platform = profile["platform"]
version = profile.get("version")

1 change: 1 addition & 0 deletions src/sentry/release_health/base.py
@@ -109,6 +109,7 @@ class SessionsQueryResult(TypedDict):
"14d",
"30d",
"90d",
"730d",
]

OverviewStat = Literal["users", "sessions"]
4 changes: 2 additions & 2 deletions src/sentry/release_health/metrics.py
@@ -538,7 +538,7 @@ def check_has_health_data(
if now is None:
now = datetime.now(timezone.utc)

- start = now - timedelta(days=90)
+ start = now - timedelta(days=730)

projects_list = list(projects_list)

@@ -1264,7 +1264,7 @@ def get_oldest_health_data_for_releases(
now = datetime.now(timezone.utc)

# TODO: assumption about retention?
- start = now - timedelta(days=90)
+ start = now - timedelta(days=730)

project_ids = [proj_id for proj_id, _release in project_releases]
projects, org_id = self._get_projects_and_org_id(project_ids)
2 changes: 1 addition & 1 deletion src/sentry/replays/lib/event_linking.py
@@ -130,7 +130,7 @@ def _make_json_binary_payload() -> PayloadUnionType:
"replay_id": replay_id,
"project_id": event.project.id,
"segment_id": None,
"retention_days": 90,
"retention_days": 730,
"payload": _make_json_binary_payload(),
}

2 changes: 1 addition & 1 deletion src/sentry/replays/usecases/events.py
@@ -49,7 +49,7 @@ def _replay_event(project_id: int, replay_id: str, event: dict[str, Any]) -> str
"replay_id": replay_id,
"project_id": project_id,
"segment_id": None,
"retention_days": 90,
"retention_days": 730,
"payload": list(json.dumps(event).encode()),
}
)
4 changes: 2 additions & 2 deletions src/sentry/replays/usecases/reader.py
@@ -184,7 +184,7 @@ def has_archived_segment(project_id: int, replay_id: str) -> bool:
# We request the full 90 day range. This is effectively an unbounded timestamp
# range.
Condition(Column("timestamp"), Op.LT, datetime.now()),
Condition(Column("timestamp"), Op.GTE, datetime.now() - timedelta(days=90)),
Condition(Column("timestamp"), Op.GTE, datetime.now() - timedelta(days=730)),
],
granularity=Granularity(3600),
),
@@ -218,7 +218,7 @@ def _fetch_segments_from_snuba(
# We request the full 90 day range. This is effectively an unbounded timestamp
# range.
Condition(Column("timestamp"), Op.LT, datetime.now()),
Condition(Column("timestamp"), Op.GTE, datetime.now() - timedelta(days=90)),
Condition(Column("timestamp"), Op.GTE, datetime.now() - timedelta(days=730)),
# Used to dynamically pass the "segment_id" condition for details requests.
*conditions,
],
2 changes: 1 addition & 1 deletion src/sentry/search/events/builder/discover.py
@@ -1003,7 +1003,7 @@ def custom_measurement_map(self) -> list[MetricMeta]:
result: list[MetricMeta] = get_custom_measurements(
project_ids=self.params.project_ids,
organization_id=self.organization_id,
- start=datetime.today() - timedelta(days=90),
+ start=datetime.today() - timedelta(days=730),
end=datetime.today(),
)
# Don't fully fail if we can't get the CM, but still capture the exception
4 changes: 2 additions & 2 deletions src/sentry/search/snuba/executors.py
@@ -812,7 +812,7 @@ def query(
# retention date, which may be closer than 90 days in the past, but
# apparently `retention_window_start` can be None(?), so we need a
# fallback.
- retention_date = max(_f for _f in [retention_window_start, now - timedelta(days=90)] if _f)
+ retention_date = max(_f for _f in [retention_window_start, now - timedelta(days=730)] if _f)
start_params = [
date_from,
retention_date,
@@ -1565,7 +1565,7 @@ def calculate_start_end(
if not end:
end = now + ALLOWED_FUTURE_DELTA

- retention_date = max(_f for _f in [retention_window_start, now - timedelta(days=90)] if _f)
+ retention_date = max(_f for _f in [retention_window_start, now - timedelta(days=730)] if _f)
start_params = [date_from, retention_date, get_search_filter(search_filters, "date", ">")]
start = max(_f for _f in start_params if _f)
end = max([retention_date, end])
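Note: the max(_f for _f in [...] if _f) idiom above filters out a possibly-None retention_window_start before taking the max; a hedged standalone illustration:

from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)
retention_window_start = None  # can apparently be None, per the comment above

# With None filtered out, the 730-day fallback wins by default.
retention_date = max(
    _f for _f in [retention_window_start, now - timedelta(days=730)] if _f
)
assert retention_date == now - timedelta(days=730)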
7 changes: 4 additions & 3 deletions src/sentry/sentry_metrics/aggregation_option_registry.py
@@ -16,11 +16,12 @@ class TimeWindow(Enum):
FOURTEEN_DAYS = "14d"
THIRTY_DAYS = "30d"
NINETY_DAYS = "90d"
+ TWO_YEARS = "730d"


METRIC_ID_AGG_OPTION = {
"d:transactions/measurements.fcp@millisecond": {AggregationOption.HIST: TimeWindow.NINETY_DAYS},
"d:transactions/measurements.lcp@millisecond": {AggregationOption.HIST: TimeWindow.NINETY_DAYS},
"d:transactions/measurements.fcp@millisecond": {AggregationOption.HIST: TimeWindow.TWO_YEARS},
"d:transactions/measurements.lcp@millisecond": {AggregationOption.HIST: TimeWindow.TWO_YEARS},
"d:spans/webvital.inp@millisecond": None,
}

@@ -37,7 +38,7 @@ def get_aggregation_options(mri: str) -> dict[AggregationOption, TimeWindow] | N
return METRIC_ID_AGG_OPTION[mri]
# Then move to use case-level disabled percentiles
elif use_case_id.value in options.get("sentry-metrics.drop-percentiles.per-use-case"):
- return {AggregationOption.DISABLE_PERCENTILES: TimeWindow.NINETY_DAYS}
+ return {AggregationOption.DISABLE_PERCENTILES: TimeWindow.TWO_YEARS}
# And finally 10s granularity if none of the above apply for custom
elif (use_case_id == UseCaseID.CUSTOM) and options.get("sentry-metrics.10s-granularity"):
return {AggregationOption.TEN_SECOND: TimeWindow.SEVEN_DAYS}
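Note: a sketch of how the registry resolves after this change, using local stand-ins for the enums; only the member names mirror the diff, and the AggregationOption value is an assumption.

from enum import Enum

class AggregationOption(Enum):
    HIST = "hist"  # value assumed; only the member name appears in the diff

class TimeWindow(Enum):
    NINETY_DAYS = "90d"
    TWO_YEARS = "730d"

METRIC_ID_AGG_OPTION = {
    "d:transactions/measurements.fcp@millisecond": {AggregationOption.HIST: TimeWindow.TWO_YEARS},
    "d:transactions/measurements.lcp@millisecond": {AggregationOption.HIST: TimeWindow.TWO_YEARS},
}

# MRI-level options take precedence, so fcp histograms now aggregate
# over the two-year window.
opts = METRIC_ID_AGG_OPTION.get("d:transactions/measurements.fcp@millisecond")
assert opts == {AggregationOption.HIST: TimeWindow.TWO_YEARS}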
2 changes: 1 addition & 1 deletion src/sentry/sentry_metrics/client/kafka.py
@@ -33,7 +33,7 @@ def get_retention_from_org_id(org_id: int) -> int:
else:
# the default in Snuba is 90 days, and if there is no
# org-configured retention stored, we use that default
- retention = quotas.backend.get_event_retention(organization=org_id) or 90
+ retention = quotas.backend.get_event_retention(organization=org_id) or 730
cache.set(cache_key, retention)

return retention
2 changes: 1 addition & 1 deletion src/sentry/sentry_metrics/client/snuba.py
@@ -26,7 +26,7 @@ def get_retention_from_org_id(org_id: int) -> int:
else:
# the default in Snuba is 90 days, and if there is no
# org-configured retention stored, we use that default
- retention = quotas.backend.get_event_retention(organization=org_id) or 90
+ retention = quotas.backend.get_event_retention(organization=org_id) or 730
cache.set(cache_key, retention)

return retention
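Note: kafka.py and snuba.py share this lookup; a simplified sketch of the cache-or-default pattern with the new fallback — the dict cache and the lookup callable stand in for Sentry's cache and quotas backends.

_retention_cache: dict[int, int] = {}

def get_retention_from_org_id(org_id: int, lookup) -> int:
    if org_id in _retention_cache:
        return _retention_cache[org_id]
    # No org-configured retention stored: fall back to the 730-day default.
    retention = lookup(org_id) or 730
    _retention_cache[org_id] = retention
    return retention

# An org with no configured retention gets 730.
assert get_retention_from_org_id(42, lambda org_id: None) == 730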
4 changes: 2 additions & 2 deletions src/sentry/sentry_metrics/consumers/indexer/batch.py
@@ -457,7 +457,7 @@ def reconstruct_messages(
new_payload_v1: Metric = {
"tags": cast(dict[str, int], new_tags),
# XXX: relay actually sends this value unconditionally
"retention_days": old_payload_value.get("retention_days", 90),
"retention_days": old_payload_value.get("retention_days", 730),
"mapping_meta": output_message_meta,
"use_case_id": old_payload_value["use_case_id"].value,
"metric_id": numeric_metric_id,
@@ -477,7 +477,7 @@ def reconstruct_messages(
new_payload_v2: GenericMetric = {
"tags": cast(dict[str, str], new_tags),
"version": 2,
"retention_days": old_payload_value.get("retention_days", 90),
"retention_days": old_payload_value.get("retention_days", 730),
"mapping_meta": output_message_meta,
"use_case_id": old_payload_value["use_case_id"].value,
"metric_id": numeric_metric_id,
2 changes: 1 addition & 1 deletion src/sentry/sentry_metrics/indexer/postgres/models.py
@@ -50,7 +50,7 @@ class BaseIndexer(Model):
organization_id = BoundedBigIntegerField()
date_added = models.DateTimeField(default=timezone.now)
last_seen = models.DateTimeField(default=timezone.now, db_index=True)
- retention_days = models.IntegerField(default=90)
+ retention_days = models.IntegerField(default=730)

objects: ClassVar[BaseManager[Self]] = BaseManager(
cache_fields=("pk",), cache_ttl=settings.SENTRY_METRICS_INDEXER_CACHE_TTL
4 changes: 2 additions & 2 deletions src/sentry/snuba/metrics_layer/query.py
@@ -586,7 +586,7 @@ def _query_meta_table(
Condition(Column("org_id"), Op.EQ, org_id),
Condition(Column("project_id"), Op.IN, project_ids),
Condition(Column("use_case_id"), Op.EQ, use_case_id.value),
Condition(Column("timestamp"), Op.GTE, datetime.now(UTC) - timedelta(days=90)),
Condition(Column("timestamp"), Op.GTE, datetime.now(UTC) - timedelta(days=730)),
Condition(Column("timestamp"), Op.LT, datetime.now(UTC) + timedelta(days=1)),
]
if extra_condition:
@@ -674,7 +674,7 @@ def fetch_metric_tag_values(
Condition(Column("project_id"), Op.IN, project_ids),
Condition(Column("metric_id"), Op.EQ, metric_id),
Condition(Column("tag_key"), Op.EQ, tag_key_id),
Condition(Column("timestamp"), Op.GTE, datetime.now(UTC) - timedelta(days=90)),
Condition(Column("timestamp"), Op.GTE, datetime.now(UTC) - timedelta(days=730)),
Condition(Column("timestamp"), Op.LT, datetime.now(UTC) + timedelta(days=1)),
]
