Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ref: annotate another create_project #73198

Merged
merged 1 commit into from
Jun 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion bin/mock-replay
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def create_recording(replay_id, project_id, timestamp):
store_replay_segments(replay_id, project_id, segment_id, segment)


def store_replay_segments(replay_id: str, project_id: str, segment_id: int, segment):
def store_replay_segments(replay_id: str, project_id: int, segment_id: int, segment) -> None:
f = File.objects.create(name="rr:{segment_id}", type="replay.recording")
f.putfile(BytesIO(compress(dumps_htmlsafe(segment).encode())))
ReplayRecordingSegment.objects.create(
Expand Down
6 changes: 3 additions & 3 deletions src/sentry/replays/testutils.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def assert_expected_response(response: dict[str, Any], expected_response: dict[s


def mock_expected_response(
project_id: str,
project_id: int,
replay_id: str,
started_at: datetime.datetime,
finished_at: datetime.datetime,
Expand Down Expand Up @@ -222,7 +222,7 @@ def mock_replay(

def mock_replay_click(
timestamp: datetime.datetime,
project_id: str,
project_id: int,
replay_id: str,
**kwargs: Any,
) -> dict[str, Any]:
Expand Down Expand Up @@ -266,7 +266,7 @@ def mock_replay_click(

def mock_replay_viewed(
timestamp: float,
project_id: str,
project_id: int,
replay_id: str,
viewed_by_id: int,
retention_days: int = 30,
Expand Down
4 changes: 2 additions & 2 deletions src/sentry/testutils/cases.py
Original file line number Diff line number Diff line change
Expand Up @@ -2460,10 +2460,10 @@ def store_replays(self, replays):
def store_replay_segments(
self,
replay_id: str,
project_id: str,
project_id: int,
segment_id: int,
segment,
):
) -> None:
f = File.objects.create(name="rr:{segment_id}", type="replay.recording")
f.putfile(BytesIO(compress(dumps_htmlsafe(segment).encode())))
ReplayRecordingSegment.objects.create(
Expand Down
2 changes: 1 addition & 1 deletion src/sentry/testutils/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ def create_environment(self, project=None, **kwargs):
project = self.project
return Factories.create_environment(project=project, **kwargs)

def create_project(self, **kwargs):
def create_project(self, **kwargs) -> Project:
    """Create and return a project fixture via ``Factories.create_project``.

    If the caller does not pass ``teams``, defaults it to ``[self.team]``.
    NOTE(review): ``self.team`` is presumably a lazily-created fixture
    attribute, so it is deliberately only touched when ``teams`` was
    omitted — confirm before simplifying to ``kwargs.setdefault``.
    """
    if "teams" not in kwargs:
        kwargs["teams"] = [self.team]
    return Factories.create_project(**kwargs)
Expand Down
4 changes: 2 additions & 2 deletions src/sentry/tsdb/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -685,13 +685,13 @@ def get_frequency_series(
def get_frequency_totals(
self,
model: TSDBModel,
items: Mapping[str, Sequence[str]],
items: Mapping[TSDBKey, Sequence[TSDBItem]],
start: datetime,
end: datetime | None = None,
rollup: int | None = None,
environment_id: int | None = None,
tenant_ids: dict[str, str | int] | None = None,
) -> dict[str, dict[str, float]]:
) -> dict[TSDBKey, dict[TSDBItem, float]]:
"""
Retrieve the total frequency of known items in a table over time.

Expand Down
15 changes: 8 additions & 7 deletions src/sentry/tsdb/dummy.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,13 +141,14 @@ def get_frequency_series(

def get_frequency_totals(
self,
model,
items: Mapping[str, Sequence[str]],
start,
end=None,
rollup=None,
environment_id=None,
):
model: TSDBModel,
items: Mapping[TSDBKey, Sequence[TSDBItem]],
start: datetime,
end: datetime | None = None,
rollup: int | None = None,
environment_id: int | None = None,
tenant_ids: dict[str, str | int] | None = None,
) -> dict[TSDBKey, dict[TSDBItem, float]]:
self.validate_arguments([model], [environment_id])
results = {}
for key, members in items.items():
Expand Down
15 changes: 8 additions & 7 deletions src/sentry/tsdb/inmemory.py
Original file line number Diff line number Diff line change
Expand Up @@ -310,13 +310,14 @@ def get_frequency_series(

def get_frequency_totals(
self,
model,
items: Mapping[str, Sequence[str]],
start,
end=None,
rollup=None,
environment_id=None,
):
model: TSDBModel,
items: Mapping[TSDBKey, Sequence[TSDBItem]],
start: datetime,
end: datetime | None = None,
rollup: int | None = None,
environment_id: int | None = None,
tenant_ids: dict[str, str | int] | None = None,
) -> dict[TSDBKey, dict[TSDBItem, float]]:
self.validate_arguments([model], [environment_id])

results = {}
Expand Down
6 changes: 3 additions & 3 deletions src/sentry/tsdb/redis.py
Original file line number Diff line number Diff line change
Expand Up @@ -964,19 +964,19 @@ def get_frequency_series(
def get_frequency_totals(
self,
model: TSDBModel,
items: Mapping[str, Sequence[str]],
items: Mapping[TSDBKey, Sequence[TSDBItem]],
start: datetime,
end: datetime | None = None,
rollup: int | None = None,
environment_id: int | None = None,
tenant_ids: dict[str, str | int] | None = None,
) -> dict[str, dict[str, float]]:
) -> dict[TSDBKey, dict[TSDBItem, float]]:
self.validate_arguments([model], [environment_id])

if not self.enable_frequency_sketches:
raise NotImplementedError("Frequency sketches are disabled.")

responses: dict[str, dict[str, float]] = {}
responses: dict[TSDBKey, dict[TSDBItem, float]] = {}
frequency_series = self.get_frequency_series(
model, items, start, end, rollup, environment_id
)
Expand Down
16 changes: 8 additions & 8 deletions src/sentry/tsdb/snuba.py
Original file line number Diff line number Diff line change
Expand Up @@ -911,14 +911,14 @@ def get_frequency_series(

def get_frequency_totals(
self,
model,
items: Mapping[str, Sequence[str]],
start,
end=None,
rollup=None,
environment_id=None,
tenant_ids=None,
):
model: TSDBModel,
items: Mapping[TSDBKey, Sequence[TSDBItem]],
start: datetime,
end: datetime | None = None,
rollup: int | None = None,
environment_id: int | None = None,
tenant_ids: dict[str, str | int] | None = None,
) -> dict[TSDBKey, dict[TSDBItem, float]]:
return self.get_data(
model,
items,
Expand Down
18 changes: 10 additions & 8 deletions tests/sentry/api/endpoints/test_organization_release_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ def test_incorrect_sort_option_should_return_invalid_sort_response(self):
"sentry-api-0-organization-release-details",
kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version},
)
response = self.client.get(url, {"project": self.project1.id, "sort": "invalid_sort"})
response = self.client.get(url, {"project": str(self.project1.id), "sort": "invalid_sort"})
assert response.status_code == 400

def test_get_prev_and_next_release_to_current_release_on_date_sort(self):
Expand Down Expand Up @@ -329,7 +329,7 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_env_filter_ap
"version": release_3.version,
},
)
response = self.client.get(url, {"project": self.project1.id, "environment": ["prod"]})
response = self.client.get(url, {"project": str(self.project1.id), "environment": ["prod"]})
assert response.status_code == 200
assert response.data["currentProjectMeta"]["nextReleaseVersion"] is None
assert response.data["currentProjectMeta"]["prevReleaseVersion"] == "foobar@1.0.0"
Expand Down Expand Up @@ -384,7 +384,7 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_status_filter
"version": release_3.version,
},
)
response = self.client.get(url, {"project": self.project1.id, "status": "archived"})
response = self.client.get(url, {"project": str(self.project1.id), "status": "archived"})
assert response.status_code == 200
assert response.data["currentProjectMeta"]["prevReleaseVersion"] is None
assert response.data["currentProjectMeta"]["nextReleaseVersion"] is None
Expand Down Expand Up @@ -421,7 +421,7 @@ def test_get_prev_and_next_release_to_current_release_on_date_sort_query_filter_
"version": release_2.version,
},
)
response = self.client.get(url, {"project": self.project1.id, "query": "foobar@1"})
response = self.client.get(url, {"project": str(self.project1.id), "query": "foobar@1"})
assert response.status_code == 200
assert response.data["currentProjectMeta"]["prevReleaseVersion"] == "foobar@1.0.0"
assert response.data["currentProjectMeta"]["nextReleaseVersion"] is None
Expand Down Expand Up @@ -461,7 +461,9 @@ def test_get_prev_and_next_release_on_date_sort_does_not_apply_stats_period_filt
"version": release_1.version,
},
)
response = self.client.get(url, {"project": self.project1.id, "summaryStatsPeriod": "24h"})
response = self.client.get(
url, {"project": str(self.project1.id), "summaryStatsPeriod": "24h"}
)
assert response.status_code == 200
assert response.data["currentProjectMeta"]["nextReleaseVersion"] == "foobar@2.0.0"
assert response.data["currentProjectMeta"]["prevReleaseVersion"] == "foobar@3.0.0"
Expand Down Expand Up @@ -581,7 +583,7 @@ def test_get_first_and_last_release_on_date_sort_env_filter_applied(self):
"version": release_3.version,
},
)
response = self.client.get(url, {"project": self.project1.id, "environment": ["prod"]})
response = self.client.get(url, {"project": str(self.project1.id), "environment": ["prod"]})
assert response.status_code == 200
assert response.data["currentProjectMeta"]["firstReleaseVersion"] == "foobar@2.0.0"
assert response.data["currentProjectMeta"]["lastReleaseVersion"] == "foobar@3.0.0"
Expand All @@ -607,7 +609,7 @@ def test_get_first_and_last_release_on_non_date_sort(self):
"version": release_1.version,
},
)
response = self.client.get(url, {"project": self.project1.id, "sort": "sessions"})
response = self.client.get(url, {"project": str(self.project1.id), "sort": "sessions"})
assert response.status_code == 400
assert response.data["detail"] == "invalid sort"

Expand All @@ -632,7 +634,7 @@ def test_get_first_and_last_release_when_project_has_no_releases(self):
"sentry-api-0-organization-release-details",
kwargs={"organization_id_or_slug": self.organization.slug, "version": release.version},
)
response = self.client.get(url, {"project": self.project1.id, "environment": ["test"]})
response = self.client.get(url, {"project": str(self.project1.id), "environment": ["test"]})
assert response.status_code == 200
assert response.data["currentProjectMeta"]["firstReleaseVersion"] is None
assert response.data["currentProjectMeta"]["lastReleaseVersion"] is None
Expand Down
2 changes: 1 addition & 1 deletion tests/sentry/features/test_flagpole_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def test_with_invalid_organization(self):
organization_context_transformer(SentryContextData(organization=1234)) # type: ignore[arg-type]

with pytest.raises(InvalidContextDataException):
organization_context_transformer(SentryContextData(organization=self.create_project()))
organization_context_transformer(SentryContextData(organization=self.create_project())) # type: ignore[arg-type]

def test_with_valid_organization(self):
org = self.create_organization(slug="foobar", name="Foo Bar")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ def setup_project_and_rules(self):
organization=self.org, teams=[self.team], name="Elephant"
)
self.projects = [self.project, self.project2]
self.project_ids = [self.project.id, self.project2.id]
self.project_ids = [str(self.project.id), str(self.project2.id)]
self.alert_rule = self.create_alert_rule(
name="alert rule",
organization=self.org,
Expand Down Expand Up @@ -128,7 +128,12 @@ def test_limit_as_1_with_paging_sort_name(self):
self.setup_project_and_rules()
# Test Limit as 1, no cursor:
with self.feature(["organizations:incidents", "organizations:performance-view"]):
request_data = {"per_page": "1", "project": self.project.id, "sort": "name", "asc": 1}
request_data = {
"per_page": "1",
"project": str(self.project.id),
"sort": "name",
"asc": "1",
}
response = self.client.get(
path=self.combined_rules_url, data=request_data, content_type="application/json"
)
Expand All @@ -145,9 +150,9 @@ def test_limit_as_1_with_paging_sort_name(self):
request_data = {
"cursor": next_cursor,
"per_page": "1",
"project": self.project.id,
"project": str(self.project.id),
"sort": "name",
"asc": 1,
"asc": "1",
}
response = self.client.get(
path=self.combined_rules_url, data=request_data, content_type="application/json"
Expand Down Expand Up @@ -183,7 +188,12 @@ def test_limit_as_1_with_paging_sort_name_urlencode(self):
# Test Limit as 1, no cursor:
url = f"/api/0/organizations/{self.org.slug}/combined-rules/"
with self.feature(["organizations:incidents", "organizations:performance-view"]):
request_data = {"per_page": "1", "project": self.project.id, "sort": "name", "asc": 1}
request_data = {
"per_page": "1",
"project": str(self.project.id),
"sort": "name",
"asc": "1",
}
response = self.client.get(
path=url,
data=request_data,
Expand All @@ -202,9 +212,9 @@ def test_limit_as_1_with_paging_sort_name_urlencode(self):
request_data = {
"cursor": next_cursor,
"per_page": "1",
"project": self.project.id,
"project": str(self.project.id),
"sort": "name",
"asc": 1,
"asc": "1",
}
response = self.client.get(path=url, data=request_data, content_type="application/json")
assert response.status_code == 200
Expand Down Expand Up @@ -234,7 +244,7 @@ def test_limit_as_1_with_paging(self):

# Test Limit as 1, next page of previous request:
with self.feature(["organizations:incidents", "organizations:performance-view"]):
request_data = {"cursor": next_cursor, "per_page": "1", "project": self.project.id}
request_data = {"cursor": next_cursor, "per_page": "1", "project": str(self.project.id)}
response = self.client.get(
path=self.combined_rules_url, data=request_data, content_type="application/json"
)
Expand Down Expand Up @@ -561,7 +571,7 @@ def test_myteams_filter_superuser(self):
with self.feature(["organizations:incidents", "organizations:performance-view"]):
request_data = {
"per_page": "10",
"project": [another_project.id],
"project": [str(another_project.id)],
"team": ["myteams"],
}
response = self.client.get(
Expand All @@ -573,7 +583,7 @@ def test_myteams_filter_superuser(self):
with self.feature(["organizations:incidents", "organizations:performance-view"]):
request_data = {
"per_page": "10",
"project": [another_project.id],
"project": [str(another_project.id)],
"team": [another_org_team.id],
}
response = self.client.get(
Expand Down
4 changes: 2 additions & 2 deletions tests/sentry/issues/test_escalating.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,8 +233,8 @@ def test_query_different_orgs(self, mock_logger: MagicMock) -> None:
org_b = self.create_organization()
proj_b = self.create_project(organization=org_b)

event1 = self._create_events_for_group(project_id=proj_a, hours_ago=1)
event_proj_org_b_1 = self._create_events_for_group(project_id=proj_b, hours_ago=1)
event1 = self._create_events_for_group(project_id=proj_a.id, hours_ago=1)
event_proj_org_b_1 = self._create_events_for_group(project_id=proj_b.id, hours_ago=1)

# Since proj_org_b is created
assert query_groups_past_counts(Group.objects.all()) == [
Expand Down
19 changes: 11 additions & 8 deletions tests/sentry/receivers/test_onboarding.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
OnboardingTaskStatus,
OrganizationOnboardingTask,
)
from sentry.models.project import Project
from sentry.models.rule import Rule
from sentry.plugins.bases.issue import IssueTrackingPlugin
from sentry.services.hybrid_cloud.organization import organization_service
Expand Down Expand Up @@ -684,8 +685,10 @@ def test_old_project_sending_minified_stack_trace_event(self, record_analytics):
]
}

# project.flags.has_minified_stack_trace = False
assert not project.flags.has_minified_stack_trace
def _project_has_minified_stack_trace(p: Project) -> bool:
    # Predicate helper so the surrounding asserts re-read the flag from the
    # passed-in project (it is re-checked after project.refresh_from_db()).
    return p.flags.has_minified_stack_trace

assert not _project_has_minified_stack_trace(project)

# Store event
self.store_event(
Expand All @@ -695,8 +698,7 @@ def test_old_project_sending_minified_stack_trace_event(self, record_analytics):

project.refresh_from_db()

# project.flags.has_minified_stack_trace = True
assert project.flags.has_minified_stack_trace
assert _project_has_minified_stack_trace(project)

# The analytic's event "first_event_with_minified_stack_trace_for_project" shall not be sent
count = 0
Expand Down Expand Up @@ -866,16 +868,17 @@ def test_old_project_sending_sourcemap_event(self, record_analytics):
]
}

# project.flags.has_sourcemaps = False
assert not project.flags.has_sourcemaps
def _project_has_sourcemaps(p: Project) -> bool:
    # Predicate helper mirroring _project_has_minified_stack_trace: report
    # whether the given project's has_sourcemaps flag is set.
    # Bug fix: the original read the enclosing scope's `project` instead of
    # the parameter `p`, so the helper silently ignored its argument.
    return p.flags.has_sourcemaps

assert not _project_has_sourcemaps(project)

event = self.store_event(project_id=project.id, data=data)
event_processed.send(project=project, event=event, sender=type(project))

project.refresh_from_db()

# project.flags.has_sourcemaps = True
assert project.flags.has_sourcemaps
assert _project_has_sourcemaps(project)

# The analytic's event "first_event_with_minified_stack_trace_for_project" shall not be sent
count = 0
Expand Down
Loading
Loading