
Commit f4a5282

Revert "ref(querybuilder): Remove params from snuba modules (getsentry#76198)"

This reverts commit 3e923c3.

Co-authored-by: ceorourke <[email protected]>
getsentry-bot and ceorourke committed Aug 16, 2024
1 parent 68f5bef commit f4a5282
Showing 50 changed files with 1,460 additions and 1,224 deletions.
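
Across most of the touched endpoints the revert is the same one-line change per call site: the legacy `params` mapping goes back into the snuba module query functions as an empty dict, passed alongside the newer `snuba_params` object. A minimal sketch of the reinstated call shape, modeled on the hunks below; the wrapper function name and the referrer string are illustrative assumptions, not part of this commit:

from typing import Any

from sentry.search.events.types import SnubaParams
from sentry.snuba import discover


def count_events(query: str, snuba_params: SnubaParams) -> dict[str, Any]:
    # Sketch of the call shape this revert restores: the legacy dict-based
    # `params` argument is passed (empty) alongside the newer SnubaParams object.
    return discover.query(
        selected_columns=["count()"],
        query=query,
        params={},                  # added back by this revert
        snuba_params=snuba_params,  # still passed through
        referrer="example.count-events",  # illustrative referrer, not from the diff
        limit=1,
    )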
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_events.py
@@ -399,6 +399,7 @@ def _data_fn(scoped_dataset, offset, limit, query) -> dict[str, Any]:
    return scoped_dataset.query(
        selected_columns=self.get_field_list(organization, request),
        query=query,
+       params={},
        snuba_params=snuba_params,
        equations=self.get_equation_list(organization, request),
        orderby=self.get_orderby(request),
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_events_facets.py
@@ -36,6 +36,7 @@ def data_fn(offset, limit):
    with handle_query_errors():
        facets = discover.get_facets(
            query=request.GET.get("query"),
+           params={},
            snuba_params=snuba_params,
            referrer="api.organization-events-facets.top-tags",
            per_page=limit,
(changed file, name not shown)

@@ -344,6 +344,7 @@ def query_top_tags(
        "array_join(tags.value) as tags_value",
    ],
    query=filter_query,
+   params={},
    snuba_params=snuba_params,
    orderby=orderby,
    conditions=[

@@ -523,6 +524,7 @@ def query_facet_performance_key_histogram(
    results = discover.histogram_query(
        fields=[aggregate_column],
        user_query=filter_query,
+       params={},
        snuba_params=snuba_params,
        num_buckets=num_buckets_per_key,
        precision=precision,
(changed file, name not shown)

@@ -111,6 +111,7 @@ def get(self, request: Request, organization) -> Response:
    results = discover.query(
        selected_columns=["id"],
        query=query,
+       params={},
        snuba_params=snuba_params,
        limit=1,  # Just want to check for existence of such an event
        referrer="api.events.measurements",
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_events_histogram.py
@@ -81,6 +81,7 @@ def get(self, request: Request, organization) -> Response:
    results = dataset.histogram_query(
        fields=data["field"],
        user_query=data.get("query"),
+       params={},
        snuba_params=snuba_params,
        num_buckets=data["numBuckets"],
        precision=data["precision"],
4 changes: 4 additions & 0 deletions src/sentry/api/endpoints/organization_events_meta.py
@@ -38,6 +38,7 @@ def get(self, request: Request, organization) -> Response:
    with handle_query_errors():
        result = dataset.query(
            selected_columns=["count()"],
+           params={},
            snuba_params=snuba_params,
            query=request.query_params.get("query"),
            referrer=Referrer.API_ORGANIZATION_EVENTS_META.value,

@@ -152,6 +153,7 @@ def get(self, request: Request, organization) -> Response:
        f"p50({column}) as first_bound",
        f"p95({column}) as second_bound",
    ],
+   params={},
    snuba_params=snuba_params,
    query=request.query_params.get("query"),
    referrer=Referrer.API_SPAN_SAMPLE_GET_BOUNDS.value,

@@ -174,6 +176,7 @@ def get(self, request: Request, organization) -> Response:
        "profile_id",
    ],
    orderby=["-profile_id"],
+   params={},
    snuba_params=snuba_params,
    query=request.query_params.get("query"),
    referrer=Referrer.API_SPAN_SAMPLE_GET_SPAN_IDS.value,

@@ -197,6 +200,7 @@ def get(self, request: Request, organization) -> Response:
    result = spans_indexed.query(
        selected_columns=selected_columns,
        orderby=["timestamp"],
+       params={},
        snuba_params=snuba_params,
        query=query,
        limit=9,
(changed file, name not shown)

@@ -199,6 +199,7 @@ def get(self, request, organization):
    transaction_count_query = metrics_query(
        ["count()"],
        f'event.type:transaction transaction:"{transaction_name}"',
+       params={},
        referrer=BASE_REFERRER,
        snuba_params=snuba_params,
    )
(changed file, name not shown)

@@ -64,6 +64,7 @@ def get(self, request: Request, organization) -> Response:
    results = discover.spans_histogram_query(
        span=data["span"],
        user_query=data.get("query"),
+       params={},
        snuba_params=snuba_params,
        num_buckets=data["numBuckets"],
        precision=data["precision"],
2 changes: 2 additions & 0 deletions src/sentry/api/endpoints/organization_events_stats.py
@@ -288,6 +288,7 @@ def _get_event_stats(
    selected_columns=self.get_field_list(organization, request),
    equations=self.get_equation_list(organization, request),
    user_query=query,
+   params={},
    snuba_params=snuba_params,
    orderby=self.get_orderby(request),
    rollup=rollup,

@@ -305,6 +306,7 @@ def _get_event_stats(
    return scoped_dataset.timeseries_query(
        selected_columns=query_columns,
        query=query,
+       params={},
        snuba_params=snuba_params,
        rollup=rollup,
        referrer=referrer,
3 changes: 2 additions & 1 deletion src/sentry/api/endpoints/organization_events_trends.py
@@ -544,11 +544,12 @@ def get_event_stats(
    query_columns,
    selected_columns,
    query,
-   snuba_params,
+   {},
    orderby,
    rollup,
    min(5, len(events_results["data"])),
    organization,
+   snuba_params=snuba_params,
    top_events=events_results,
    referrer="api.trends.get-event-stats",
    zerofill_results=zerofill_results,
2 changes: 2 additions & 0 deletions src/sentry/api/endpoints/organization_events_trends_v2.py
@@ -102,6 +102,7 @@ def get_top_events(user_query, snuba_params, event_limit, referrer):
    return metrics_query(
        top_event_columns,
        query=user_query,
+       params={},
        snuba_params=snuba_params,
        orderby=["-count()"],
        limit=event_limit,

@@ -148,6 +149,7 @@ def get_timeseries(top_events, _, rollup, zerofill_results):
    result = metrics_performance.bulk_timeseries_query(
        timeseries_columns,
        queries,
+       params={},
        snuba_params=pruned_snuba_params,
        rollup=rollup,
        zerofill_results=zerofill_results,
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_events_vitals.py
@@ -69,6 +69,7 @@ def get(self, request: Request, organization) -> Response:
    events_results = dataset.query(
        selected_columns=selected_columns,
        query=request.GET.get("query"),
+       params={},
        snuba_params=snuba_params,
        # Results should only ever have 1 result
        limit=1,
2 changes: 2 additions & 0 deletions src/sentry/api/endpoints/organization_metrics_meta.py
@@ -47,6 +47,7 @@ def get(self, request: Request, organization: Organization) -> Response:
        count_null,
        count_has_txn,
    ],
+   params={},
    snuba_params=snuba_params,
    query=f"{count_null}:0 AND {count_has_txn}:>0",
    referrer="api.organization-events-metrics-compatibility.compatible",

@@ -93,6 +94,7 @@ def get(self, request: Request, organization: Organization) -> Response:
    with handle_query_errors():
        sum_metrics = metrics_performance.query(
            selected_columns=[COUNT_UNPARAM, COUNT_NULL, "count()"],
+           params={},
            snuba_params=snuba_params,
            query="",
            referrer="api.organization-events-metrics-compatibility.sum_metrics",
(changed file, name not shown)

@@ -180,6 +180,7 @@ def get_discover_stats(
    return module.timeseries_query(
        selected_columns=query_columns,
        query=query,
+       params={},
        snuba_params=snuba_params,
        rollup=rollup,
        referrer=Referrer.API_ORGANIZATION_METRICS_ESTIMATION_STATS.value,
2 changes: 2 additions & 0 deletions src/sentry/api/endpoints/organization_profiling_functions.py
@@ -103,6 +103,7 @@ def get(self, request: Request, organization: Organization) -> Response:
        "examples()",
    ],
    query=data.get("query"),
+   params={},
    snuba_params=snuba_params,
    orderby=["-count()"],
    limit=TOP_FUNCTIONS_LIMIT,

@@ -153,6 +154,7 @@ def get_event_stats(
    formatted_results = functions.format_top_events_timeseries_results(
        result,
        builder,
+       params={},
        rollup=rollup,
        snuba_params=snuba_params,
        top_events={"data": chunk},
13 changes: 6 additions & 7 deletions src/sentry/api/endpoints/organization_sdk_updates.py
@@ -14,7 +14,6 @@
from sentry.api.bases.organization import OrganizationEndpoint
from sentry.api.utils import handle_query_errors
from sentry.sdk_updates import SdkIndexState, SdkSetupState, get_sdk_index, get_suggested_updates
- from sentry.search.events.types import SnubaParams
from sentry.snuba import discover
from sentry.utils.numbers import format_grouped_length

@@ -99,12 +98,12 @@ def get(self, request: Request, organization) -> Response:
        "last_seen()",
    ],
    orderby=["-project"],
-   snuba_params=SnubaParams(
-       start=timezone.now() - timedelta(days=1),
-       end=timezone.now(),
-       organization=organization,
-       projects=projects,
-   ),
+   params={
+       "start": timezone.now() - timedelta(days=1),
+       "end": timezone.now(),
+       "organization_id": organization.id,
+       "project_id": [p.id for p in projects],
+   },
    referrer="api.organization-sdk-updates",
)

(changed file, name not shown)

@@ -133,6 +133,7 @@ def get(self, request: Request, organization) -> Response:
    snuba_response = timeseries_query(
        selected_columns=["count()"],
        query=query,
+       params={},
        snuba_params=snuba_params,
        rollup=time_params.granularity,
        referrer="transaction-anomaly-detection",
27 changes: 13 additions & 14 deletions src/sentry/api/serializers/models/project.py
@@ -38,7 +38,6 @@
from sentry.models.userreport import UserReport
from sentry.release_health.base import CurrentAndPreviousCrashFreeRate
from sentry.roles import organization_roles
- from sentry.search.events.types import SnubaParams
from sentry.snuba import discover
from sentry.users.models.user import User

@@ -345,9 +344,9 @@ def measure_span(op_tag):
    project_ids = [o.id for o in item_list]

    if self.stats_period:
-       stats = self.get_stats(item_list, "!event.type:transaction")
+       stats = self.get_stats(project_ids, "!event.type:transaction")
    if self._expand("transaction_stats"):
-       transaction_stats = self.get_stats(item_list, "event.type:transaction")
+       transaction_stats = self.get_stats(project_ids, "event.type:transaction")
    if self._expand("session_stats"):
        session_stats = self.get_session_stats(project_ids)

@@ -392,26 +391,26 @@ def measure_span(op_tag):
            serialized["options"] = options[project.id]
        return result

-   def get_stats(self, projects, query):
+   def get_stats(self, project_ids, query):
        # we need to compute stats at 1d (1h resolution), and 14d
        segments, interval = STATS_PERIOD_CHOICES[self.stats_period]
        now = timezone.now()

-       snuba_params = SnubaParams(
-           projects=projects,
-           start=now - ((segments - 1) * interval),
-           end=now,
-       )
+       params = {
+           "project_id": project_ids,
+           "start": now - ((segments - 1) * interval),
+           "end": now,
+       }
        if self.environment_id:
            query = f"{query} environment:{self.environment_id}"

        # Generate a query result to skip the top_events.find query
-       top_events = {"data": [{"project_id": p.id} for p in projects]}
+       top_events = {"data": [{"project_id": p} for p in project_ids]}
        stats = self.dataset.top_events_timeseries(
            timeseries_columns=["count()"],
            selected_columns=["project_id"],
            user_query=query,
-           snuba_params=snuba_params,
+           params=params,
            orderby="project_id",
            rollup=int(interval.total_seconds()),
            limit=10000,

@@ -420,13 +419,13 @@ def get_stats(self, projects, query):
            top_events=top_events,
        )
        results = {}
-       for project in projects:
+       for project_id in project_ids:
            serialized = []
-           str_id = str(project.id)
+           str_id = str(project_id)
            if str_id in stats:
                for item in stats[str_id].data["data"]:
                    serialized.append((item["time"], item.get("count", 0)))
-           results[project.id] = serialized
+           results[project_id] = serialized
        return results

    def get_session_stats(
5 changes: 2 additions & 3 deletions src/sentry/data_export/endpoints/data_export.py
@@ -102,7 +102,7 @@ def validate(self, data):
    # validate the query string by trying to parse it
    processor = DiscoverProcessor(
        discover_query=query_info,
-       organization=organization,
+       organization_id=organization.id,
    )
    try:
        query_builder_cls = DiscoverQueryBuilder

@@ -111,8 +111,7 @@

    builder = query_builder_cls(
        SUPPORTED_DATASETS[dataset],
-       params={},
-       snuba_params=processor.snuba_params,
+       processor.params,
        query=query_info["query"],
        selected_columns=fields.copy(),
        equations=equations,
34 changes: 17 additions & 17 deletions src/sentry/data_export/processors/discover.py
@@ -7,7 +7,7 @@
from sentry.models.group import Group
from sentry.models.project import Project
from sentry.search.events.fields import get_function_alias
- from sentry.search.events.types import SnubaParams
+ from sentry.search.events.types import ParamsType
from sentry.snuba import discover
from sentry.snuba.utils import get_dataset

@@ -21,20 +21,20 @@ class DiscoverProcessor:
    Processor for exports of discover data based on a provided query
    """

-   def __init__(self, organization, discover_query):
-       self.projects = self.get_projects(organization.id, discover_query)
-       self.environments = self.get_environments(organization.id, discover_query)
+   def __init__(self, organization_id, discover_query):
+       self.projects = self.get_projects(organization_id, discover_query)
+       self.environments = self.get_environments(organization_id, discover_query)
        self.start, self.end = get_date_range_from_params(discover_query)
-       self.snuba_params = SnubaParams(
-           organization=organization,
-           projects=self.projects,
-           start=self.start,
-           end=self.end,
-       )
+       self.params: ParamsType = {
+           "organization_id": organization_id,
+           "project_id": [project.id for project in self.projects],
+           "start": self.start,
+           "end": self.end,
+       }
        # make sure to only include environment if any are given
        # an empty list DOES NOT work
        if self.environments:
-           self.snuba_params.environments = self.environments
+           self.params["environment"] = self.environments

        equations = discover_query.get("equations", [])
        self.header_fields = [get_function_alias(x) for x in discover_query["field"]] + equations

@@ -45,7 +45,7 @@ def __init__(self, organization, discover_query):
    fields=discover_query["field"],
    equations=equations,
    query=discover_query["query"],
-   snuba_params=self.snuba_params,
+   params=self.params,
    sort=discover_query.get("sort"),
    dataset=discover_query.get("dataset"),
)

@@ -76,10 +76,10 @@ def get_environments(organization_id, query):
        if set(requested_environments) != set(environment_names):
            raise ExportError("Requested environment does not exist")

-       return environments
+       return environment_names

    @staticmethod
-   def get_data_fn(fields, equations, query, snuba_params, sort, dataset):
+   def get_data_fn(fields, equations, query, params, sort, dataset):
        dataset = get_dataset(dataset)
        if dataset is None:
            dataset = discover

@@ -89,7 +89,7 @@ def data_fn(offset, limit):
    selected_columns=fields,
    equations=equations,
    query=query,
-   snuba_params=snuba_params,
+   params=params,
    offset=offset,
    orderby=sort,
    limit=limit,

@@ -112,8 +112,8 @@ def handle_fields(self, result_list):
        i.id: i.qualified_short_id
        for i in Group.objects.filter(
            id__in=issue_ids,
-           project__in=self.snuba_params.project_ids,
-           project__organization_id=self.snuba_params.organization_id,
+           project__in=self.params["project_id"],
+           project__organization_id=self.params["organization_id"],
        )
    }
    for result in new_result_list: