Skip to content

Commit

Permalink
Merge branch 'master' into master
Browse files Browse the repository at this point in the history
  • Loading branch information
giovanni-guidini authored Apr 5, 2024
2 parents 12f0eb9 + 00ad9db commit c2d9747
Show file tree
Hide file tree
Showing 121 changed files with 2,864 additions and 1,246 deletions.
39 changes: 4 additions & 35 deletions .envrc
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,8 @@ source "${SENTRY_ROOT}/scripts/lib.sh"
# consequently, using "exit" anywhere will skip this notice from showing.
# so need to use set -e, and return 1.
trap notice ERR
# This is used to group issues on Sentry.io.
# If an issue does not call info() or die() it will be grouped under this
error_message="Unknown issue"
# This has to be the same value as what sentry-cli accepts
log_level="info"

complete_success="yup"

help_message() {
cat <<EOF
Expand All @@ -52,17 +49,6 @@ EOF
notice() {
[ $? -eq 0 ] && return
failure_message
[ -z "${SENTRY_DEVENV_NO_REPORT+x}" ] && report_to_sentry
}

report_to_sentry() {
if ! require sentry-cli; then
curl -sL https://sentry.io/get-cli/ | bash
fi
# Report to sentry-dev-env project
SENTRY_DSN="https://[email protected]/5723503" \
sentry-cli send-event -m "$error_message" --logfile "$_SENTRY_LOG_FILE" --level "$log_level"
rm "$_SENTRY_LOG_FILE"
}

debug() {
Expand All @@ -77,15 +63,11 @@ info() {

warn() {
echo -e "${yellow}${*}${reset}" >&2
log_level="warning"
complete_success="nope"
}

die() {
echo -e "${red}${bold}FATAL: ${*}${reset}" >&2
# When reporting to Sentry, this will allow grouping the errors differently
# NOTE: The first line of the output is used to group issues
error_message=("${@}")
log_level="error"
return 1
}

Expand Down Expand Up @@ -139,19 +121,6 @@ else
export SENTRY_DEVSERVICES_DSN=https://[email protected]/1492057
fi

# We can remove these lines in a few months
if [ "$SHELL" == "/bin/zsh" ]; then
zshrc_path="${HOME}/.zshrc"
header="# Apple M1 environment variables"
if grep -qF "${header}" "${zshrc_path}"; then
echo -e "\n${red}Please delete from ${zshrc_path}, the following three lines:${reset}"
echo -e "${header}
export CPATH=/opt/homebrew/Cellar/librdkafka/1.8.2/include
export LDFLAGS=-L/opt/homebrew/Cellar/gettext/0.21/lib"
echo -e "\nWe have moved exporting of these variables to the right place."
return 1
fi
fi

### System ###

Expand Down Expand Up @@ -233,7 +202,7 @@ if [ ${#commands_to_run[@]} -ne 0 ]; then
show_commands_info
fi

if [ "${log_level}" != "info" ]; then
if [ "${complete_success}" != "yup" ]; then
help_message
warn "\nPartial success. The virtualenv is active, however, you're not fully up-to-date (see messages above)."
else
Expand Down
2 changes: 1 addition & 1 deletion .github/CODEOWNERS
Validating CODEOWNERS rules …
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

## Snuba
/src/sentry/eventstream/ @getsentry/owners-snuba
/src/sentry/consumers/ @getsentry/owners-snuba
/src/sentry/consumers/ @getsentry/ops @getsentry/owners-snuba
/src/sentry/post_process_forwarder/ @getsentry/owners-snuba
/src/sentry/utils/snuba.py @getsentry/owners-snuba @getsentry/performance
/src/sentry/utils/snql.py @getsentry/owners-snuba
Expand Down
2 changes: 1 addition & 1 deletion migrations_lockfile.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,5 @@ feedback: 0004_index_together
hybridcloud: 0015_apitokenreplica_hashed_token_index
nodestore: 0002_nodestore_no_dictfield
replays: 0004_index_together
sentry: 0687_alert_rule_project_backfill_migration
sentry: 0688_add_project_flag_high_priority_alerts
social_auth: 0002_default_auto_field
4 changes: 0 additions & 4 deletions scripts/do.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,6 @@ HERE="$(
# shellcheck disable=SC1090
source "${HERE}/lib.sh"

# This block is to enable reporting issues to Sentry.io
# SENTRY_DSN already defined in .envrc
configure-sentry-cli

# This guarantees that we're within a venv. A caller that is not within
# a venv can avoid enabling this by setting SENTRY_NO_VENV_CHECK
[ -z "${SENTRY_NO_VENV_CHECK+x}" ] && eval "${HERE}/ensure-venv.sh"
Expand Down
12 changes: 0 additions & 12 deletions scripts/lib.sh
Original file line number Diff line number Diff line change
Expand Up @@ -34,18 +34,6 @@ require() {
command -v "$1" >/dev/null 2>&1
}

configure-sentry-cli() {
if [ -z "${SENTRY_DEVENV_NO_REPORT+x}" ]; then
if ! require sentry-cli; then
if [ -f "${venv_name}/bin/pip" ]; then
pip-install sentry-cli
else
curl -sL https://sentry.io/get-cli/ | SENTRY_CLI_VERSION=2.14.4 bash
fi
fi
fi
}

query-valid-python-version() {
python_version=$(python3 -V 2>&1 | awk '{print $2}')
if [[ -n "${SENTRY_PYTHON_VERSION:-}" ]]; then
Expand Down
18 changes: 18 additions & 0 deletions src/sentry/api/endpoints/internal/integration_proxy.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,20 @@

import logging
from collections import defaultdict
from collections.abc import Mapping
from typing import Any
from urllib.parse import urljoin

from django.http import HttpRequest, HttpResponse, HttpResponseBadRequest
from requests import Request, Response
from rest_framework.request import Request as DRFRequest
from rest_framework.response import Response as DRFResponse
from sentry_sdk import Scope

from sentry.api.api_owners import ApiOwner
from sentry.api.api_publish_status import ApiPublishStatus
from sentry.api.base import Endpoint, control_silo_endpoint
from sentry.auth.exceptions import IdentityNotValid
from sentry.constants import ObjectStatus
from sentry.models.integrations.organization_integration import OrganizationIntegration
from sentry.silo.base import SiloMode
Expand Down Expand Up @@ -221,3 +227,15 @@ def http_method_not_allowed(self, request):
)
logger.info("proxy_success", extra=self.log_extra)
return response

def handle_exception(  # type: ignore[override]
    self,
    request: DRFRequest,
    exc: Exception,
    handler_context: Mapping[str, Any] | None = None,
    scope: Scope | None = None,
) -> DRFResponse:
    """Translate identity failures into a client error response.

    Any exception other than ``IdentityNotValid`` is delegated to the
    default endpoint exception handler unchanged.
    """
    if not isinstance(exc, IdentityNotValid):
        return super().handle_exception(request, exc, handler_context, scope)

    # An invalid identity is a problem with the caller's credentials,
    # not a server fault, so answer with a plain 400.
    return self.respond(status=400)
1 change: 1 addition & 0 deletions src/sentry/api/endpoints/organization_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
Referrer.API_PERFORMANCE_STATUS_BREAKDOWN.value,
Referrer.API_PERFORMANCE_VITAL_DETAIL.value,
Referrer.API_PERFORMANCE_DURATIONPERCENTILECHART.value,
Referrer.API_PERFORMANCE_TRANSACTIONS_STATISTICAL_DETECTOR_ROOT_CAUSE_ANALYSIS.value,
Referrer.API_PROFILING_LANDING_TABLE.value,
Referrer.API_PROFILING_LANDING_FUNCTIONS_CARD.value,
Referrer.API_PROFILING_PROFILE_SUMMARY_TOTALS.value,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -399,13 +399,15 @@ class ExampleSpan:
start_timestamp: float
finish_timestamp: float
exclusive_time: float
trace_id: str

def serialize(self) -> Any:
    """Map this span's fields onto the camelCase keys used in the API payload."""
    field_to_key = {
        "id": "id",
        "start_timestamp": "startTimestamp",
        "finish_timestamp": "finishTimestamp",
        "exclusive_time": "exclusiveTime",
        "trace_id": "trace",
    }
    return {key: getattr(self, attr) for attr, key in field_to_key.items()}


Expand Down Expand Up @@ -803,6 +805,7 @@ def get_example_transaction(
start_timestamp=span["start_timestamp"],
finish_timestamp=span["timestamp"],
exclusive_time=span["exclusive_time"],
trace_id=trace_context["trace_id"],
)
for span in matching_spans
]
Expand Down
10 changes: 10 additions & 0 deletions src/sentry/api/endpoints/project_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from sentry.api.base import region_silo_endpoint
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import EventSerializer, SimpleEventSerializer, serialize
from sentry.snuba.events import Columns
from sentry.types.ratelimit import RateLimit, RateLimitCategory


Expand Down Expand Up @@ -42,6 +43,9 @@ def get(self, request: Request, project) -> Response:
include the full event body, including the stacktrace.
Set to 1 to enable.
:qparam bool sample: return events in pseudo-random order. This is deterministic,
the same query will return the same events in the same order.
:pparam string organization_slug: the slug of the organization the
groups belong to.
:pparam string project_slug: the slug of the project the groups
Expand All @@ -61,6 +65,7 @@ def get(self, request: Request, project) -> Response:
event_filter.start = timezone.now() - timedelta(days=7)

full = request.GET.get("full", False)
sample = request.GET.get("sample", False)

data_fn = partial(
eventstore.backend.get_events,
Expand All @@ -69,6 +74,11 @@ def get(self, request: Request, project) -> Response:
tenant_ids={"organization_id": project.organization_id},
)

if sample:
# not a true random ordering, but event_id is UUID, that's random enough
# for our purposes and doesn't have heavy performance impact
data_fn = partial(data_fn, orderby=[Columns.EVENT_ID.value.alias])

serializer = EventSerializer() if full else SimpleEventSerializer()
return self.paginate(
request=request,
Expand Down
28 changes: 28 additions & 0 deletions src/sentry/buffer/redis.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import logging
import pickle
import threading
from collections.abc import Callable
from datetime import date, datetime, timezone
from enum import Enum
from time import time
Expand Down Expand Up @@ -52,6 +53,33 @@ def _validate_json_roundtrip(value: dict[str, Any], model: type[models.Model]) -
logger.exception("buffer.invalid_value", extra={"value": value, "model": model})


class BufferHookEvent(Enum):
    """Buffer lifecycle events that hook handlers may subscribe to."""

    FLUSH = "flush"


class BufferHookRegistry:
    """Maps each ``BufferHookEvent`` to a single handler callable.

    Registering a second handler for the same event silently replaces
    the first one.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # One handler per event; later registrations overwrite earlier ones.
        self._registry: dict[BufferHookEvent, Callable[..., Any]] = {}

    def add_handler(self, key: BufferHookEvent) -> Callable[..., Any]:
        """Decorator that registers the wrapped function for ``key``."""

        def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
            self._registry[key] = func
            return func

        return decorator

    def callback(self, buffer_hook_event: BufferHookEvent, data: RedisBuffer) -> bool:
        """Invoke the handler for ``buffer_hook_event``.

        Returns False when no handler is registered; otherwise returns the
        handler's (boolean) result.
        """
        handler = self._registry.get(buffer_hook_event)
        if handler is None:
            return False

        return handler(data)


redis_buffer_registry = BufferHookRegistry()


class RedisOperation(Enum):
SET_ADD = "sadd"
SET_GET = "smembers"
Expand Down
2 changes: 2 additions & 0 deletions src/sentry/conf/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -1527,6 +1527,8 @@ def custom_parameter_sort(parameter: dict) -> tuple[str, int]:
"organizations:ddm-metrics-api-unit-normalization": False,
# Enables import of metric dashboards
"organizations:ddm-dashboard-import": False,
# Enables category "metrics" in stats_v2 endpoint
"organizations:metrics-stats": False,
# Enable the default alert at project creation to be the high priority alert
"organizations:default-high-priority-alerts": False,
# Enables automatically deriving of code mappings
Expand Down
40 changes: 23 additions & 17 deletions src/sentry/event_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,23 +50,29 @@
GroupingConfig,
get_grouping_config_dict_for_project,
)
from sentry.grouping.ingest import (
add_group_id_to_grouphashes,
check_for_category_mismatch,
check_for_group_creation_load_shed,
extract_hashes,
from sentry.grouping.ingest.config import (
is_in_transition,
project_uses_optimized_grouping,
update_grouping_config_if_needed,
)
from sentry.grouping.ingest.hashing import (
find_existing_grouphash,
find_existing_grouphash_new,
get_hash_values,
is_in_transition,
maybe_run_background_grouping,
maybe_run_secondary_grouping,
project_uses_optimized_grouping,
run_primary_grouping,
)
from sentry.grouping.ingest.metrics import (
record_calculation_metric_with_result,
record_hash_calculation_metrics,
record_new_group_metrics,
run_primary_grouping,
update_grouping_config_if_needed,
)
from sentry.grouping.ingest.utils import (
add_group_id_to_grouphashes,
check_for_category_mismatch,
check_for_group_creation_load_shed,
extract_hashes,
)
from sentry.grouping.result import CalculatedHashes
from sentry.ingest.inbound_filters import FilterStatKeys
Expand Down Expand Up @@ -1457,8 +1463,8 @@ def _save_aggregate(
metrics.timer("event_manager.create_group_transaction") as metric_tags,
transaction.atomic(router.db_for_write(GroupHash)),
):
span.set_tag("create_group_transaction.outcome", "no_group")
metric_tags["create_group_transaction.outcome"] = "no_group"
span.set_tag("outcome", "wait_for_lock")
metric_tags["outcome"] = "wait_for_lock"

all_grouphash_ids = [h.id for h in flat_grouphashes]
if root_hierarchical_grouphash is not None:
Expand Down Expand Up @@ -1496,8 +1502,8 @@ def _save_aggregate(
is_new = True
is_regression = False

span.set_tag("create_group_transaction.outcome", "new_group")
metric_tags["create_group_transaction.outcome"] = "new_group"
span.set_tag("outcome", "new_group")
metric_tags["outcome"] = "new_group"
record_calculation_metric_with_result(
project=project,
has_secondary_hashes=has_secondary_hashes,
Expand Down Expand Up @@ -1782,8 +1788,8 @@ def create_group_with_grouphashes(
metrics.timer("event_manager.create_group_transaction") as metrics_timer_tags,
transaction.atomic(router.db_for_write(GroupHash)),
):
span.set_tag("create_group_transaction.outcome", "no_group")
metrics_timer_tags["create_group_transaction.outcome"] = "no_group"
span.set_tag("outcome", "wait_for_lock")
metrics_timer_tags["outcome"] = "wait_for_lock"

# If we're in this branch, we checked our grouphashes and didn't find one with a group
# attached. We thus want to create a new group, but we need to guard against another
Expand All @@ -1810,8 +1816,8 @@ def create_group_with_grouphashes(
# If we still haven't found a matching grouphash, we're now safe to go ahead and create
# the group.
if existing_grouphash is None:
span.set_tag("create_group_transaction.outcome", "new_group")
metrics_timer_tags["create_group_transaction.outcome"] = "new_group"
span.set_tag("outcome", "new_group")
metrics_timer_tags["outcome"] = "new_group"
record_new_group_metrics(event)

group = _create_group(project, event, **group_processing_kwargs)
Expand Down
1 change: 1 addition & 0 deletions src/sentry/features/temporary.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ def register_temporary_features(manager: FeatureManager):
manager.add("organizations:ddm-experimental", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:ddm-metrics-api-unit-normalization", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:ddm-sidebar-item-hidden", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:metrics-stats", OrganizationFeature, FeatureHandlerStrategy.REMOTE)
manager.add("organizations:ddm-ui", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
manager.add("organizations:default-high-priority-alerts", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
manager.add("organizations:deprecate-fid-from-performance-score", OrganizationFeature, FeatureHandlerStrategy.INTERNAL)
Expand Down
Empty file.
Loading

0 comments on commit c2d9747

Please sign in to comment.