diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts
index 774bf3522ad..13d82a8ee10 100644
--- a/frontend/src/lib/api.ts
+++ b/frontend/src/lib/api.ts
@@ -113,6 +113,7 @@ import {
 } from '~/types'
 
 import { AlertType, AlertTypeWrite } from './components/Alerts/types'
+import { ErrorTrackingStackFrame, ErrorTrackingSymbolSet } from './components/Errors/stackFrameLogic'
 import {
     ACTIVITY_PAGE_SIZE,
     DashboardPrivilegeLevel,
@@ -723,6 +724,14 @@ class ApiRequest {
         return this.errorTracking().addPathComponent('stack_frames').withQueryString({ ids })
     }
 
+    public symbolSets(): ApiRequest {
+        return this.errorTracking().withAction('symbol_sets')
+    }
+
+    public symbolSetStackFrames(symbolSetId: string): ApiRequest {
+        return this.symbolSets().withAction(symbolSetId).withAction('stack_frames')
+    }
+
     // # Warehouse
     public dataWarehouseTables(teamId?: TeamType['id']): ApiRequest {
         return this.projectsDetail(teamId).addPathComponent('warehouse_tables')
@@ -1865,6 +1874,14 @@ const api = {
         async fetchStackFrames(ids: string[]): Promise<{ content: string }> {
             return await new ApiRequest().errorTrackingStackFrames(ids).get()
         },
+
+        async fetchSymbolSetStackFrames(symbolSetId: string): Promise<ErrorTrackingStackFrame[]> {
+            return await new ApiRequest().symbolSetStackFrames(symbolSetId).get()
+        },
+
+        async fetchSymbolSets(): Promise<ErrorTrackingSymbolSet[]> {
+            return await new ApiRequest().symbolSets().get()
+        },
     },
 
     recordings: {
diff --git a/frontend/src/lib/components/Errors/ErrorDisplay.tsx b/frontend/src/lib/components/Errors/ErrorDisplay.tsx
index 604b80fcf68..0bc7ce4599d 100644
--- a/frontend/src/lib/components/Errors/ErrorDisplay.tsx
+++ b/frontend/src/lib/components/Errors/ErrorDisplay.tsx
@@ -11,15 +11,15 @@ import { useState } from 'react'
 
 import { EventType } from '~/types'
 
-import { StackFrame } from './stackFrameLogic'
+import { ErrorTrackingStackFrame } from './stackFrameLogic'
 
 interface RawStackTrace {
     type: 'raw'
-    frames: StackFrame[]
+    frames: ErrorTrackingStackFrame[]
 }
 interface ResolvedStackTrace {
     type: 'resolved'
-    frames: StackFrame[]
+    frames: ErrorTrackingStackFrame[]
 }
 
 interface Exception {
@@ -29,7 +29,13 @@ interface Exception {
     value: string
 }
 
-function StackTrace({ frames, showAllFrames }: { frames: StackFrame[]; showAllFrames: boolean }): JSX.Element | null {
+function StackTrace({
+    frames,
+    showAllFrames,
+}: {
+    frames: ErrorTrackingStackFrame[]
+    showAllFrames: boolean
+}): JSX.Element | null {
    const displayFrames = showAllFrames ? frames : frames.filter((f) => f.in_app)
 
    const panels = displayFrames.map(({ filename, lineno, colno, function: functionName }, index) => {
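
A minimal sketch of how the two new client helpers might be called from app code, assuming they live under `api.errorTracking` as the surrounding hunks suggest and that both endpoints return plain arrays of the serialized models (an assumption, not asserted by this diff); `symbolSetId` and `inspectSymbolSets` are illustrative names only:

import api from 'lib/api'
import { ErrorTrackingStackFrame, ErrorTrackingSymbolSet } from 'lib/components/Errors/stackFrameLogic'

// Sketch only: exercises the helpers added in the api.ts hunk above.
async function inspectSymbolSets(symbolSetId: string): Promise<void> {
    // Built from errorTracking().withAction('symbol_sets')
    const symbolSets: ErrorTrackingSymbolSet[] = await api.errorTracking.fetchSymbolSets()

    // Built from symbolSets().withAction(symbolSetId).withAction('stack_frames')
    const frames: ErrorTrackingStackFrame[] = await api.errorTracking.fetchSymbolSetStackFrames(symbolSetId)

    console.log(`${symbolSets.length} symbol sets, ${frames.length} frames for ${symbolSetId}`)
}
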
diff --git a/frontend/src/lib/components/Errors/stackFrameLogic.tsx b/frontend/src/lib/components/Errors/stackFrameLogic.tsx
index 3852055d12b..468859cf57c 100644
--- a/frontend/src/lib/components/Errors/stackFrameLogic.tsx
+++ b/frontend/src/lib/components/Errors/stackFrameLogic.tsx
@@ -4,19 +4,33 @@ import api from 'lib/api'
 
 import type { stackFrameLogicType } from './stackFrameLogicType'
 
-export interface StackFrame {
+export interface ErrorTrackingStackFrame {
     filename: string
     lineno: number
     colno: number
     function: string
     in_app?: boolean
+    raw_id: string
+    created_at: string
+    resolved: boolean
+    context: string | null
+    contents: Record<string, any> // More flexible for varying content structures
+}
+
+export interface ErrorTrackingSymbolSet {
+    id: string
+    ref: string
+    team_id: number
+    created_at: string
+    storage_ptr: string | null
+    failure_reason: string | null
 }
 
 export const stackFrameLogic = kea<stackFrameLogicType>([
     path(['components', 'Errors', 'stackFrameLogic']),
     loaders(({ values }) => ({
         stackFrames: [
-            {} as Record<string, StackFrame>,
+            {} as Record<string, ErrorTrackingStackFrame>,
             {
                 loadFrames: async ({ frameIds }: { frameIds: string[] }) => {
                     const loadedFrameIds = Object.keys(values.stackFrames)
diff --git a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts
index 25b3d77d128..7f12b2d84a2 100644
--- a/plugin-server/src/worker/ingestion/event-pipeline/runner.ts
+++ b/plugin-server/src/worker/ingestion/event-pipeline/runner.ts
@@ -259,7 +259,7 @@ export class EventPipelineRunner {
                 event.team_id
             )
 
-            if (event.event === '$exception' && event.team_id == 2) {
+            if (event.event === '$exception') {
                 const [exceptionAck] = await this.runStep(
                     produceExceptionSymbolificationEventStep,
                     [this, rawEvent],
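
The PR adds the `ErrorTrackingSymbolSet` type and the client helpers but does not wire them into a logic yet. A rough sketch of what a symbol-set loader could look like, following the `stackFrameLogic` pattern above; `symbolSetLogic` is a hypothetical name, not part of this diff, and a real version would also import its generated `symbolSetLogicType`:

import { kea, path } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'

import { ErrorTrackingSymbolSet } from './stackFrameLogic'

// Hypothetical logic mirroring stackFrameLogic; untyped kea() call for brevity.
export const symbolSetLogic = kea([
    path(['components', 'Errors', 'symbolSetLogic']),
    loaders({
        symbolSets: [
            [] as ErrorTrackingSymbolSet[],
            {
                // Calls the fetchSymbolSets helper added to api.ts in this PR
                loadSymbolSets: async () => await api.errorTracking.fetchSymbolSets(),
            },
        ],
    }),
])
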
diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py
index b488304ab64..76a3c68e409 100644
--- a/posthog/api/__init__.py
+++ b/posthog/api/__init__.py
@@ -28,6 +28,7 @@ from . import (
     dead_letter_queue,
     debug_ch_queries,
     early_access_feature,
+    error_tracking,
     event_definition,
     exports,
     feature_flag,
@@ -499,12 +500,12 @@ projects_router.register(
     ["project_id"],
 )
 
-# projects_router.register(
-#     r"error_tracking",
-#     error_tracking.ErrorTrackingGroupViewSet,
-#     "project_error_tracking",
-#     ["team_id"],
-# )
+projects_router.register(
+    r"error_tracking/symbol_sets",
+    error_tracking.ErrorTrackingSymbolSetViewSet,
+    "project_error_tracking_symbol_sets",
+    ["project_id"],
+)
 
 projects_router.register(
     r"comments",
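
Assuming the usual DRF router behaviour for project-nested `TeamAndOrgViewSetMixin` viewsets, the registration above plus the `stack_frames` detail action (next file) should expose roughly the routes below. The exact path prefixes are an assumption for orientation, not something this diff asserts:

// Hypothetical route map implied by the registration above.
const symbolSetRoutes = {
    list: 'GET /api/projects/:project_id/error_tracking/symbol_sets/',
    retrieve: 'GET /api/projects/:project_id/error_tracking/symbol_sets/:id/',
    destroy: 'DELETE /api/projects/:project_id/error_tracking/symbol_sets/:id/',
    stackFrames: 'GET /api/projects/:project_id/error_tracking/symbol_sets/:id/stack_frames/',
}
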
diff --git a/posthog/api/error_tracking.py b/posthog/api/error_tracking.py
index 339cb61e594..ed9cbbeb92c 100644
--- a/posthog/api/error_tracking.py
+++ b/posthog/api/error_tracking.py
@@ -1,57 +1,48 @@
-import structlog
+from rest_framework import serializers, viewsets, mixins
+from rest_framework.decorators import action
+from rest_framework.response import Response
+from django.db.models import QuerySet
+
+from posthog.api.routing import TeamAndOrgViewSetMixin
+from posthog.models.error_tracking.error_tracking import ErrorTrackingSymbolSet, ErrorTrackingStackFrame
 
-FIFTY_MEGABYTES = 50 * 1024 * 1024
-
-logger = structlog.get_logger(__name__)
 
+class ErrorTrackingStackFrameSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = ErrorTrackingStackFrame
+        fields = ["id", "raw_id", "created_at", "contents", "resolved", "context"]
 
-class ObjectStorageUnavailable(Exception):
-    pass
 
+class ErrorTrackingSymbolSetSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = ErrorTrackingSymbolSet
+        fields = ["id", "ref", "team_id", "created_at", "storage_ptr", "failure_reason"]
+        read_only_fields = ["team_id"]
 
-# class ErrorTrackingGroupSerializer(serializers.ModelSerializer):
-#     class Meta:
-#         model = ErrorTrackingGroup
-#         fields = ["assignee", "status"]
 
+class ErrorTrackingSymbolSetViewSet(
+    TeamAndOrgViewSetMixin,
+    mixins.ListModelMixin,
+    mixins.RetrieveModelMixin,
+    mixins.DestroyModelMixin,
+    viewsets.GenericViewSet,
+):
+    scope_object = "query"
+    serializer_class = ErrorTrackingSymbolSetSerializer
+    queryset = ErrorTrackingSymbolSet.objects.all()
+    scope_object_read_actions = ["list", "retrieve", "stack_frames"]
 
-# class ErrorTrackingGroupViewSet(TeamAndOrgViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet):
-#     scope_object = "INTERNAL"
-#     queryset = ErrorTrackingGroup.objects.all()
-#     serializer_class = ErrorTrackingGroupSerializer
 
+    def safely_get_queryset(self, queryset: QuerySet) -> QuerySet:
+        return queryset.filter(team_id=self.team.id)
 
-#     def safely_get_object(self, queryset) -> QuerySet:
-#         stringified_fingerprint = self.kwargs["pk"]
-#         fingerprint = json.loads(urlsafe_base64_decode(stringified_fingerprint))
-#         group, _ = queryset.get_or_create(fingerprint=fingerprint, team=self.team)
-#         return group
 
+    @action(methods=["GET"], detail=True)
+    def stack_frames(self, request, *args, **kwargs):
+        symbol_set = self.get_object()
+        frames = ErrorTrackingStackFrame.objects.filter(symbol_set=symbol_set, team_id=self.team.id)
+        serializer = ErrorTrackingStackFrameSerializer(frames, many=True)
+        return Response(serializer.data)
 
-#     @action(methods=["POST"], detail=True)
-#     def merge(self, request, **kwargs):
-#         group: ErrorTrackingGroup = self.get_object()
-#         merging_fingerprints: list[list[str]] = request.data.get("merging_fingerprints", [])
-#         group.merge(merging_fingerprints)
-#         return Response({"success": True})
-
-#     @action(methods=["POST"], detail=False)
-#     def upload_source_maps(self, request, **kwargs):
-#         try:
-#             if settings.OBJECT_STORAGE_ENABLED:
-#                 file = request.FILES["source_map"]
-#                 if file.size > FIFTY_MEGABYTES:
-#                     raise ValidationError(code="file_too_large", detail="Source maps must be less than 50MB")
-
-#                 upload_path = (
-#                     f"{settings.OBJECT_STORAGE_ERROR_TRACKING_SOURCE_MAPS_FOLDER}/team-{self.team_id}/{file.name}"
-#                 )
-
-#                 object_storage.write(upload_path, file)
-#                 return Response({"ok": True}, status=status.HTTP_201_CREATED)
-#             else:
-#                 raise ObjectStorageUnavailable()
-#         except ObjectStorageUnavailable:
-#             raise ValidationError(
-#                 code="object_storage_required",
-#                 detail="Object storage must be available to allow source map uploads.",
-#             )
+    def perform_destroy(self, instance):
+        # The related stack frames will be deleted via CASCADE
+        instance.delete()
diff --git a/rust/cymbal/src/bin/generate_test_events.rs b/rust/cymbal/src/bin/generate_test_events.rs
index 2e237a056d7..9be475f6ea9 100644
--- a/rust/cymbal/src/bin/generate_test_events.rs
+++ b/rust/cymbal/src/bin/generate_test_events.rs
@@ -7,6 +7,7 @@ use cymbal::{
 };
 use envconfig::Envconfig;
 use health::HealthRegistry;
+use uuid::Uuid;
 
 const EXCEPTION_DATA: &str = include_str!("../../tests/static/raw_ch_exception_list.json");
 
@@ -19,13 +20,20 @@ async fn main() {
         .await;
     let producer = create_kafka_producer(&config, handle).await.unwrap();
 
-    let exception: ClickHouseEvent = serde_json::from_str(EXCEPTION_DATA).unwrap();
-    let exceptions = (0..10000).map(|_| exception.clone()).collect::<Vec<_>>();
+    let mut exception: ClickHouseEvent = serde_json::from_str(EXCEPTION_DATA).unwrap();
+    exception.team_id = 1;
+    exception.project_id = 1;
+    let exceptions = (0..100).map(|_| exception.clone()).collect::<Vec<_>>();
     get_props(&exception).unwrap();
 
     loop {
         println!("Sending {} exception kafka", exceptions.len());
-        send_iter_to_kafka(&producer, "exception_symbolification_events", &exceptions)
+        let to_send = exceptions.iter().map(|e| {
+            let mut e = e.clone();
+            e.uuid = Uuid::now_v7();
+            e
+        });
+        send_iter_to_kafka(&producer, "exception_symbolification_events", to_send)
             .await
             .unwrap();
         tokio::time::sleep(std::time::Duration::from_secs(1)).await;
diff --git a/rust/cymbal/src/bin/run.sh b/rust/cymbal/src/bin/run.sh
index 694c0f9e5d8..e274454ca8b 100755
--- a/rust/cymbal/src/bin/run.sh
+++ b/rust/cymbal/src/bin/run.sh
@@ -4,4 +4,4 @@ export OBJECT_STORAGE_BUCKET="posthog"
 export OBJECT_STORAGE_ACCESS_KEY_ID="object_storage_root_user"
 export OBJECT_STORAGE_SECRET_ACCESS_KEY="object_storage_root_password"
 
-cargo run --bin cymbal
+RUST_LOG=info cargo run --bin cymbal
diff --git a/rust/cymbal/src/main.rs b/rust/cymbal/src/main.rs
index aeef8632946..7974ec122fd 100644
--- a/rust/cymbal/src/main.rs
+++ b/rust/cymbal/src/main.rs
@@ -97,6 +97,8 @@ async fn main() {
             .await
             .expect("Failed to send event to Kafka");
 
+        info!("Processed event");
+
         offset.store().unwrap();
 
         metrics::counter!(STACK_PROCESSED).increment(1);