
add fetching of symbol sets and their associated stack frames

This commit is contained in:
Oliver Browne 2024-11-20 01:04:34 +02:00
parent 0dff32323b
commit 7dbfa0e49f
9 changed files with 103 additions and 64 deletions

View File

@@ -113,6 +113,7 @@ import {
} from '~/types'
import { AlertType, AlertTypeWrite } from './components/Alerts/types'
import { ErrorTrackingStackFrame, ErrorTrackingSymbolSet } from './components/Errors/stackFrameLogic'
import {
ACTIVITY_PAGE_SIZE,
DashboardPrivilegeLevel,
@@ -723,6 +724,14 @@ class ApiRequest {
return this.errorTracking().addPathComponent('stack_frames').withQueryString({ ids })
}
public symbolSets(): ApiRequest {
return this.errorTracking().withAction('symbol_sets')
}
public symbolSetStackFrames(symbolSetId: string): ApiRequest {
return this.symbolSets().withAction(symbolSetId).withAction('stack_frames')
}
// # Warehouse
public dataWarehouseTables(teamId?: TeamType['id']): ApiRequest {
return this.projectsDetail(teamId).addPathComponent('warehouse_tables')
@@ -1865,6 +1874,14 @@ const api = {
async fetchStackFrames(ids: string[]): Promise<{ content: string }> {
return await new ApiRequest().errorTrackingStackFrames(ids).get()
},
async fetchSymbolSetStackFrames(symbolSetId: string): Promise<ErrorTrackingStackFrame[]> {
return await new ApiRequest().symbolSetStackFrames(symbolSetId).get()
},
async fetchSymbolSets(): Promise<ErrorTrackingSymbolSet[]> {
return await new ApiRequest().symbolSets().get()
},
},
recordings: {
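Not part of the commit, but for orientation: a minimal sketch of how the new client methods might be called, assuming they sit under api.errorTracking next to fetchStackFrames as the surrounding hunk suggests (function name and import paths here are illustrative).

import api from 'lib/api'
import { ErrorTrackingStackFrame, ErrorTrackingSymbolSet } from './components/Errors/stackFrameLogic'

async function logSymbolSetFrames(): Promise<void> {
    // List every symbol set stored for the current project.
    const symbolSets: ErrorTrackingSymbolSet[] = await api.errorTracking.fetchSymbolSets()

    // For each symbol set, pull the stack frames that were resolved against it.
    for (const symbolSet of symbolSets) {
        const frames: ErrorTrackingStackFrame[] = await api.errorTracking.fetchSymbolSetStackFrames(symbolSet.id)
        console.log(symbolSet.ref, frames.length)
    }
}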

View File

@@ -11,15 +11,15 @@ import { useState } from 'react'
import { EventType } from '~/types'
import { StackFrame } from './stackFrameLogic'
import { ErrorTrackingStackFrame } from './stackFrameLogic'
interface RawStackTrace {
type: 'raw'
frames: StackFrame[]
frames: ErrorTrackingStackFrame[]
}
interface ResolvedStackTrace {
type: 'resolved'
frames: StackFrame[]
frames: ErrorTrackingStackFrame[]
}
interface Exception {
@@ -29,7 +29,13 @@ interface Exception {
value: string
}
function StackTrace({ frames, showAllFrames }: { frames: StackFrame[]; showAllFrames: boolean }): JSX.Element | null {
function StackTrace({
frames,
showAllFrames,
}: {
frames: ErrorTrackingStackFrame[]
showAllFrames: boolean
}): JSX.Element | null {
const displayFrames = showAllFrames ? frames : frames.filter((f) => f.in_app)
const panels = displayFrames.map(({ filename, lineno, colno, function: functionName }, index) => {

View File

@@ -4,19 +4,33 @@ import api from 'lib/api'
import type { stackFrameLogicType } from './stackFrameLogicType'
export interface StackFrame {
export interface ErrorTrackingStackFrame {
filename: string
lineno: number
colno: number
function: string
in_app?: boolean
raw_id: string
created_at: string
resolved: boolean
context: string | null
contents: Record<string, any> // More flexible for varying content structures
}
export interface ErrorTrackingSymbolSet {
id: string
ref: string
team_id: number
created_at: string
storage_ptr: string | null
failure_reason: string | null
}
export const stackFrameLogic = kea<stackFrameLogicType>([
path(['components', 'Errors', 'stackFrameLogic']),
loaders(({ values }) => ({
stackFrames: [
{} as Record<string, StackFrame>,
{} as Record<string, ErrorTrackingStackFrame>,
{
loadFrames: async ({ frameIds }: { frameIds: string[] }) => {
const loadedFrameIds = Object.keys(values.stackFrames)
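As an illustration of how the new ErrorTrackingSymbolSet type and fetcher could be consumed, a hypothetical kea logic follows (symbolSetLogic does not exist in this commit; the sketch assumes the api.errorTracking.fetchSymbolSets method added in api.ts above).

import { kea, path } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'

import { ErrorTrackingSymbolSet } from './stackFrameLogic'

// Hypothetical logic: loads all symbol sets for the current project via the new endpoint.
export const symbolSetLogic = kea([
    path(['components', 'Errors', 'symbolSetLogic']),
    loaders({
        symbolSets: [
            [] as ErrorTrackingSymbolSet[],
            {
                loadSymbolSets: async () => await api.errorTracking.fetchSymbolSets(),
            },
        ],
    }),
])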

View File

@@ -259,7 +259,7 @@ export class EventPipelineRunner {
event.team_id
)
if (event.event === '$exception' && event.team_id == 2) {
if (event.event === '$exception') {
const [exceptionAck] = await this.runStep(
produceExceptionSymbolificationEventStep,
[this, rawEvent],

View File

@@ -28,6 +28,7 @@ from . import (
dead_letter_queue,
debug_ch_queries,
early_access_feature,
error_tracking,
event_definition,
exports,
feature_flag,
@@ -499,12 +500,12 @@ projects_router.register(
["project_id"],
)
# projects_router.register(
# r"error_tracking",
# error_tracking.ErrorTrackingGroupViewSet,
# "project_error_tracking",
# ["team_id"],
# )
projects_router.register(
r"error_tracking/symbol_sets",
error_tracking.ErrorTrackingSymbolSetViewSet,
"project_error_tracking_symbol_sets",
["project_id"],
)
projects_router.register(
r"comments",

View File

@@ -1,57 +1,48 @@
import structlog
from rest_framework import serializers, viewsets, mixins
from rest_framework.decorators import action
from rest_framework.response import Response
from django.db.models import QuerySet
from posthog.api.routing import TeamAndOrgViewSetMixin
from posthog.models.error_tracking.error_tracking import ErrorTrackingSymbolSet, ErrorTrackingStackFrame
FIFTY_MEGABYTES = 50 * 1024 * 1024
logger = structlog.get_logger(__name__)
class ErrorTrackingStackFrameSerializer(serializers.ModelSerializer):
class Meta:
model = ErrorTrackingStackFrame
fields = ["id", "raw_id", "created_at", "contents", "resolved", "context"]
class ObjectStorageUnavailable(Exception):
pass
class ErrorTrackingSymbolSetSerializer(serializers.ModelSerializer):
class Meta:
model = ErrorTrackingSymbolSet
fields = ["id", "ref", "team_id", "created_at", "storage_ptr", "failure_reason"]
read_only_fields = ["team_id"]
# class ErrorTrackingGroupSerializer(serializers.ModelSerializer):
# class Meta:
# model = ErrorTrackingGroup
# fields = ["assignee", "status"]
class ErrorTrackingSymbolSetViewSet(
TeamAndOrgViewSetMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
scope_object = "query"
serializer_class = ErrorTrackingSymbolSetSerializer
queryset = ErrorTrackingSymbolSet.objects.all()
scope_object_read_actions = ["list", "retrieve", "stack_frames"]
# class ErrorTrackingGroupViewSet(TeamAndOrgViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet):
# scope_object = "INTERNAL"
# queryset = ErrorTrackingGroup.objects.all()
# serializer_class = ErrorTrackingGroupSerializer
def safely_get_queryset(self, queryset: QuerySet) -> QuerySet:
return queryset.filter(team_id=self.team.id)
# def safely_get_object(self, queryset) -> QuerySet:
# stringified_fingerprint = self.kwargs["pk"]
# fingerprint = json.loads(urlsafe_base64_decode(stringified_fingerprint))
# group, _ = queryset.get_or_create(fingerprint=fingerprint, team=self.team)
# return group
@action(methods=["GET"], detail=True)
def stack_frames(self, request, *args, **kwargs):
symbol_set = self.get_object()
frames = ErrorTrackingStackFrame.objects.filter(symbol_set=symbol_set, team_id=self.team.id)
serializer = ErrorTrackingStackFrameSerializer(frames, many=True)
return Response(serializer.data)
# @action(methods=["POST"], detail=True)
# def merge(self, request, **kwargs):
# group: ErrorTrackingGroup = self.get_object()
# merging_fingerprints: list[list[str]] = request.data.get("merging_fingerprints", [])
# group.merge(merging_fingerprints)
# return Response({"success": True})
# @action(methods=["POST"], detail=False)
# def upload_source_maps(self, request, **kwargs):
# try:
# if settings.OBJECT_STORAGE_ENABLED:
# file = request.FILES["source_map"]
# if file.size > FIFTY_MEGABYTES:
# raise ValidationError(code="file_too_large", detail="Source maps must be less than 50MB")
# upload_path = (
# f"{settings.OBJECT_STORAGE_ERROR_TRACKING_SOURCE_MAPS_FOLDER}/team-{self.team_id}/{file.name}"
# )
# object_storage.write(upload_path, file)
# return Response({"ok": True}, status=status.HTTP_201_CREATED)
# else:
# raise ObjectStorageUnavailable()
# except ObjectStorageUnavailable:
# raise ValidationError(
# code="object_storage_required",
# detail="Object storage must be available to allow source map uploads.",
# )
def perform_destroy(self, instance):
# The related stack frames will be deleted via CASCADE
instance.delete()
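The router registration and the detail action together expose two read endpoints. A rough sketch of calling them directly, assuming the project-scoped URL prefix and an unpaginated list response (neither is spelled out in this diff); in the app these requests go through the ApiRequest helpers added in api.ts.

// Hypothetical direct calls against the new endpoints, for illustration only.
async function listSymbolSetFrames(projectId: number): Promise<void> {
    // List route from projects_router.register(r"error_tracking/symbol_sets", ...)
    const setsResponse = await fetch(`/api/projects/${projectId}/error_tracking/symbol_sets/`)
    const symbolSets = await setsResponse.json()

    // Detail route from @action(methods=["GET"], detail=True) def stack_frames(...)
    for (const symbolSet of symbolSets) {
        const framesResponse = await fetch(
            `/api/projects/${projectId}/error_tracking/symbol_sets/${symbolSet.id}/stack_frames/`
        )
        console.log(symbolSet.ref, await framesResponse.json())
    }
}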

View File

@@ -7,6 +7,7 @@ use cymbal::{
};
use envconfig::Envconfig;
use health::HealthRegistry;
use uuid::Uuid;
const EXCEPTION_DATA: &str = include_str!("../../tests/static/raw_ch_exception_list.json");
@@ -19,13 +20,20 @@ async fn main() {
.await;
let producer = create_kafka_producer(&config, handle).await.unwrap();
let exception: ClickHouseEvent = serde_json::from_str(EXCEPTION_DATA).unwrap();
let exceptions = (0..10000).map(|_| exception.clone()).collect::<Vec<_>>();
let mut exception: ClickHouseEvent = serde_json::from_str(EXCEPTION_DATA).unwrap();
exception.team_id = 1;
exception.project_id = 1;
let exceptions = (0..100).map(|_| exception.clone()).collect::<Vec<_>>();
get_props(&exception).unwrap();
loop {
println!("Sending {} exceptions to kafka", exceptions.len());
send_iter_to_kafka(&producer, "exception_symbolification_events", &exceptions)
let to_send = exceptions.iter().map(|e| {
let mut e = e.clone();
e.uuid = Uuid::now_v7();
e
});
send_iter_to_kafka(&producer, "exception_symbolification_events", to_send)
.await
.unwrap();
tokio::time::sleep(std::time::Duration::from_secs(1)).await;

View File

@@ -4,4 +4,4 @@ export OBJECT_STORAGE_BUCKET="posthog"
export OBJECT_STORAGE_ACCESS_KEY_ID="object_storage_root_user"
export OBJECT_STORAGE_SECRET_ACCESS_KEY="object_storage_root_password"
cargo run --bin cymbal
RUST_LOG=info cargo run --bin cymbal

View File

@@ -97,6 +97,8 @@ async fn main() {
.await
.expect("Failed to send event to Kafka");
info!("Processed event");
offset.store().unwrap();
metrics::counter!(STACK_PROCESSED).increment(1);