From 65fedb23e4a136f5f959656fe826cd2ccd7639a0 Mon Sep 17 00:00:00 2001
From: Eric Duong
Date: Tue, 16 Jun 2020 07:49:31 -0400
Subject: [PATCH] Sessions view (#926)

* initial foundation for sessions
* initial ui
* updated icon
* temporary repeated code
* aggregated properly
* working onclick row
* reorganize sessions logic
* paginate
* update test
* fix typing
* remove materialize script
* .
* add api test
* add e2e test
* update label
* fix test
* initial working materialize sessions
* add ellipsis
* working with double migration
* remove materialized and paginate properly
* undo migrations manifest
* remove unneeded diffs
* fix test errors
* fix test
* remove button when unnecessary
* fix logic
* linting error
* styling fix
* more styling
* .
* fix test
* Add cursor pointer

Co-authored-by: Tim Glaser
---
 cypress/integration/sessions.js               |  11 +
 frontend/src/layout/Sidebar.js                |   7 +
 frontend/src/lib/utils.js                     |  40 +++-
 .../src/scenes/dashboard/dashboardLogic.js    |   2 +-
 frontend/src/scenes/events/EventDetails.js    |   2 +-
 frontend/src/scenes/events/EventsTable.js     |   1 -
 frontend/src/scenes/events/index.js           |   5 +
 frontend/src/scenes/paths/Paths.js            |   8 +-
 frontend/src/scenes/sceneLogic.js             |   2 +
 .../src/scenes/sessions/SessionDetails.js     |  62 ++++++
 frontend/src/scenes/sessions/Sessions.js      |  11 +
 frontend/src/scenes/sessions/SessionsTable.js | 110 ++++++++++
 .../src/scenes/sessions/sessionsTableLogic.js |  56 +++++
 frontend/src/scenes/trends/trendsLogic.js     |   1 +
 posthog/api/event.py                          | 192 ++++++++++++------
 posthog/api/paths.py                          |   2 +-
 posthog/api/test/test_event.py                |  43 ++--
 posthog/utils.py                              |   7 +-
 18 files changed, 472 insertions(+), 90 deletions(-)
 create mode 100644 cypress/integration/sessions.js
 create mode 100644 frontend/src/scenes/events/index.js
 create mode 100644 frontend/src/scenes/sessions/SessionDetails.js
 create mode 100644 frontend/src/scenes/sessions/Sessions.js
 create mode 100644 frontend/src/scenes/sessions/SessionsTable.js
 create mode 100644 frontend/src/scenes/sessions/sessionsTableLogic.js

diff --git a/cypress/integration/sessions.js b/cypress/integration/sessions.js
new file mode 100644
index 00000000000..a13f42298ba
--- /dev/null
+++ b/cypress/integration/sessions.js
@@ -0,0 +1,11 @@
+describe('Sessions', () => {
+    beforeEach(() => {
+        cy.get('[data-attr=menu-item-events]').click()
+        cy.get('[data-attr=menu-item-sessions]').click()
+    })
+
+    it('Sessions Table loaded', () => {
+        cy.get('h1').should('contain', 'Sessions')
+        cy.get('[data-attr=sessions-table]').should('exist')
+    })
+})
diff --git a/frontend/src/layout/Sidebar.js b/frontend/src/layout/Sidebar.js
index 9966b3219b2..e2a7767ec7f 100644
--- a/frontend/src/layout/Sidebar.js
+++ b/frontend/src/layout/Sidebar.js
@@ -17,6 +17,7 @@ import {
     ContainerOutlined,
     LineChartOutlined,
     FundOutlined,
+    ClockCircleOutlined,
 } from '@ant-design/icons'
 import { useActions, useValues } from 'kea'
 import { Link } from 'lib/components/Link'
@@ -53,6 +54,7 @@ const sceneOverride = {
 const submenuOverride = {
     actions: 'events',
     liveActions: 'events',
+    sessions: 'events',
     cohorts: 'people',
 }
 
@@ -161,6 +163,11 @@ export function Sidebar({ user, sidebarCollapsed, setSidebarCollapsed }) {
                                 {'Live Actions'}
                             
                         
+                        <Menu.Item key="sessions" data-attr="menu-item-sessions">
+                            <ClockCircleOutlined />
+                            <span className="sidebar-label">{'Sessions'}</span>
+                            <Link to={'/sessions'} />
+                        </Menu.Item>
                         
diff --git a/frontend/src/lib/utils.js b/frontend/src/lib/utils.js
--- a/frontend/src/lib/utils.js
+++ b/frontend/src/lib/utils.js
@@ -238,7 +239,7 @@ export function clearDOMTextSelection() {
     if (window.getSelection) {
         if (window.getSelection().empty) {
             // Chrome
-            window.getSelection().empty()
+            window.getSelection().empty()
         } else if (window.getSelection().removeAllRanges) {
             // Firefox
             window.getSelection().removeAllRanges()
         }
@@ -251,10 +252,45 @@ 
export function clearDOMTextSelection() { export const posthogEvents = ['$autocapture', '$pageview', '$identify', '$pageleave'] -export default function isAndroidOrIOS() { +export function isAndroidOrIOS() { return typeof window !== 'undefined' && /Android|iPhone|iPad|iPod/i.test(window.navigator.userAgent) } +export function humanFriendlyDuration(d) { + d = Number(d) + var h = Math.floor(d / 3600) + var m = Math.floor((d % 3600) / 60) + var s = Math.floor((d % 3600) % 60) + + var hDisplay = h > 0 ? h + (h == 1 ? 'hr ' : 'hrs ') : '' + var mDisplay = m > 0 ? m + (m == 1 ? 'min ' : 'mins ') : '' + var sDisplay = s > 0 ? s + 's' : hDisplay || mDisplay ? '' : '0s' + return hDisplay + mDisplay + sDisplay +} + +export function humanFriendlyDiff(from, to) { + const diff = moment(to).diff(moment(from), 'seconds') + return humanFriendlyDuration(diff) +} + +export function humanFriendlyDetailedTime(date, withSeconds = false) { + let formatString = 'MMMM Do YYYY h:mm' + if (moment().diff(date, 'days') == 0) { + formatString = '[Today] h:mm' + } else if (moment().diff(date, 'days') == 1) { + formatString = '[Yesterday] h:mm' + } + if (withSeconds) formatString += ':s a' + else formatString += ' a' + return moment(date).format(formatString) +} + +export function stripHTTP(url) { + url = url.replace(/(^[0-9]+_)/, '') + url = url.replace(/(^\w+:|^)\/\//, '') + return url +} + export const eventToName = event => { if (event.event !== '$autocapture') return event.event let name = '' diff --git a/frontend/src/scenes/dashboard/dashboardLogic.js b/frontend/src/scenes/dashboard/dashboardLogic.js index 6a381eab014..2afea13a8e8 100644 --- a/frontend/src/scenes/dashboard/dashboardLogic.js +++ b/frontend/src/scenes/dashboard/dashboardLogic.js @@ -6,7 +6,7 @@ import { router } from 'kea-router' import { toast } from 'react-toastify' import { Link } from 'lib/components/Link' import React from 'react' -import isAndroidOrIOS, { clearDOMTextSelection } from 'lib/utils' +import { isAndroidOrIOS, clearDOMTextSelection } from 'lib/utils' export const dashboardLogic = kea({ connect: [dashboardsModel], diff --git a/frontend/src/scenes/events/EventDetails.js b/frontend/src/scenes/events/EventDetails.js index e408bbff2a4..dd5be42c39c 100644 --- a/frontend/src/scenes/events/EventDetails.js +++ b/frontend/src/scenes/events/EventDetails.js @@ -31,7 +31,7 @@ export function EventDetails({ event }) { }} /> - {event.elements.length > 0 && ( + {event.elements && event.elements.length > 0 && ( diff --git a/frontend/src/scenes/events/EventsTable.js b/frontend/src/scenes/events/EventsTable.js index 363221c489f..cc6557e696d 100644 --- a/frontend/src/scenes/events/EventsTable.js +++ b/frontend/src/scenes/events/EventsTable.js @@ -20,7 +20,6 @@ export function EventsTable({ fixedFilters, filtersEnabled = true, logic, isLive } = useValues(router) const showLinkToPerson = !fixedFilters?.person_id - let columns = [ { title: 'Event', diff --git a/frontend/src/scenes/events/index.js b/frontend/src/scenes/events/index.js new file mode 100644 index 00000000000..b55e8aeae23 --- /dev/null +++ b/frontend/src/scenes/events/index.js @@ -0,0 +1,5 @@ +export * from './EventDetails' +export * from './Events' +export * from './EventElements' +export * from './EventsTable' +export * from './eventsTableLogic' diff --git a/frontend/src/scenes/paths/Paths.js b/frontend/src/scenes/paths/Paths.js index 32e2ef4ba87..91c7d2613ff 100644 --- a/frontend/src/scenes/paths/Paths.js +++ b/frontend/src/scenes/paths/Paths.js @@ -1,6 +1,6 @@ import React, { useRef, 
useState, useEffect } from 'react' import api from 'lib/api' -import { Card, Loading } from 'lib/utils' +import { Card, Loading, stripHTTP } from 'lib/utils' import { DateFilter } from 'lib/components/DateFilter' import { Row, Modal, Button, Spin, Select } from 'antd' import { EventElements } from 'scenes/events/EventElements' @@ -19,12 +19,6 @@ import { } from 'scenes/paths/pathsLogic' import { userLogic } from 'scenes/userLogic' -let stripHTTP = url => { - url = url.replace(/(^[0-9]+_)/, '') - url = url.replace(/(^\w+:|^)\/\//, '') - return url -} - function rounded_rect(x, y, w, h, r, tl, tr, bl, br) { var retval retval = 'M' + (x + r) + ',' + y diff --git a/frontend/src/scenes/sceneLogic.js b/frontend/src/scenes/sceneLogic.js index 64c5583a46b..39a1ad56d42 100644 --- a/frontend/src/scenes/sceneLogic.js +++ b/frontend/src/scenes/sceneLogic.js @@ -9,6 +9,7 @@ export const scenes = { dashboards: () => import(/* webpackChunkName: 'dashboard' */ './dashboard/Dashboards'), dashboard: () => import(/* webpackChunkName: 'dashboard' */ './dashboard/Dashboard'), events: () => import(/* webpackChunkName: 'events' */ './events/Events'), + sessions: () => import(/* webpackChunkName: 'events' */ './sessions/Sessions'), person: () => import(/* webpackChunkName: 'person' */ './users/Person'), people: () => import(/* webpackChunkName: 'people' */ './users/People'), actions: () => import(/* webpackChunkName: 'actions' */ './actions/Actions'), @@ -46,6 +47,7 @@ export const routes = { '/people': 'people', '/people/new_cohort': 'people', '/people/cohorts': 'cohorts', + '/sessions': 'sessions', } export const sceneLogic = kea({ diff --git a/frontend/src/scenes/sessions/SessionDetails.js b/frontend/src/scenes/sessions/SessionDetails.js new file mode 100644 index 00000000000..55e11ceeafe --- /dev/null +++ b/frontend/src/scenes/sessions/SessionDetails.js @@ -0,0 +1,62 @@ +import React from 'react' +import { Table } from 'antd' +import { humanFriendlyDiff, humanFriendlyDetailedTime } from '~/lib/utils' +import { EventDetails } from 'scenes/events' +import { Property } from 'lib/components/Property' +import { eventToName } from 'lib/utils' + +export function SessionDetails({ events }) { + const columns = [ + { + title: 'Event', + key: 'id', + render: function RenderEvent(event) { + return eventToName(event) + }, + }, + { + title: 'URL / Screen', + key: 'url', + render: function renderURL(event) { + if (!event) return { props: { colSpan: 0 } } + let param = event.properties['$current_url'] ? '$current_url' : '$screen_name' + return + }, + ellipsis: true, + }, + { + title: 'Timestamp', + render: function RenderTimestamp({ timestamp }) { + return {humanFriendlyDetailedTime(timestamp, true)} + }, + }, + { + title: 'Time Elapsed from Previous', + render: function RenderElapsed({ timestamp }, _, index) { + return {index > 0 ? humanFriendlyDiff(events[index - 1]['timestamp'], timestamp) : 0} + }, + }, + { + title: 'Order', + render: function RenderOrder(_, __, index) { + return {index + 1} + }, + }, + ] + + return ( + event.id} + dataSource={events} + pagination={{ pageSize: 50, hideOnSinglePage: true }} + expandable={{ + expandedRowRender: function renderExpand(event) { + return + }, + rowExpandable: event => event, + expandRowByClick: true, + }} + >
+ ) +} diff --git a/frontend/src/scenes/sessions/Sessions.js b/frontend/src/scenes/sessions/Sessions.js new file mode 100644 index 00000000000..8e548730ba8 --- /dev/null +++ b/frontend/src/scenes/sessions/Sessions.js @@ -0,0 +1,11 @@ +import React from 'react' +import { SessionsTable } from './SessionsTable' +import { sessionsTableLogic } from 'scenes/sessions/sessionsTableLogic' +import { hot } from 'react-hot-loader/root' + +export const logic = sessionsTableLogic + +export const Sessions = hot(_Sessions) +function _Sessions(props) { + return +} diff --git a/frontend/src/scenes/sessions/SessionsTable.js b/frontend/src/scenes/sessions/SessionsTable.js new file mode 100644 index 00000000000..930cfdaaef2 --- /dev/null +++ b/frontend/src/scenes/sessions/SessionsTable.js @@ -0,0 +1,110 @@ +import React from 'react' +import { useValues, useActions } from 'kea' +import { Table, Button, Spin } from 'antd' +import { Link } from 'lib/components/Link' +import { humanFriendlyDuration, humanFriendlyDetailedTime, stripHTTP } from '~/lib/utils' +import _ from 'lodash' +import { SessionDetails } from './SessionDetails' +import { DatePicker } from 'antd' +import moment from 'moment' + +export function SessionsTable({ logic }) { + const { sessions, sessionsLoading, offset, isLoadingNext, selectedDate } = useValues(logic) + const { fetchNextSessions, dateChanged } = useActions(logic) + let columns = [ + { + title: 'Person', + key: 'person', + render: function RenderSession(session) { + return ( + + {session.properties.email || session.distinct_id} + + ) + }, + ellipsis: true, + }, + { + title: 'Event Count', + render: function RenderDuration(session) { + return {session.event_count} + }, + }, + { + title: 'Duration', + render: function RenderDuration(session) { + return {humanFriendlyDuration(session.length)} + }, + }, + { + title: 'Start Time', + render: function RenderStartTime(session) { + return {humanFriendlyDetailedTime(session.start_time)} + }, + }, + { + title: 'Start Point', + render: function RenderStartPoint(session) { + return ( + + {!_.isEmpty(session.events) && _.first(session.events).properties?.$current_url + ? stripHTTP(session.events[0].properties.$current_url) + : 'N/A'} + + ) + }, + ellipsis: true, + }, + { + title: 'End Point', + render: function RenderEndPoint(session) { + return ( + + {!_.isEmpty(session.events) && _.last(session.events).properties?.$current_url + ? stripHTTP(_.last(session.events).properties.$current_url) + : 'N/A'} + + ) + }, + ellipsis: true, + }, + ] + + return ( +
+        <div className="events" data-attr="sessions-table">
+            <h1 className="page-header">Sessions By Day</h1>
+            <DatePicker value={selectedDate} onChange={dateChanged} />
+            <Table
+                rowKey={item => item.global_session_id}
+                pagination={{ pageSize: 99999, hideOnSinglePage: true }}
+                rowClassName="cursor-pointer"
+                dataSource={sessions}
+                columns={columns}
+                loading={sessionsLoading}
+                expandable={{
+                    expandedRowRender: function renderExpand({ events }) {
+                        return <SessionDetails events={events} />
+                    },
+                    rowExpandable: () => true,
+                    expandRowByClick: true,
+                }}
+            />
+            <div style={{ textAlign: 'center' }}>
+                {(offset || isLoadingNext) && (
+                    <Button onClick={fetchNextSessions}>{isLoadingNext ? <Spin /> : 'Load more sessions'}</Button>
+                )}
+            </div>
+        </div>
+ ) +} diff --git a/frontend/src/scenes/sessions/sessionsTableLogic.js b/frontend/src/scenes/sessions/sessionsTableLogic.js new file mode 100644 index 00000000000..ec7b503cfa2 --- /dev/null +++ b/frontend/src/scenes/sessions/sessionsTableLogic.js @@ -0,0 +1,56 @@ +import { kea } from 'kea' +import api from 'lib/api' +import moment from 'moment' +import { toParams } from 'lib/utils' + +export const sessionsTableLogic = kea({ + loaders: ({ actions }) => ({ + sessions: { + __default: [], + loadSessions: async selectedDate => { + const response = await api.get( + 'api/event/sessions' + (selectedDate ? '/?date_from=' + selectedDate.toISOString() : '') + ) + if (response.offset) actions.setOffset(response.offset) + if (response.date_from) actions.setDate(moment(response.date_from).startOf('day')) + return response.result + }, + }, + }), + actions: () => ({ + setOffset: offset => ({ offset }), + fetchNextSessions: true, + appendNewSessions: sessions => ({ sessions }), + dateChanged: date => ({ date }), + setDate: date => ({ date }), + }), + reducers: () => ({ + sessions: { + appendNewSessions: (state, { sessions }) => [...state, ...sessions], + }, + isLoadingNext: [false, { fetchNextSessions: () => true, appendNewSessions: () => false }], + offset: [ + null, + { + setOffset: (_, { offset }) => offset, + }, + ], + selectedDate: [moment().startOf('day'), { dateChanged: (_, { date }) => date, setDate: (_, { date }) => date }], + }), + listeners: ({ values, actions }) => ({ + fetchNextSessions: async () => { + const response = await api.get( + 'api/event/sessions/?' + toParams({ date_from: values.selectedDate, offset: values.offset }) + ) + if (response.offset) actions.setOffset(response.offset) + else actions.setOffset(null) + actions.appendNewSessions(response.result) + }, + dateChanged: ({ date }) => { + actions.loadSessions(date) + }, + }), + events: ({ actions }) => ({ + afterMount: actions.loadSessions, + }), +}) diff --git a/frontend/src/scenes/trends/trendsLogic.js b/frontend/src/scenes/trends/trendsLogic.js index 9ca2cae9cb5..7a8e14be832 100644 --- a/frontend/src/scenes/trends/trendsLogic.js +++ b/frontend/src/scenes/trends/trendsLogic.js @@ -102,6 +102,7 @@ export const trendsLogic = kea({ (refresh ? 'refresh=true&' : '') + toAPIParams(filterClientSideParams(values.filters)) ) + response = response.result } else { response = await api.get( 'api/action/trends/?' 
+ diff --git a/posthog/api/event.py b/posthog/api/event.py index ddc150b9326..84b83352299 100644 --- a/posthog/api/event.py +++ b/posthog/api/event.py @@ -1,4 +1,5 @@ from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta from posthog.models import Event, Person, Element, Action, ElementGroup, Filter, PersonDistinctId, Team from posthog.utils import friendly_time, request_to_date_query, append_data, convert_property_value, get_compare_period_dates, dict_from_cursor_fetchall from rest_framework import request, response, serializers, viewsets @@ -12,6 +13,7 @@ from typing import Any, Dict, List, Union from django.utils.timezone import now import json import pandas as pd +from typing import Tuple, Optional class ElementSerializer(serializers.ModelSerializer): event = serializers.CharField() @@ -124,6 +126,12 @@ class EventViewSet(viewsets.ModelViewSet): event.elements_group_cache = None # type: ignore return events + def _prefech_elements(self, hash_ids: List[str], team: Team) -> QuerySet: + groups = ElementGroup.objects.none() + if len(hash_ids) > 0: + groups = ElementGroup.objects.filter(team=team, hash__in=hash_ids).prefetch_related('element_set') + return groups + def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response: queryset = self.get_queryset() monday = now() + timedelta(days=-now().weekday()) @@ -207,15 +215,12 @@ class EventViewSet(viewsets.ModelViewSet): return response.Response([{'name': convert_property_value(value.value)} for value in values]) - def _handle_compared(self, date_filter: Dict[str, datetime], session_type: str) -> List[Dict[str, Any]]: + def _handle_compared(self, date_filter: Dict[str, datetime]) -> QuerySet: date_from, date_to = get_compare_period_dates(date_filter['timestamp__gte'], date_filter['timestamp__lte']) date_filter['timestamp__gte'] = date_from date_filter['timestamp__lte'] = date_to compared_events = self.get_queryset().filter(**date_filter) - - compared_calculated = self.calculate_sessions(compared_events, session_type, date_filter) - - return compared_calculated + return compared_events def _convert_to_comparison(self, trend_entity: List[Dict[str, Any]], label: str) -> List[Dict[str, Any]]: for entity in trend_entity: @@ -227,8 +232,9 @@ class EventViewSet(viewsets.ModelViewSet): @action(methods=['GET'], detail=False) def sessions(self, request: request.Request) -> response.Response: team = self.request.user.team_set.get() - date_filter = request_to_date_query(request.GET.dict()) + session_type = self.request.GET.get('session') + date_filter = request_to_date_query(request.GET.dict(), exact=True) if not date_filter.get('timestamp__gte'): date_filter['timestamp__gte'] = Event.objects.filter(team=team)\ .order_by('timestamp')[0]\ @@ -238,26 +244,49 @@ class EventViewSet(viewsets.ModelViewSet): if not date_filter.get('timestamp__lte'): date_filter['timestamp__lte'] = now() - events = self.get_queryset().filter(**date_filter) - - session_type = self.request.GET.get('session') + events = self.get_queryset() + if session_type is not None: + events = events.filter(**date_filter) + calculated = [] # get compared period compare = request.GET.get('compare') + result: Dict[str, Any] = {'result': []} if compare and request.GET.get('date_from') != 'all' and session_type == 'avg': - calculated = self.calculate_sessions(events, session_type, date_filter) + calculated = self.calculate_sessions(events, session_type, date_filter, team, request) calculated = self._convert_to_comparison(calculated, 
'current') - compared_calculated = self._handle_compared(date_filter, session_type) + compared_events = self._handle_compared(date_filter) + compared_calculated = self.calculate_sessions(compared_events, session_type, date_filter, team, request) converted_compared_calculated = self._convert_to_comparison(compared_calculated, 'previous') calculated.extend(converted_compared_calculated) else: - calculated = self.calculate_sessions(events, session_type, date_filter) + calculated = self.calculate_sessions(events, session_type, date_filter, team, request) + result.update({'result': calculated}) - return response.Response(calculated) + # add pagination + if session_type is None: + offset = int(request.GET.get('offset', '0')) + 50 + if len(calculated) > 49: + date_from = calculated[0]['start_time'].isoformat() + result.update({'offset': offset}) + result.update({'date_from': date_from}) + return response.Response(result) - def calculate_sessions(self, events: QuerySet, session_type: str, date_filter) -> List[Dict[str, Any]]: - sessions = events\ + def calculate_sessions(self, events: QuerySet, session_type: Optional[str], date_filter: Dict[str, datetime], team: Team, request: request.Request) -> List[Dict[str, Any]]: + + # format date filter for session view + _date_gte = Q() + if session_type is None: + if request.GET.get('date_from', None): + _date_gte = Q(timestamp__gte=date_filter['timestamp__gte'], timestamp__lte=date_filter['timestamp__gte'] + relativedelta(days=1)) + else: + dt = events.order_by('-timestamp').values('timestamp')[0]['timestamp'] + if dt: + dt = dt.replace(hour=0, minute=0, second=0, microsecond=0) + _date_gte = Q(timestamp__gte=dt, timestamp__lte=dt + relativedelta(days=1)) + + sessions = events.filter(_date_gte)\ .annotate(previous_timestamp=Window( expression=Lag('timestamp', default=None), partition_by=F('distinct_id'), @@ -270,20 +299,94 @@ class EventViewSet(viewsets.ModelViewSet): )) sessions_sql, sessions_sql_params = sessions.query.sql_with_params() - # TODO: add midnight condition - all_sessions = '\ - SELECT distinct_id, timestamp,\ + SELECT *,\ SUM(new_session) OVER (ORDER BY distinct_id, timestamp) AS global_session_id,\ SUM(new_session) OVER (PARTITION BY distinct_id ORDER BY timestamp) AS user_session_id\ - FROM (SELECT *, CASE WHEN EXTRACT(\'EPOCH\' FROM (timestamp - previous_timestamp)) >= (60 * 30)\ + FROM (SELECT id, distinct_id, event, elements_hash, timestamp, properties, CASE WHEN EXTRACT(\'EPOCH\' FROM (timestamp - previous_timestamp)) >= (60 * 30)\ OR previous_timestamp IS NULL \ THEN 1 ELSE 0 END AS new_session \ FROM ({}) AS inner_sessions\ ) AS outer_sessions'.format(sessions_sql) - def distribution(query): - return 'SELECT COUNT(CASE WHEN length = 0 THEN 1 ELSE NULL END) as first,\ + result: List = [] + if session_type == 'avg': + result = self._session_avg(all_sessions, sessions_sql_params, date_filter) + elif session_type == 'dist': + result = self._session_dist(all_sessions, sessions_sql_params) + else: + result = self._session_list(all_sessions, sessions_sql_params, team, date_filter, request) + + return result + + def _session_list(self, base_query: str, params: Tuple[Any, ...], team: Team, date_filter: Dict[str, datetime], request: request.Request) -> List[Dict[str, Any]]: + session_list = 'SELECT * FROM (SELECT global_session_id, properties, start_time, length, sessions.distinct_id, event_count, events from\ + (SELECT\ + global_session_id,\ + count(1) as event_count,\ + MAX(distinct_id) as distinct_id,\ + EXTRACT(\'EPOCH\' FROM 
(MAX(timestamp) - MIN(timestamp))) AS length,\ + MIN(timestamp) as start_time,\ + array_agg(json_build_object( \'id\', id, \'event\', event, \'timestamp\', timestamp, \'properties\', properties, \'elements_hash\', elements_hash) ORDER BY timestamp) as events\ + FROM ({}) as count GROUP BY 1) as sessions\ + LEFT OUTER JOIN posthog_persondistinctid ON posthog_persondistinctid.distinct_id = sessions.distinct_id\ + LEFT OUTER JOIN posthog_person ON posthog_person.id = posthog_persondistinctid.person_id\ + ORDER BY start_time DESC) as ordered_sessions OFFSET %s LIMIT 50'.format(base_query) + + with connection.cursor() as cursor: + offset = request.GET.get('offset', 0) + params = params + (offset,) + cursor.execute(session_list, params) + sessions = dict_from_cursor_fetchall(cursor) + + hash_ids = [] + for session in sessions: + for event in session['events']: + if event.get('elements_hash'): + hash_ids.append(event['elements_hash']) + + groups = self._prefech_elements(hash_ids, team) + + for session in sessions: + for event in session['events']: + try: + event.update({'elements': ElementSerializer([group for group in groups if group.hash == event['elements_hash']][0].element_set.all().order_by('order'), many=True).data}) + except IndexError: + event.update({'elements': []}) + result = sessions + return result + + def _session_avg(self, base_query: str, params: Tuple[Any, ...], date_filter: Dict[str, datetime]) -> List[Dict[str, Any]]: + average_length_time = 'SELECT date_trunc(\'day\', timestamp) as start_time,\ + AVG(length) AS average_session_length_per_day,\ + SUM(length) AS total_session_length_per_day, \ + COUNT(1) as num_sessions_per_day\ + FROM (SELECT global_session_id, EXTRACT(\'EPOCH\' FROM (MAX(timestamp) - MIN(timestamp)))\ + AS length,\ + MIN(timestamp) as timestamp FROM ({}) as count GROUP BY 1) as agg group by 1 order by start_time'.format(base_query) + + cursor = connection.cursor() + cursor.execute(average_length_time, params) + time_series_avg = cursor.fetchall() + time_series_avg_friendly = [] + date_range = pd.date_range(date_filter['timestamp__gte'].date(), date_filter['timestamp__lte'].date(), freq='D') + time_series_avg_friendly = [(day, round(time_series_avg[index][1] if index < len(time_series_avg) else 0)) for index, day in enumerate(date_range)] + + time_series_data = append_data(time_series_avg_friendly, math=None) + + # calculate average + totals = [sum(x) for x in list(zip(*time_series_avg))[2:4]] + overall_average = (totals[0] / totals[1]) if totals else 0 + avg_formatted = friendly_time(overall_average) + avg_split = avg_formatted.split(' ') + + time_series_data.update({'label': 'Average Duration of Session ({})'.format(avg_split[1]), 'count': int(avg_split[0])}) + time_series_data.update({"chartLabel": 'Average Duration of Session (seconds)'}) + result = [time_series_data] + return result + + def _session_dist(self, base_query: str, params: Tuple[Any, ...]) -> List[Dict[str, Any]]: + distribution = 'SELECT COUNT(CASE WHEN length = 0 THEN 1 ELSE NULL END) as first,\ COUNT(CASE WHEN length > 0 AND length <= 3 THEN 1 ELSE NULL END) as second,\ COUNT(CASE WHEN length > 3 AND length <= 10 THEN 1 ELSE NULL END) as third,\ COUNT(CASE WHEN length > 10 AND length <= 30 THEN 1 ELSE NULL END) as fourth,\ @@ -294,44 +397,11 @@ class EventViewSet(viewsets.ModelViewSet): COUNT(CASE WHEN length > 1800 AND length <= 3600 THEN 1 ELSE NULL END) as ninth,\ COUNT(CASE WHEN length > 3600 THEN 1 ELSE NULL END) as tenth\ FROM (SELECT global_session_id, EXTRACT(\'EPOCH\' FROM 
(MAX(timestamp) - MIN(timestamp)))\ - AS length FROM ({}) as count GROUP BY 1) agg'.format(query) + AS length FROM ({}) as count GROUP BY 1) agg'.format(base_query) - def average_length_time(query): - return 'SELECT date_trunc(\'day\', timestamp) as start_time,\ - AVG(length) AS average_session_length_per_day,\ - SUM(length) AS total_session_length_per_day, \ - COUNT(1) as num_sessions_per_day\ - FROM (SELECT global_session_id, EXTRACT(\'EPOCH\' FROM (MAX(timestamp) - MIN(timestamp)))\ - AS length,\ - MIN(timestamp) as timestamp FROM ({}) as count GROUP BY 1) as agg group by 1 order by start_time'.format(query) - - result: List = [] - if session_type == 'avg': - - cursor = connection.cursor() - cursor.execute(average_length_time(all_sessions), sessions_sql_params) - time_series_avg = cursor.fetchall() - time_series_avg_friendly = [] - date_range = pd.date_range(date_filter['timestamp__gte'].date(), date_filter['timestamp__lte'].date(), freq='D') - time_series_avg_friendly = [(day, round(time_series_avg[index][1] if index < len(time_series_avg) else 0)) for index, day in enumerate(date_range)] - - time_series_data = append_data(time_series_avg_friendly, math=None) - - # calculate average - totals = [sum(x) for x in list(zip(*time_series_avg))[2:4]] - overall_average = (totals[0] / totals[1]) if totals else 0 - avg_formatted = friendly_time(overall_average) - avg_split = avg_formatted.split(' ') - - time_series_data.update({'label': 'Average Duration of Session ({})'.format(avg_split[1]), 'count': int(avg_split[0])}) - time_series_data.update({"chartLabel": 'Average Duration of Session (seconds)'}) - - result = [time_series_data] - else: - dist_labels = ['0 seconds (1 event)', '0-3 seconds', '3-10 seconds', '10-30 seconds', '30-60 seconds', '1-3 minutes', '3-10 minutes', '10-30 minutes', '30-60 minutes', '1+ hours'] - cursor = connection.cursor() - cursor.execute(distribution(all_sessions), sessions_sql_params) - calculated = cursor.fetchall() - result = [{'label': dist_labels[index], 'count': calculated[0][index]} for index in range(len(dist_labels))] - - return result + dist_labels = ['0 seconds (1 event)', '0-3 seconds', '3-10 seconds', '10-30 seconds', '30-60 seconds', '1-3 minutes', '3-10 minutes', '10-30 minutes', '30-60 minutes', '1+ hours'] + cursor = connection.cursor() + cursor.execute(distribution, params) + calculated = cursor.fetchall() + result = [{'label': dist_labels[index], 'count': calculated[0][index]} for index in range(len(dist_labels))] + return result \ No newline at end of file diff --git a/posthog/api/paths.py b/posthog/api/paths.py index bcf2155c224..f70e9236da0 100644 --- a/posthog/api/paths.py +++ b/posthog/api/paths.py @@ -94,7 +94,7 @@ class PathsViewSet(viewsets.ViewSet): def list(self, request): team = request.user.team_set.get() resp = [] - date_query = request_to_date_query(request.GET) + date_query = request_to_date_query(request.GET, exact=False) event, path_type, event_filter, start_comparator = self._determine_path_type(request) properties = request.GET.get('properties') start_point = request.GET.get('start') diff --git a/posthog/api/test/test_event.py b/posthog/api/test/test_event.py index 9d248e3c21a..9164beb1ff2 100644 --- a/posthog/api/test/test_event.py +++ b/posthog/api/test/test_event.py @@ -172,6 +172,24 @@ class TestEvents(TransactionBaseTest): self.assertEqual(len(response['results']), 1) self.assertEqual(response['results'][0]['id'], event2.pk) + def test_sessions_list(self): + with freeze_time("2012-01-14T03:21:34.000Z"): + 
Event.objects.create(team=self.team, event='1st action', distinct_id="1") + Event.objects.create(team=self.team, event='1st action', distinct_id="2") + with freeze_time("2012-01-14T03:25:34.000Z"): + Event.objects.create(team=self.team, event='2nd action', distinct_id="1") + Event.objects.create(team=self.team, event='2nd action', distinct_id="2") + with freeze_time("2012-01-15T03:59:34.000Z"): + Event.objects.create(team=self.team, event='3rd action', distinct_id="1") + Event.objects.create(team=self.team, event='3rd action', distinct_id="2") + with freeze_time("2012-01-15T04:01:34.000Z"): + Event.objects.create(team=self.team, event='4th action', distinct_id="1") + Event.objects.create(team=self.team, event='4th action', distinct_id="2") + + response = self.client.get('/api/event/sessions/').json() + self.assertEqual(len(response['result']), 2) + self.assertEqual(response['result'][0]['global_session_id'], 1) + def test_sessions_avg_length(self): with freeze_time("2012-01-14T03:21:34.000Z"): Event.objects.create(team=self.team, event='1st action', distinct_id="1") @@ -187,15 +205,15 @@ class TestEvents(TransactionBaseTest): Event.objects.create(team=self.team, event='4th action', distinct_id="2") response = self.client.get('/api/event/sessions/?session=avg&date_from=all').json() - self.assertEqual(response[0]['count'], 3) # average length of all sessions + self.assertEqual(response['result'][0]['count'], 3) # average length of all sessions # time series - self.assertEqual(response[0]['data'][0], 240) - self.assertEqual(response[0]['data'][1], 120) - self.assertEqual(response[0]['labels'][0], 'Sat. 14 January') - self.assertEqual(response[0]['labels'][1], 'Sun. 15 January') - self.assertEqual(response[0]['days'][0], '2012-01-14') - self.assertEqual(response[0]['days'][1], '2012-01-15') + self.assertEqual(response['result'][0]['data'][0], 240) + self.assertEqual(response['result'][0]['data'][1], 120) + self.assertEqual(response['result'][0]['labels'][0], 'Sat. 14 January') + self.assertEqual(response['result'][0]['labels'][1], 'Sun. 
15 January') + self.assertEqual(response['result'][0]['days'][0], '2012-01-14') + self.assertEqual(response['result'][0]['days'][1], '2012-01-15') def test_sessions_count_buckets(self): @@ -254,16 +272,15 @@ class TestEvents(TransactionBaseTest): with freeze_time("2012-01-21T06:00:30.000Z"): Event.objects.create(team=self.team, event='3rd action', distinct_id="2") - response = self.client.get('/api/event/sessions/?session=distribution&date_from=all').json() - compared_response = self.client.get('/api/event/sessions/?session=distribution&date_from=all&compare=true').json() - - for index, item in enumerate(response): + response = self.client.get('/api/event/sessions/?session=dist&date_from=all').json() + compared_response = self.client.get('/api/event/sessions/?session=dist&date_from=all&compare=true').json() + for index, item in enumerate(response['result']): if item['label'] == '30-60 minutes' or item['label'] == '3-10 seconds': self.assertEqual(item['count'], 2) - self.assertEqual(compared_response[index]['count'], 2) + self.assertEqual(compared_response['result'][index]['count'], 2) else: self.assertEqual(item['count'], 1) - self.assertEqual(compared_response[index]['count'], 1) + self.assertEqual(compared_response['result'][index]['count'], 1) def test_pagination(self): events = [] diff --git a/posthog/utils.py b/posthog/utils.py index 6918e0fef9c..b5e2a45bf47 100644 --- a/posthog/utils.py +++ b/posthog/utils.py @@ -6,7 +6,7 @@ from typing import Dict, Any, List, Union from django.template.loader import get_template from django.http import HttpResponse, JsonResponse, HttpRequest from dateutil import parser -from typing import Tuple +from typing import Tuple, Optional import datetime import json @@ -51,7 +51,7 @@ def relative_date_parse(input: str) -> datetime.datetime: date = date - relativedelta(month=12, day=31) return date.replace(hour=0, minute=0, second=0, microsecond=0) -def request_to_date_query(filters: Dict[str, Any]) -> Dict[str, datetime.datetime]: +def request_to_date_query(filters: Dict[str, Any], exact: Optional[bool]) -> Dict[str, datetime.datetime]: if filters.get('date_from'): date_from = relative_date_parse(filters['date_from']) if filters['date_from'] == 'all': @@ -68,7 +68,8 @@ def request_to_date_query(filters: Dict[str, Any]) -> Dict[str, datetime.datetim if date_from: resp['timestamp__gte'] = date_from.replace(tzinfo=pytz.UTC) if date_to: - resp['timestamp__lte'] = (date_to + relativedelta(days=1)).replace(tzinfo=pytz.UTC) + days = 1 if not exact else 0 + resp['timestamp__lte'] = (date_to + relativedelta(days=days)).replace(tzinfo=pytz.UTC) return resp def render_template(template_name: str, request: HttpRequest, context=None) -> HttpResponse:
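
Note on the sessionization step in posthog/api/event.py above: calculate_sessions orders each person's events by timestamp, uses a Lag() window to fetch the previous timestamp, flags new_session whenever the gap to the previous event is at least 30 minutes (or there is no previous event), and turns running sums of that flag into global_session_id / user_session_id. The snippet below is not part of the patch; it is a minimal pure-Python sketch of the same idea, and the sessionize helper plus its event dicts are invented for illustration only.

from datetime import datetime, timedelta
from itertools import groupby

SESSION_GAP = timedelta(minutes=30)

def sessionize(events):
    """Group {'distinct_id', 'timestamp'} dicts into sessions.

    Mirrors the SQL in calculate_sessions: order events per distinct_id,
    start a new session when the gap to the previous event is >= 30 minutes,
    and number sessions with a running counter (the window-function SUM).
    """
    events = sorted(events, key=lambda e: (e["distinct_id"], e["timestamp"]))
    global_session_id = 0
    sessions = []
    for _, user_events in groupby(events, key=lambda e: e["distinct_id"]):
        previous_timestamp = None
        for event in user_events:
            if previous_timestamp is None or event["timestamp"] - previous_timestamp >= SESSION_GAP:
                # analogous to SUM(new_session) OVER (ORDER BY distinct_id, timestamp)
                global_session_id += 1
                sessions.append({"global_session_id": global_session_id, "events": []})
            sessions[-1]["events"].append(event)
            previous_timestamp = event["timestamp"]
    return sessions

if __name__ == "__main__":
    evts = [
        {"distinct_id": "1", "timestamp": datetime(2012, 1, 14, 3, 21)},
        {"distinct_id": "1", "timestamp": datetime(2012, 1, 14, 3, 25)},  # same session (4 min gap)
        {"distinct_id": "1", "timestamp": datetime(2012, 1, 14, 4, 1)},   # new session (36 min gap)
        {"distinct_id": "2", "timestamp": datetime(2012, 1, 14, 3, 59)},
    ]
    for s in sessionize(evts):
        length = s["events"][-1]["timestamp"] - s["events"][0]["timestamp"]
        print(s["global_session_id"], len(s["events"]), length)

Pagination of the session list then works as shown in the diff: the endpoint returns at most 50 sessions per request together with an offset and the date_from it resolved, and sessionsTableLogic sends those values back on fetchNextSessions to append the next page.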