fix(insights): Fix interval for total value queries (#23037)
parent 837fe038ed · commit 96f60f80db

@@ -1,62 +1,62 @@
-version: "3.8"
+version: '3.8'
 
 services:
-  postgres:
-    image: postgres:16-alpine
-    restart: always
-    ports:
-      - "5432:5432"
-    environment:
-      POSTGRES_USER: postgres
-      POSTGRES_PASSWORD: postgres
-      POSTGRES_DB: liveevents
-    healthcheck:
-      test: ["CMD-SHELL", "pg_isready -U postgres"]
-      interval: 5s
-      timeout: 5s
+  postgres:
+    image: postgres:16-alpine
+    restart: always
+    ports:
+      - '5432:5432'
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: postgres
+      POSTGRES_DB: liveevents
+    healthcheck:
+      test: ['CMD-SHELL', 'pg_isready -U postgres']
+      interval: 5s
+      timeout: 5s
 
-  redis:
-    image: redis:alpine
-    restart: always
-    ports:
-      - "6379:6379"
+  redis:
+    image: redis:alpine
+    restart: always
+    ports:
+      - '6379:6379'
 
-  redpanda:
-    image: vectorized/redpanda:v23.2.17
-    command:
-      - redpanda start
-      - --smp 1
-      - --overprovisioned
-      - --node-id 0
-      - --kafka-addr PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092
-      - --advertise-kafka-addr PLAINTEXT://redpanda:29092,OUTSIDE://localhost:9092
-      - --pandaproxy-addr 0.0.0.0:8082
-      - --advertise-pandaproxy-addr localhost:8082
-    ports:
-      - 8081:8081
-      - 8082:8082
-      - 9092:9092
-      - 29092:29092
+  redpanda:
+    image: vectorized/redpanda:v23.2.17
+    command:
+      - redpanda start
+      - --smp 1
+      - --overprovisioned
+      - --node-id 0
+      - --kafka-addr PLAINTEXT://0.0.0.0:29092,OUTSIDE://0.0.0.0:9092
+      - --advertise-kafka-addr PLAINTEXT://redpanda:29092,OUTSIDE://localhost:9092
+      - --pandaproxy-addr 0.0.0.0:8082
+      - --advertise-pandaproxy-addr localhost:8082
+    ports:
+      - 8081:8081
+      - 8082:8082
+      - 9092:9092
+      - 29092:29092
 
-  console:
-    image: docker.redpanda.com/redpandadata/console:v2.3.8
-    restart: on-failure
-    entrypoint: /bin/sh
-    command: -c "echo \"$$CONSOLE_CONFIG_FILE\" > /tmp/config.yml; /app/console"
-    environment:
-      CONFIG_FILEPATH: /tmp/config.yml
-      CONSOLE_CONFIG_FILE: |
-        kafka:
-          brokers: ["redpanda:29092"]
-        schemaRegistry:
-          enabled: true
-          urls: ["http://redpanda:8081"]
-        connect:
-          enabled: true
-          clusters:
-            - name: datagen
-              url: http://connect:8083
-    ports:
-      - "8088:8088"
-    depends_on:
-      - redpanda
+  console:
+    image: docker.redpanda.com/redpandadata/console:v2.3.8
+    restart: on-failure
+    entrypoint: /bin/sh
+    command: -c "echo \"$$CONSOLE_CONFIG_FILE\" > /tmp/config.yml; /app/console"
+    environment:
+      CONFIG_FILEPATH: /tmp/config.yml
+      CONSOLE_CONFIG_FILE: |
+        kafka:
+          brokers: ["redpanda:29092"]
+        schemaRegistry:
+          enabled: true
+          urls: ["http://redpanda:8081"]
+        connect:
+          enabled: true
+          clusters:
+            - name: datagen
+              url: http://connect:8083
+    ports:
+      - '8088:8088'
+    depends_on:
+      - redpanda

@@ -3390,10 +3390,10 @@
    (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`,
            sessions.session_id AS session_id
     FROM sessions
-    WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
+    WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
     GROUP BY sessions.session_id,
              sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id)
-WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true)
+WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true)
 GROUP BY e.`$session_id`,
          breakdown_value
 ORDER BY 1 DESC, breakdown_value DESC)
@@ -3867,7 +3867,7 @@
    (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`,
            sessions.session_id AS session_id
     FROM sessions
-    WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
+    WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
     GROUP BY sessions.session_id,
              sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id)
 INNER JOIN
@@ -3888,7 +3888,7 @@
 WHERE equals(person.team_id, 2)
 GROUP BY person.id
 HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(person.created_at, person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS e__pdi__person ON equals(e__pdi.e__pdi___person_id, e__pdi__person.id)
-WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true)
+WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'), true)
 GROUP BY e.`$session_id`,
          breakdown_value
 ORDER BY 1 DESC, breakdown_value DESC)
@@ -3944,10 +3944,10 @@
    (SELECT dateDiff('second', min(sessions.min_timestamp), max(sessions.max_timestamp)) AS `$session_duration`,
            sessions.session_id AS session_id
     FROM sessions
-    WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
+    WHERE and(equals(sessions.team_id, 2), ifNull(greaterOrEquals(plus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), 0), ifNull(lessOrEquals(minus(toTimeZone(sessions.min_timestamp, 'UTC'), toIntervalDay(3)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 0))
     GROUP BY sessions.session_id,
              sessions.session_id) AS e__session ON equals(e.`$session_id`, e__session.session_id)
-WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfWeek(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')), 0)), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))
+WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up'))
 GROUP BY e.`$session_id`
 ORDER BY 1 DESC)
 LIMIT 50000 SETTINGS readonly=2,
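
All of the snapshot hunks above make the same substitution: the lower bound of the date range is now truncated with toStartOfDay instead of toStartOfWeek. A minimal Python sketch of why that matters, using the date_from from these snapshots (the Sunday-start arithmetic mirrors ClickHouse's toStartOfWeek with mode 0; this sketch is not part of the diff):

from datetime import datetime, timedelta

range_start = datetime(2019, 12, 28)  # date_from in the snapshots above (a Saturday)

# toStartOfWeek(ts, 0): mode-0 weeks start on Sunday, so the bound moves back
start_of_week = range_start - timedelta(days=(range_start.weekday() + 1) % 7)

# toStartOfDay(ts): only the time-of-day is truncated, the date is kept
start_of_day = range_start.replace(hour=0, minute=0, second=0, microsecond=0)

print(start_of_week)  # 2019-12-22 00:00:00 -> up to six extra days of events
print(start_of_day)   # 2019-12-28 00:00:00 -> exactly the requested window
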
@@ -1429,6 +1429,24 @@ class TestTrendsQueryRunner(ClickhouseTestMixin, APIBaseTest):
         assert response.results[0]["count"] == 0
         assert response.results[0]["aggregated_value"] == 10
 
+    def test_trends_display_aggregate_interval(self):
+        self._create_test_events()
+
+        response = self._run_trends_query(
+            "2020-01-09",
+            "2020-01-20",
+            IntervalType.MONTH,  # E.g. UI sets interval to month, but we need the total value across all days
+            [EventsNode(event="$pageview")],
+            TrendsFilter(display=ChartDisplayType.BOLD_NUMBER),
+            None,
+        )
+
+        assert len(response.results) == 1
+        assert response.results[0]["data"] == []
+        assert response.results[0]["days"] == []
+        assert response.results[0]["count"] == 0
+        assert response.results[0]["aggregated_value"] == 10
+
     def test_trends_display_cumulative(self):
         self._create_test_events()
 

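The new test pins down exactly this case: the UI requests a month interval, but a BOLD_NUMBER display must aggregate over the literal date range. A sketch of the failure mode being guarded against (dates taken from the test; the month-boundary snapping is how an interval-aligned range start would behave, by analogy with the week-interval snapshots above):

from datetime import date

date_from, date_to = date(2020, 1, 9), date(2020, 1, 20)  # bounds from the test

# With interval=month, an interval-aligned range start snaps to the month
# boundary, pulling in eight extra days of events; date_to stays inside January.
month_aligned_from = date_from.replace(day=1)  # 2020-01-01

# Forcing interval=day keeps the bounds the user asked for.
day_aligned_from = date_from  # 2020-01-09

assert month_aligned_from < day_aligned_from  # the old window started too early
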
@@ -66,6 +66,7 @@ from posthog.schema import (
     HogQLQueryModifiers,
     DataWarehouseEventsModifier,
     BreakdownType,
+    IntervalType,
 )
 from posthog.warehouse.models import DataWarehouseTable
 from posthog.utils import format_label_date, multisort
@@ -574,10 +575,11 @@ class TrendsQueryRunner(QueryRunner):
 
     @cached_property
     def query_date_range(self):
+        interval = IntervalType.DAY if self._trends_display.is_total_value() else self.query.interval
         return QueryDateRange(
             date_range=self.query.dateRange,
             team=self.team,
-            interval=self.query.interval,
+            interval=interval,
             now=datetime.now(),
         )
 
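The fix itself is the one added line: when the chart is a total-value display, the runner overrides whatever interval the query carries before building the date range. A self-contained sketch of that decision (everything except IntervalType is a hypothetical stand-in, not the PostHog API):

from enum import Enum


class IntervalType(Enum):
    DAY = "day"
    WEEK = "week"
    MONTH = "month"


def effective_interval(query_interval: IntervalType, is_total_value: bool) -> IntervalType:
    # Total-value displays (e.g. a bold number) aggregate over the whole
    # range, so bucketing adds nothing -- a coarser interval would only snap
    # the range bounds outward. Fall back to day granularity instead.
    return IntervalType.DAY if is_total_value else query_interval


# The scenario from the new test: UI says month, display is a bold number.
assert effective_interval(IntervalType.MONTH, is_total_value=True) is IntervalType.DAY
# Ordinary time-series charts keep the interval the user picked.
assert effective_interval(IntervalType.MONTH, is_total_value=False) is IntervalType.MONTH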