
Encode prepared urls (#8103)

* encode

* don't use named

* fix typing

* add test

* add more comments

* remove lambdas

* restore original
Eric Duong authored 2022-01-19 09:29:00 -05:00 (committed by GitHub)
parent 6b6de16826
commit 2ea18b382b
5 changed files with 110 additions and 5 deletions
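
The change routes every prepared persons URL through posthog.utils.encode_get_request_params before it reaches urllib.parse.urlencode. The helper itself lives outside this diff; as a minimal sketch of the behavior the call sites rely on (assumed here, not the verbatim PostHog implementation), it drops None values and keeps structured values parseable:

    import json
    from typing import Any, Dict

    def encode_get_request_params(data: Dict[str, Any]) -> Dict[str, str]:
        encoded: Dict[str, str] = {}
        for key, value in data.items():
            if value is None:
                continue  # drop the key entirely instead of serializing it as the string "None"
            if isinstance(value, (list, dict)):
                encoded[key] = json.dumps(value)  # keep structured filters (e.g. properties) parseable
            else:
                encoded[key] = str(value)
        return encoded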


@@ -43,6 +43,7 @@ from posthog.constants import (
 )
 from posthog.models.entity import Entity
 from posthog.models.filters import Filter
+from posthog.utils import encode_get_request_params

 class ClickhouseTrendsBreakdown:
@@ -238,7 +239,7 @@ class ClickhouseTrendsBreakdown:
     "breakdown_value": result_descriptors["breakdown_value"],
     "breakdown_type": filter.breakdown_type or "event",
 }
-parsed_params: Dict[str, Union[Any, int, str]] = {**filter_params, **extra_params}
+parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params})
 parsed_result = {
     "aggregated_value": stats[0],
     "filter": filter_params,
@@ -289,7 +290,7 @@
     "breakdown_value": breakdown_value,
     "breakdown_type": filter.breakdown_type or "event",
 }
-parsed_params: Dict[str, Union[Any, int, str]] = {**filter_params, **extra_params}
+parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params})
 persons_url.append(
     {
         "filter": extra_params,


@@ -17,6 +17,7 @@ from ee.clickhouse.sql.trends.volume import (
 from posthog.constants import MONTHLY_ACTIVE, TRENDS_CUMULATIVE, TRENDS_DISPLAY_BY_VALUE, WEEKLY_ACTIVE
 from posthog.models.entity import Entity
 from posthog.models.filters import Filter
+from posthog.utils import encode_get_request_params

 class ClickhouseTrendsTotalVolume:
@@ -91,7 +92,7 @@ class ClickhouseTrendsTotalVolume:
 time_range = enumerate_time_range(filter, seconds_in_interval)
 filter_params = filter.to_params()
 extra_params = {"entity_id": entity.id, "entity_type": entity.type, "entity_math": entity.math}
-parsed_params: Dict[str, Union[Any, int, str]] = {**filter_params, **extra_params}
+parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params})
 return [
     {
@@ -118,7 +119,8 @@ class ClickhouseTrendsTotalVolume:
     "date_from": filter.date_from if filter.display == TRENDS_CUMULATIVE else date,
     "date_to": date,
 }
-parsed_params: Dict[str, Union[Any, int, str]] = {**filter_params, **extra_params}
+parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params})
 persons_url.append(
     {
         "filter": extra_params,


@@ -111,6 +111,72 @@
 OFFSET 0
 '
 ---
+# name: ClickhouseTestTrends.test_insight_trends_clean_arg
+'
+/* request:api_projects_(?P<parent_lookup_team_id>[^_.]+)_insights_trend_?$ (ClickhouseInsightsViewSet) */
+SELECT groupArray(day_start) as date,
+groupArray(count) as data
+FROM
+(SELECT SUM(total) AS count,
+day_start
+from
+(SELECT toUInt16(0) AS total,
+toStartOfDay(toDateTime('2012-01-15 23:59:59') - toIntervalDay(number)) AS day_start
+FROM numbers(dateDiff('day', toDateTime('2012-01-01 00:00:00'), toDateTime('2012-01-15 23:59:59')))
+UNION ALL SELECT toUInt16(0) AS total,
+toStartOfDay(toDateTime('2012-01-01 00:00:00'))
+UNION ALL SELECT count(*) as data,
+toDateTime(toStartOfDay(timestamp), 'UTC') as date
+FROM
+(SELECT e.timestamp as timestamp,
+e.properties as properties
+FROM events e
+WHERE team_id = 2
+AND event = '$pageview'
+AND toStartOfDay(timestamp) >= toStartOfDay(toDateTime('2012-01-01 00:00:00'))
+AND timestamp <= '2012-01-15 23:59:59'
+AND has(['val'], trim(BOTH '"'
+FROM JSONExtractRaw(properties, 'key'))) )
+GROUP BY toStartOfDay(timestamp))
+group by day_start
+order by day_start) SETTINGS timeout_before_checking_execution_speed = 60
+'
+---
+# name: ClickhouseTestTrends.test_insight_trends_clean_arg.1
+'
+/* request:api_projects_(?P<parent_lookup_team_id>[^_.]+)_actions_people_?$ (ClickhouseActionsViewSet) */
+SELECT person_id AS actor_id
+FROM
+(SELECT e.timestamp as timestamp,
+e.properties as properties,
+pdi.person_id as person_id,
+e.distinct_id as distinct_id,
+e.team_id as team_id
+FROM events e
+INNER JOIN
+(SELECT distinct_id,
+argMax(person_id, version) as person_id
+FROM person_distinct_id2
+WHERE team_id = 2
+GROUP BY distinct_id
+HAVING argMax(is_deleted, version) = 0) AS pdi ON e.distinct_id = pdi.distinct_id
+INNER JOIN
+(SELECT id
+FROM person
+WHERE team_id = 2
+GROUP BY id
+HAVING max(is_deleted) = 0) person ON person.id = pdi.person_id
+WHERE team_id = 2
+AND event = '$pageview'
+AND timestamp >= '2012-01-14 00:00:00'
+AND timestamp <= '2012-01-14 23:59:59'
+AND has(['val'], trim(BOTH '"'
+FROM JSONExtractRaw(properties, 'key'))) )
+GROUP BY actor_id
+LIMIT 200
+OFFSET 0
+'
+---
 # name: ClickhouseTestTrends.test_insight_trends_cumulative
 '
 /* request:api_projects_(?P<parent_lookup_team_id>[^_.]+)_insights_trend_?$ (ClickhouseInsightsViewSet) */
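
Both snapshots above belong to the new test below; the second one is the actors query issued through the prepared person_url. Note that it still applies the has(['val'], ...) property filter, which is what restricts the result to a single person; per the test's comment, the unencoded URL previously returned both people.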


@@ -62,6 +62,41 @@ class ClickhouseTestTrends(ClickhouseTestMixin, LicensedTestMixin, APIBaseTest):
             [str(created_people["1"].uuid), str(created_people["2"].uuid)]
         )
+
+    @snapshot_clickhouse_queries
+    def test_insight_trends_clean_arg(self):
+        events_by_actor = {
+            "1": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}},],
+            "2": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)},],
+        }
+        created_actors = journeys_for(events_by_actor, self.team)
+
+        with freeze_time("2012-01-15T04:01:34.000Z"):
+            request = TrendsRequest(
+                date_from="-14d",
+                display="ActionsLineGraph",
+                events=[
+                    {
+                        "id": "$pageview",
+                        "math": None,  # this argument will now be removed from the request instead of becoming a string
+                        "name": "$pageview",
+                        "custom_name": None,
+                        "type": "events",
+                        "order": 0,
+                        "properties": [{"key": "key", "value": "val"}],
+                        "math_property": None,
+                    }
+                ],
+            )
+            data = get_trends_time_series_ok(self.client, request, self.team)
+
+        actors = get_people_from_url_ok(self.client, data["$pageview"]["2012-01-14"].person_url)
+
+        # this would return 2 people prior to the #8103 fix:
+        # 'None' values have to be purged before formatting into the actor url
+        assert sorted([p["id"] for p in actors]) == sorted([str(created_actors["1"].uuid)])
+
     @snapshot_clickhouse_queries
     def test_insight_trends_aggregate(self):
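
Beyond purging None, coercing everything to Dict[str, str] also matters for structured values such as this test's properties filter: JSON-encoding keeps them decodable on the receiving end, whereas Python's default str() conversion produces a repr the API cannot parse back. Illustrative only, not asserted by the diff:

    import json
    import urllib.parse

    properties = [{"key": "key", "value": "val"}]

    urllib.parse.urlencode({"properties": properties})
    # repr-based: 'properties=%5B%7B%27key%27%3A+%27key%27%2C+%27value%27%3A+%27val%27%7D%5D'

    urllib.parse.urlencode({"properties": json.dumps(properties)})
    # JSON-based: 'properties=%5B%7B%22key%22%3A+%22key%22%2C+%22value%22%3A+%22val%22%7D%5D'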


@@ -16,6 +16,7 @@ from posthog.models.event import Event, EventManager
 from posthog.models.filters.stickiness_filter import StickinessFilter
 from posthog.models.person import Person
 from posthog.queries import base
+from posthog.utils import encode_get_request_params

 from .base import BaseQuery, filter_events, filter_persons, handle_compare, process_entity_for_events
@@ -94,7 +95,7 @@ class Stickiness(BaseQuery):
     "entity_type": entity.type,
     "entity_math": entity.math,
 }
-parsed_params: Dict[str, Union[Any, int, str]] = {**filter_params, **extra_params}
+parsed_params: Dict[str, str] = encode_get_request_params({**filter_params, **extra_params})
 persons_url.append(
     {"filter": extra_params, "url": f"api/person/stickiness/?{urllib.parse.urlencode(parsed_params)}",}
 )
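
A quieter benefit, repeated at all three call sites: the annotation tightens from Dict[str, Union[Any, int, str]] (effectively Dict[str, Any], since Any absorbs the union) to an honest Dict[str, str], which is presumably what the commit's "fix typing" bullet refers to and matches what urllib.parse.urlencode is actually handed.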